hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acdef1f5f9d2abf69d13bb2b357478a3f2374e92 | 10,672 | py | Python | grr/server/grr_response_server/bin/frontend_test.py | billstackpole/grr | 203a0a99990a2d4004aed84a5cd822cbda2b418c | [
"Apache-2.0"
] | 1 | 2019-03-28T07:09:41.000Z | 2019-03-28T07:09:41.000Z | grr/server/grr_response_server/bin/frontend_test.py | gingogo/grr | 203a0a99990a2d4004aed84a5cd822cbda2b418c | [
"Apache-2.0"
] | null | null | null | grr/server/grr_response_server/bin/frontend_test.py | gingogo/grr | 203a0a99990a2d4004aed84a5cd822cbda2b418c | [
"Apache-2.0"
] | 1 | 2018-08-30T14:50:24.000Z | 2018-08-30T14:50:24.000Z | #!/usr/bin/env python
"""Unittest for grr http server."""
import hashlib
import os
import socket
import threading
from future.utils import iteritems
import ipaddr
import portpicker
import requests
from google.protobuf import json_format
from grr_response_core.lib import flags
from grr_response_core.lib import utils
from grr_response_core.lib.rdfvalues import file_finder as rdf_file_finder
from grr_response_core.lib.rdfvalues import paths as rdf_paths
from grr_response_core.lib.rdfvalues import rekall_types as rdf_rekall_types
from grr_response_server import aff4
from grr_response_server import data_store_utils
from grr_response_server import flow
from grr_response_server import frontend_lib
from grr_response_server.aff4_objects import aff4_grr
from grr_response_server.aff4_objects import filestore
from grr_response_server.bin import frontend
from grr_response_server.flows.general import file_finder
from grr.test_lib import action_mocks
from grr.test_lib import flow_test_lib
from grr.test_lib import rekall_test_lib
from grr.test_lib import test_lib
from grr.test_lib import worker_mocks
class GRRHTTPServerTest(test_lib.GRRBaseTest):
  """Test the http server.

  A real frontend.GRRHTTPServer is started once for the whole test class on
  an unused local port (IPv4 or IPv6, whatever "localhost" resolves to) and
  exercised over plain HTTP, plus through client flows that are pointed at
  `base_url` via the Client.server_urls config option.
  """

  @classmethod
  def setUpClass(cls):
    super(GRRHTTPServerTest, cls).setUpClass()
    cls.config_overrider = test_lib.ConfigOverrider({
        "Rekall.profile_server":
            rekall_test_lib.TestRekallRepositoryProfileServer.__name__,
        "FileUploadFileStore.root_dir":
            test_lib.TempDirPath()
    })
    cls.config_overrider.Start()

    # Frontend must be initialized to register all the stats counters.
    frontend_lib.FrontendInit().RunOnce()

    # Bring up a local server for testing.
    port = portpicker.PickUnusedPort()
    ip = utils.ResolveHostnameToIP("localhost", port)
    cls.httpd = frontend.GRRHTTPServer((ip, port),
                                       frontend.GRRHTTPServerHandler)
    if ipaddr.IPAddress(ip).version == 6:
      cls.address_family = socket.AF_INET6
      cls.base_url = "http://[%s]:%d/" % (ip, port)
    else:
      cls.address_family = socket.AF_INET
      cls.base_url = "http://%s:%d/" % (ip, port)
    # Serve requests on a daemon thread so a hanging test cannot block
    # interpreter shutdown.
    cls.httpd_thread = threading.Thread(target=cls.httpd.serve_forever)
    cls.httpd_thread.daemon = True
    cls.httpd_thread.start()

  @classmethod
  def tearDownClass(cls):
    cls.httpd.shutdown()
    cls.config_overrider.Stop()

  def setUp(self):
    super(GRRHTTPServerTest, self).setUp()
    self.client_id = self.SetupClient(0)

  def testServerPem(self):
    req = requests.get(self.base_url + "server.pem")
    self.assertEqual(req.status_code, 200)
    # assertIn gives a better failure message than assertTrue(x in y).
    self.assertIn("BEGIN CERTIFICATE", req.content)

  def _RunClientFileFinder(self,
                           paths,
                           action,
                           network_bytes_limit=None,
                           client_id=None):
    """Runs a ClientFileFinder flow against this test's HTTP frontend.

    Args:
      paths: List of path glob expressions to pass to the file finder.
      action: rdf_file_finder.FileFinderAction to apply to the matches.
      network_bytes_limit: Optional byte limit for client network usage.
      client_id: Optional client to run on; a fresh one is set up if None.

    Returns:
      The session id of the finished flow.
    """
    client_id = client_id or self.SetupClient(0)
    with test_lib.ConfigOverrider({"Client.server_urls": [self.base_url]}):
      session_id = flow_test_lib.TestFlowHelper(
          file_finder.ClientFileFinder.__name__,
          action_mocks.ClientFileFinderClientMock(
              client_worker=worker_mocks.FakeClientWorker()),
          client_id=client_id,
          paths=paths,
          pathtype=rdf_paths.PathSpec.PathType.OS,
          action=action,
          process_non_regular_files=True,
          network_bytes_limit=network_bytes_limit,
          token=self.token)
      return session_id

  def testClientFileFinderUpload(self):
    paths = [os.path.join(self.base_path, "{**,.}/*.plist")]
    action = rdf_file_finder.FileFinderAction.Download()

    session_id = self._RunClientFileFinder(paths, action)
    collection = flow.GRRFlow.ResultCollectionForFID(session_id)
    results = list(collection)
    self.assertEqual(len(results), 4)
    relpaths = [
        os.path.relpath(p.stat_entry.pathspec.path, self.base_path)
        for p in results
    ]
    self.assertItemsEqual(relpaths, [
        "History.plist", "History.xml.plist", "test.plist",
        "parser_test/com.google.code.grr.plist"
    ])

    # Uploaded files must be readable through AFF4 and carry correct hashes.
    for r in results:
      aff4_obj = aff4.FACTORY.Open(
          r.stat_entry.pathspec.AFF4Path(self.client_id), token=self.token)
      data = open(r.stat_entry.pathspec.path, "rb").read()
      self.assertEqual(aff4_obj.Read(100), data[:100])

      hash_obj = data_store_utils.GetFileHashEntry(aff4_obj)
      self.assertEqual(hash_obj.md5, hashlib.md5(data).hexdigest())
      self.assertEqual(hash_obj.sha1, hashlib.sha1(data).hexdigest())
      self.assertEqual(hash_obj.sha256, hashlib.sha256(data).hexdigest())

  def testClientFileFinderUploadLimit(self):
    paths = [os.path.join(self.base_path, "{**,.}/*.plist")]
    action = rdf_file_finder.FileFinderAction.Download()

    # TODO(hanuszczak): Instead of catching arbitrary runtime errors, we should
    # catch specific instance that was thrown. Unfortunately, all errors are
    # intercepted in the `MockWorker` class and converted to runtime errors.
    with self.assertRaisesRegexp(RuntimeError, "exceeded network send limit"):
      self._RunClientFileFinder(paths, action, network_bytes_limit=1500)

  def testClientFileFinderUploadBound(self):
    paths = [os.path.join(self.base_path, "{**,.}/*.plist")]
    action = rdf_file_finder.FileFinderAction.Download(
        oversized_file_policy="DOWNLOAD_TRUNCATED", max_size=300)

    session_id = self._RunClientFileFinder(paths, action)
    collection = flow.GRRFlow.ResultCollectionForFID(session_id)
    results = list(collection)
    self.assertEqual(len(results), 4)
    relpaths = [
        os.path.relpath(p.stat_entry.pathspec.path, self.base_path)
        for p in results
    ]
    self.assertItemsEqual(relpaths, [
        "History.plist", "History.xml.plist", "test.plist",
        "parser_test/com.google.code.grr.plist"
    ])

    # Oversized files are truncated to max_size, but the stored prefix must
    # still match the file on disk.
    for r in results:
      aff4_obj = aff4.FACTORY.Open(
          r.stat_entry.pathspec.AFF4Path(self.client_id), token=self.token)
      data = aff4_obj.read()
      self.assertLessEqual(len(data), 300)
      self.assertEqual(data,
                       open(r.stat_entry.pathspec.path, "rb").read(len(data)))

  def testClientFileFinderUploadSkip(self):
    paths = [os.path.join(self.base_path, "{**,.}/*.plist")]
    action = rdf_file_finder.FileFinderAction.Download(
        oversized_file_policy="SKIP", max_size=300)

    session_id = self._RunClientFileFinder(paths, action)
    collection = flow.GRRFlow.ResultCollectionForFID(session_id)
    results = list(collection)

    # Files past max_size are reported (stat entry only) but not transferred.
    skipped = []
    uploaded = []
    for result in results:
      if result.HasField("transferred_file"):
        uploaded.append(result)
      else:
        skipped.append(result)

    self.assertEqual(len(uploaded), 2)
    self.assertEqual(len(skipped), 2)

    relpaths = [
        os.path.relpath(p.stat_entry.pathspec.path, self.base_path)
        for p in uploaded
    ]
    self.assertItemsEqual(relpaths, ["History.plist", "test.plist"])

    for r in uploaded:
      aff4_obj = aff4.FACTORY.Open(
          r.stat_entry.pathspec.AFF4Path(self.client_id), token=self.token)
      self.assertEqual(
          aff4_obj.Read(100),
          open(r.stat_entry.pathspec.path, "rb").read(100))

  def testClientFileFinderFilestoreIntegration(self):
    paths = [os.path.join(self.base_path, "{**,.}/*.plist")]
    action = rdf_file_finder.FileFinderAction.Download()

    # Run the same download on two clients: each client keeps its own VFS
    # entry while the hash file store references both copies.
    client_ids = self.SetupClients(2)
    session_ids = {
        c: self._RunClientFileFinder(paths, action, client_id=c)
        for c in client_ids
    }
    collections = {
        c: flow.GRRFlow.ResultCollectionForFID(session_id)
        for c, session_id in iteritems(session_ids)
    }
    for client_id, collection in iteritems(collections):
      results = list(collection)
      self.assertEqual(len(results), 4)
      relpaths = [
          os.path.relpath(p.stat_entry.pathspec.path, self.base_path)
          for p in results
      ]
      self.assertItemsEqual(relpaths, [
          "History.plist", "History.xml.plist", "test.plist",
          "parser_test/com.google.code.grr.plist"
      ])

      for r in results:
        aff4_obj = aff4.FACTORY.Open(
            r.stat_entry.pathspec.AFF4Path(client_id), token=self.token)

        # When files are uploaded to the server they are stored as VFSBlobImage.
        self.assertIsInstance(aff4_obj, aff4_grr.VFSBlobImage)
        # There is a STAT entry.
        self.assertTrue(aff4_obj.Get(aff4_obj.Schema.STAT))

        # Make sure the HashFileStore has references to this file for
        # all hashes.
        hash_entry = data_store_utils.GetFileHashEntry(aff4_obj)
        fs = filestore.HashFileStore
        md5_refs = list(fs.GetReferencesMD5(hash_entry.md5, token=self.token))
        self.assertIn(aff4_obj.urn, md5_refs)
        sha1_refs = list(
            fs.GetReferencesSHA1(hash_entry.sha1, token=self.token))
        self.assertIn(aff4_obj.urn, sha1_refs)
        sha256_refs = list(
            fs.GetReferencesSHA256(hash_entry.sha256, token=self.token))
        self.assertIn(aff4_obj.urn, sha256_refs)

        # Open the file inside the file store.  The builtin next() replaces
        # the Python 2-only .next() generator method.
        urn, _ = next(fs(None, token=self.token).CheckHashes([hash_entry]))
        filestore_fd = aff4.FACTORY.Open(urn, token=self.token)
        # This is a VFSBlobImage too.
        self.assertIsInstance(filestore_fd, aff4_grr.VFSBlobImage)
        # No STAT object attached.
        self.assertFalse(filestore_fd.Get(filestore_fd.Schema.STAT))

  def testRekallProfiles(self):
    # Incomplete profile paths are server errors; unknown GUIDs are 404s.
    req = requests.get(self.base_url + "rekall_profiles")
    self.assertEqual(req.status_code, 500)

    req = requests.get(self.base_url + "rekall_profiles/v1.0")
    self.assertEqual(req.status_code, 500)

    known_profile = "F8E2A8B5C9B74BF4A6E4A48F180099942"
    unknown_profile = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"

    req = requests.get(self.base_url + "rekall_profiles/v1.0/nt/GUID/" +
                       unknown_profile)
    self.assertEqual(req.status_code, 404)

    req = requests.get(self.base_url + "rekall_profiles/v1.0/nt/GUID/" +
                       known_profile)
    self.assertEqual(req.status_code, 200)

    pb = rdf_rekall_types.RekallProfile.protobuf()
    # The response carries a ")]}'" prefix which must be stripped before
    # JSON parsing.
    json_format.Parse(req.content.lstrip(")]}'\n"), pb)
    profile = rdf_rekall_types.RekallProfile.FromSerializedString(
        pb.SerializeToString())
    self.assertEqual(profile.name, "nt/GUID/F8E2A8B5C9B74BF4A6E4A48F180099942")
    self.assertEqual(profile.version, "v1.0")
    # Profile payload starts with the gzip magic bytes.
    self.assertEqual(profile.data[:2], "\x1f\x8b")
def main(args):
  # Delegate to the GRR test runner (config setup + unittest execution).
  test_lib.main(args)
if __name__ == "__main__":
  flags.StartMain(main)
| 36.547945 | 80 | 0.693778 |
acdef20ccd4816ad8a8693b7dca02ceb2de6f321 | 3,713 | py | Python | src/task.py | Johanna-Seif/style-transfer | 0db964d8d11402588d00443b1507063da7d8b54f | [
"MIT"
] | null | null | null | src/task.py | Johanna-Seif/style-transfer | 0db964d8d11402588d00443b1507063da7d8b54f | [
"MIT"
] | null | null | null | src/task.py | Johanna-Seif/style-transfer | 0db964d8d11402588d00443b1507063da7d8b54f | [
"MIT"
] | null | null | null | import argparse
from train import style_transfer
def argument_parser():
    '''
    Build the command line argument parser for the style transfer program.

    Returns:
        argparse.ArgumentParser: parser exposing device selection, image
        paths, optimization settings and the per-layer style weights.
    '''
    parser = argparse.ArgumentParser(description='Style Transfer Program')
    parser.add_argument(
        '--device', metavar='DEVICE',
        type=str, default='cpu',
        choices=['cpu', 'cuda'],
        help='Specify cuda if you want to run on gpu. Default is cpu')
    parser.add_argument(
        '--content_image_path', metavar='CONTENT_IMAGE_PATH',
        type=str, required=True,
        help='Path to the content image')
    parser.add_argument(
        '--style_image_path', metavar='STYLE_IMAGE_PATH',
        type=str, required=True,
        help='Path to the style image')
    parser.add_argument(
        '--output_image_path', metavar='OUTPUT_IMAGE_PATH',
        type=str, default='./images/output',
        help='Path to save the output image. Default ./images/output')
    parser.add_argument(
        '--max_size', metavar='MAX_SIZE',
        type=int, default=400,
        help='Max size for the input and output images. Default 400.')
    parser.add_argument(
        '--steps', metavar='STEPS',
        type=int, default=2000,
        help='Number of update steps. Default 2000.')
    parser.add_argument(
        '--show_every', metavar='SHOW_EVERY',
        type=int,
        help='Save the target image every SHOW_EVERY steps. Optional.')
    parser.add_argument(
        '--content_weight', metavar='CONTENT_WEIGHT',
        type=int, default=1,
        help='Content image weight. Default 1.')
    parser.add_argument(
        '--style_weight', metavar='STYLE_WEIGHT',
        type=float, default=1e3,
        help='Style image weight. Default 1e3.')
    # The per-layer weights are fractional values.  They were declared with
    # type=int, which made any explicit value such as `--conv1_1 0.5` fail to
    # parse even though the default is 0.2; use type=float like --style_weight.
    parser.add_argument(
        '--conv1_1', metavar='CONV1_1',
        type=float, default=0.2,
        help='First convolutional layer weight. Default 0.2.')
    parser.add_argument(
        '--conv1_2', metavar='CONV1_2',
        type=float, default=0.2,
        help='Second convolutional layer weight. Default 0.2.')
    parser.add_argument(
        '--conv1_3', metavar='CONV1_3',
        type=float, default=0.2,
        help='Third convolutional layer weight. Default 0.2.')
    parser.add_argument(
        '--conv1_4', metavar='CONV1_4',
        type=float, default=0.2,
        help='Fourth convolutional layer weight. Default 0.2.')
    parser.add_argument(
        '--conv1_5', metavar='CONV1_5',
        type=float, default=0.2,
        help='Fifth convolutional layer weight. Default 0.2.')
    return parser
if __name__ == '__main__':
    args = argument_parser().parse_args()

    # If no snapshot interval was requested, pick one past the final step so
    # no intermediate images are ever saved.
    steps = args.steps
    show_every = args.show_every if args.show_every is not None else steps + 1

    # Weights for each style layer; weighting earlier layers more strongly
    # results in *larger* style artifacts.
    conv_style_weights = {
        'conv1_1': args.conv1_1,
        'conv2_1': args.conv1_2,
        'conv3_1': args.conv1_3,
        'conv4_1': args.conv1_4,
        'conv5_1': args.conv1_5,
    }

    # content_weight and style_weight are alpha and beta in Gatys et al (2016).
    style_transfer(args.device, args.content_image_path, args.style_image_path,
                   args.output_image_path, args.max_size, args.content_weight,
                   args.style_weight, steps, show_every, conv_style_weights)
    print("\n end of main \n")
| 36.048544 | 77 | 0.62968 |
acdef22e77ebcd5b2081ebc9d46cf6090cc125d6 | 8,902 | py | Python | packages/nonebot-adapter-cqhttp/nonebot/adapters/cqhttp/message.py | emicoto/none | 396697f2107cf5de5408d86623ca9c852e97739c | [
"MIT"
] | 2 | 2022-02-20T09:59:30.000Z | 2022-02-20T10:01:15.000Z | packages/nonebot-adapter-cqhttp/nonebot/adapters/cqhttp/message.py | emicoto/none | 396697f2107cf5de5408d86623ca9c852e97739c | [
"MIT"
] | 1 | 2021-05-07T09:17:21.000Z | 2021-05-07T09:17:21.000Z | packages/nonebot-adapter-cqhttp/nonebot/adapters/cqhttp/message.py | emicoto/none | 396697f2107cf5de5408d86623ca9c852e97739c | [
"MIT"
] | 1 | 2021-08-20T06:41:06.000Z | 2021-08-20T06:41:06.000Z | import re
from functools import reduce
from typing import Any, Dict, Union, Tuple, Mapping, Iterable, Optional
from nonebot.typing import overrides
from nonebot.adapters import Message as BaseMessage, MessageSegment as BaseMessageSegment
from .utils import log, escape, unescape, _b2s
class MessageSegment(BaseMessageSegment):
    """
    MessageSegment adapter for the CQHTTP (OneBot) protocol.

    A segment is one typed piece of a message (text, image, at, ...).  The
    static methods below are convenience factories, one per protocol segment
    type; see the protocol's message-segment reference for field semantics.
    """
    @overrides(BaseMessageSegment)
    def __init__(self, type: str, data: Dict[str, Any]) -> None:
        super().__init__(type=type, data=data)
    @overrides(BaseMessageSegment)
    def __str__(self) -> str:
        # Serialize to CQ-code text: plain text is escaped in-place, every
        # other type is rendered as "[CQ:type,k=v,...]".
        type_ = self.type
        data = self.data.copy()
        # process special types
        if type_ == "text":
            return escape(
                data.get("text", ""),  # type: ignore
                escape_comma=False)
        # Parameters whose value is None are omitted from the CQ code.
        params = ",".join(
            [f"{k}={escape(str(v))}" for k, v in data.items() if v is not None])
        return f"[CQ:{type_}{',' if params else ''}{params}]"
    @overrides(BaseMessageSegment)
    def __add__(self, other) -> "Message":
        return Message(self) + other
    @overrides(BaseMessageSegment)
    def __radd__(self, other) -> "Message":
        # Promote a bare string to a text segment before concatenating.
        return (MessageSegment.text(other)
                if isinstance(other, str) else Message(other)) + self
    @overrides(BaseMessageSegment)
    def is_text(self) -> bool:
        return self.type == "text"
    # --- Factory helpers, one per CQHTTP segment type -----------------------
    @staticmethod
    def anonymous(ignore_failure: Optional[bool] = None) -> "MessageSegment":
        return MessageSegment("anonymous", {"ignore": _b2s(ignore_failure)})
    @staticmethod
    def at(user_id: Union[int, str]) -> "MessageSegment":
        return MessageSegment("at", {"qq": str(user_id)})
    @staticmethod
    def contact(type_: str, id: int) -> "MessageSegment":
        return MessageSegment("contact", {"type": type_, "id": str(id)})
    @staticmethod
    def contact_group(group_id: int) -> "MessageSegment":
        return MessageSegment("contact", {"type": "group", "id": str(group_id)})
    @staticmethod
    def contact_user(user_id: int) -> "MessageSegment":
        return MessageSegment("contact", {"type": "qq", "id": str(user_id)})
    @staticmethod
    def dice() -> "MessageSegment":
        return MessageSegment("dice", {})
    @staticmethod
    def face(id_: int) -> "MessageSegment":
        return MessageSegment("face", {"id": str(id_)})
    @staticmethod
    def forward(id_: str) -> "MessageSegment":
        # Forward segments can only appear in received messages.
        log("WARNING", "Forward Message only can be received!")
        return MessageSegment("forward", {"id": id_})
    @staticmethod
    def image(file: str,
              type_: Optional[str] = None,
              cache: bool = True,
              proxy: bool = True,
              timeout: Optional[int] = None) -> "MessageSegment":
        return MessageSegment(
            "image", {
                "file": file,
                "type": type_,
                "cache": cache,
                "proxy": proxy,
                "timeout": timeout
            })
    @staticmethod
    def json(data: str) -> "MessageSegment":
        return MessageSegment("json", {"data": data})
    @staticmethod
    def location(latitude: float,
                 longitude: float,
                 title: Optional[str] = None,
                 content: Optional[str] = None) -> "MessageSegment":
        return MessageSegment(
            "location", {
                "lat": str(latitude),
                "lon": str(longitude),
                "title": title,
                "content": content
            })
    @staticmethod
    def music(type_: str, id_: int) -> "MessageSegment":
        return MessageSegment("music", {"type": type_, "id": id_})
    @staticmethod
    def music_custom(url: str,
                     audio: str,
                     title: str,
                     content: Optional[str] = None,
                     img_url: Optional[str] = None) -> "MessageSegment":
        return MessageSegment(
            "music", {
                "type": "custom",
                "url": url,
                "audio": audio,
                "title": title,
                "content": content,
                "image": img_url
            })
    @staticmethod
    def node(id_: int) -> "MessageSegment":
        return MessageSegment("node", {"id": str(id_)})
    @staticmethod
    def node_custom(user_id: int, nickname: str,
                    content: Union[str, "Message"]) -> "MessageSegment":
        return MessageSegment("node", {
            "user_id": str(user_id),
            "nickname": nickname,
            "content": content
        })
    @staticmethod
    def poke(type_: str, id_: str) -> "MessageSegment":
        return MessageSegment("poke", {"type": type_, "id": id_})
    @staticmethod
    def record(file: str,
               magic: Optional[bool] = None,
               cache: Optional[bool] = None,
               proxy: Optional[bool] = None,
               timeout: Optional[int] = None) -> "MessageSegment":
        return MessageSegment(
            "record", {
                "file": file,
                "magic": _b2s(magic),
                "cache": cache,
                "proxy": proxy,
                "timeout": timeout
            })
    @staticmethod
    def reply(id_: int) -> "MessageSegment":
        return MessageSegment("reply", {"id": str(id_)})
    @staticmethod
    def rps() -> "MessageSegment":
        return MessageSegment("rps", {})
    @staticmethod
    def shake() -> "MessageSegment":
        return MessageSegment("shake", {})
    @staticmethod
    def share(url: str = "",
              title: str = "",
              content: Optional[str] = None,
              image: Optional[str] = None) -> "MessageSegment":
        return MessageSegment("share", {
            "url": url,
            "title": title,
            "content": content,
            "image": image
        })
    @staticmethod
    def text(text: str) -> "MessageSegment":
        return MessageSegment("text", {"text": text})
    @staticmethod
    def video(file: str,
              cache: Optional[bool] = None,
              proxy: Optional[bool] = None,
              timeout: Optional[int] = None) -> "MessageSegment":
        return MessageSegment("video", {
            "file": file,
            "cache": cache,
            "proxy": proxy,
            "timeout": timeout
        })
    @staticmethod
    def xml(data: str) -> "MessageSegment":
        return MessageSegment("xml", {"data": data})
class Message(BaseMessage):
    """
    Message adapter for the CQHTTP (OneBot) protocol: a sequence of
    MessageSegment objects, constructible from raw CQ-code strings,
    dict-shaped segments, or iterables of such dicts.
    """
    def __radd__(self, other: Union[str, MessageSegment,
                                    "Message"]) -> "Message":
        # Promote a bare string to a text segment before concatenating.
        result = MessageSegment.text(other) if isinstance(other, str) else other
        return super(Message, self).__radd__(result)
    @staticmethod
    @overrides(BaseMessage)
    def _construct(
            msg: Union[str, Mapping,
                       Iterable[Mapping]]) -> Iterable[MessageSegment]:
        # Lazily turn every supported input shape into MessageSegment objects.
        if isinstance(msg, Mapping):
            yield MessageSegment(msg["type"], msg.get("data") or {})
            return
        elif isinstance(msg, Iterable) and not isinstance(msg, str):
            for seg in msg:
                yield MessageSegment(seg["type"], seg.get("data") or {})
            return
        elif isinstance(msg, str):
            def _iter_message(msg: str) -> Iterable[Tuple[str, str]]:
                # Split a raw string into ("text", ...) chunks and
                # (cq_type, raw_params) chunks; text chunks may be empty
                # and are filtered by the caller.
                text_begin = 0
                for cqcode in re.finditer(
                        r"\[CQ:(?P<type>[a-zA-Z0-9-_.]+)"
                        r"(?P<params>"
                        r"(?:,[a-zA-Z0-9-_.]+=[^,\]]+)*"
                        r"),?\]", msg):
                    yield "text", msg[text_begin:cqcode.pos + cqcode.start()]
                    text_begin = cqcode.pos + cqcode.end()
                    yield cqcode.group("type"), cqcode.group("params").lstrip(
                        ",")
                yield "text", msg[text_begin:]
            for type_, data in _iter_message(msg):
                if type_ == "text":
                    if data:
                        # only yield non-empty text segment
                        yield MessageSegment(type_, {"text": unescape(data)})
                else:
                    # Parse the "k=v,k=v" CQ parameter string, unescaping
                    # each value.
                    data = {
                        k: unescape(v) for k, v in map(
                            lambda x: x.split("=", maxsplit=1),
                            filter(lambda x: x, (
                                x.lstrip() for x in data.split(","))))
                    }
                    yield MessageSegment(type_, data)
    def extract_plain_text(self) -> str:
        # Fold all text segments together, space separated; the leading
        # separator produced by the fold is stripped at the end.
        def _concat(x: str, y: MessageSegment) -> str:
            return f"{x} {y.data['text']}" if y.is_text() else x
        plain_text = reduce(_concat, self, "")
        return plain_text[1:] if plain_text else plain_text
| 33.216418 | 89 | 0.522804 |
acdef25cdc3a63a2485108aca09898d9efe4e318 | 3,656 | py | Python | dwca/test/test_gbifresultsreader.py | zedomel/python-dwca-reader | 04343b3776f1454084809e1a84b355a9c1edf193 | [
"BSD-3-Clause"
] | null | null | null | dwca/test/test_gbifresultsreader.py | zedomel/python-dwca-reader | 04343b3776f1454084809e1a84b355a9c1edf193 | [
"BSD-3-Clause"
] | null | null | null | dwca/test/test_gbifresultsreader.py | zedomel/python-dwca-reader | 04343b3776f1454084809e1a84b355a9c1edf193 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import unittest
import xml.etree.ElementTree as ET
from dwca.darwincore.utils import qualname as qn
from dwca.read import GBIFResultsReader
from .helpers import sample_data_path
class TestGBIFResultsReader(unittest.TestCase):
    """Unit tests for GBIFResultsReader class.

    All tests read the sample archives shipped with the test suite
    (gbif-results.zip and a variant lacking some source metadata).
    """
    # Expected verbatim content of citations.txt in the sample archive.
    CITATIONS_CONTENT = """Please cite this data as follows, and pay attention
to the rights documented in the rights.txt: blablabla"""
    # Expected verbatim content of rights.txt in the sample archive.
    RIGHTS_CONTENT = """Dataset: Collection Pisces SMF
Rights as supplied: Not supplied"""
    def test_dwcareader_features(self):
        """Ensure we didn't break inherited basic DwCAReader features."""
        with GBIFResultsReader(sample_data_path('gbif-results.zip')) as results_dwca:
            self.assertEqual(158, len(results_dwca.rows))
            self.assertEqual('http://rs.tdwg.org/dwc/terms/Occurrence',
                             results_dwca.descriptor.core.type)
            row1 = results_dwca.rows[0]
            self.assertEqual('Tetraodontidae', row1.data[qn('family')])
            self.assertEqual([], row1.extensions)
    # Specific GBIFResultsReader feature
    def test_citations_access(self):
        """Check the content of citations.txt is accessible."""
        with GBIFResultsReader(sample_data_path('gbif-results.zip')) as results_dwca:
            self.assertEqual(self.CITATIONS_CONTENT, results_dwca.citations)
    def test_rights_access(self):
        """Check the content of rights.txt is accessible."""
        with GBIFResultsReader(sample_data_path('gbif-results.zip')) as results_dwca:
            self.assertEqual(self.RIGHTS_CONTENT, results_dwca.rights)
    def test_source_metadata(self):
        """Archive-level source metadata is exposed as parsed EML trees."""
        with GBIFResultsReader(sample_data_path('gbif-results.zip')) as results:
            # We have 23 EML files in dataset/
            self.assertEqual(23, len(results.source_metadata))
            # Assert a key is present
            self.assertTrue('eccf4b09-f0c8-462d-a48c-41a7ce36815a' in
                            results.source_metadata)
            self.assertFalse('incorrect-UUID' in results.source_metadata)
            # Assert it's the correct EML file (content!)
            sm = results.source_metadata
            metadata = sm['eccf4b09-f0c8-462d-a48c-41a7ce36815a']
            self.assertIsInstance(metadata, ET.Element)
            # Assert we can read basic fields from EML:
            self.assertEqual(metadata.find('dataset').find('creator').find('individualName')
                             .find('givenName').text,
                             'Rob')
    def test_row_source_metadata(self):
        """Rows expose the EML metadata of their originating dataset."""
        with GBIFResultsReader(sample_data_path('gbif-results.zip')) as results:
            first_row = results.get_corerow_by_id('607759330')
            m = first_row.source_metadata
            self.assertIsInstance(m, ET.Element)
            v = (m.find('dataset').find('creator').find('individualName')
                 .find('givenName').text)
            self.assertEqual(v, 'Stanley')
            last_row = results.get_corerow_by_id('782700656')
            m = last_row.source_metadata
            self.assertIsInstance(m, ET.Element)
            v = m.find('dataset').find('language').text
            self.assertEqual(v, 'en')
    def test_row_source_missing_metadata(self):
        """Rows without a matching EML file report None, not an error."""
        with GBIFResultsReader(sample_data_path('gbif-results-lacks-s-metadata.zip')) as results:
            # We have source metadata, but not for all datasets/lines;
            # we should get None in those cases.
            first_row = results.get_corerow_by_id('607759330')
            self.assertEqual(None, first_row.source_metadata)
| 41.545455 | 97 | 0.653993 |
acdef2745a6630d2c3546cf56150a6a8fc4f14cd | 1,581 | py | Python | lint.py | MattUebel/git-good-splunk-indexmanager | e7ba17f86ca81647e546377287e6b1eb393413d0 | [
"MIT"
] | null | null | null | lint.py | MattUebel/git-good-splunk-indexmanager | e7ba17f86ca81647e546377287e6b1eb393413d0 | [
"MIT"
] | null | null | null | lint.py | MattUebel/git-good-splunk-indexmanager | e7ba17f86ca81647e546377287e6b1eb393413d0 | [
"MIT"
] | null | null | null | import configparser
import os
import sys
# Retention thresholds in seconds: below ALERT is an error, below WARNING a
# warning.
ALERT_THRESHOLD = 86400
WARNING_THRESHOLD = 864000
ERROR_MESSAGES = []
WARNING_MESSAGES = []
OUTPUT_FILE = "output.txt"

# Collect every Splunk .conf file under the current directory.
config_files = [
    os.path.join(root, name)
    for root, dirs, files in os.walk(".")
    for name in files
    if name.endswith(".conf")
]

for conf_path in config_files:
    parser = configparser.ConfigParser()
    parser.read(conf_path)
    for section in parser.sections():
        if not parser.has_option(section, "frozenTimePeriodInSecs"):
            continue
        retention = parser.getint(section, "frozenTimePeriodInSecs")
        if retention < ALERT_THRESHOLD:
            ERROR_MESSAGES.append(
                f"The index `{section}` in `{conf_path}` has a frozenTimePeriodInSecs of `{retention}` which is less than the required value of `{ALERT_THRESHOLD}`"
            )
        elif retention < WARNING_THRESHOLD:
            WARNING_MESSAGES.append(
                f"The index `{section}` in `{conf_path}` has a frozenTimePeriodInSecs of `{retention}` which is less than the recommended value of `{WARNING_THRESHOLD}`"
            )

# Assemble the markdown report and write it in a single pass.
report_parts = []
if ERROR_MESSAGES:
    print("::set-output name=status::failure")
    report_parts.append("### Errors :red_circle:\n")
    report_parts.append("\n".join(ERROR_MESSAGES))
else:
    report_parts.append("### No errors found! :tada:\n")
if WARNING_MESSAGES:
    report_parts.append("\n### Warnings :warning:\n")
    report_parts.append("\n".join(WARNING_MESSAGES))
with open(OUTPUT_FILE, "w") as f:
    f.write("".join(report_parts))
| 37.642857 | 181 | 0.643264 |
acdef32edf85120800edfa6a02359ea606cb4dd6 | 5,227 | py | Python | examples/hvi2/test_1D_fast_scan_hvi_queueing.py | peendebak/core_tools | 2e43edf0bbc1d7ceb7042559db499535e8f6a076 | [
"BSD-2-Clause"
] | null | null | null | examples/hvi2/test_1D_fast_scan_hvi_queueing.py | peendebak/core_tools | 2e43edf0bbc1d7ceb7042559db499535e8f6a076 | [
"BSD-2-Clause"
] | null | null | null | examples/hvi2/test_1D_fast_scan_hvi_queueing.py | peendebak/core_tools | 2e43edf0bbc1d7ceb7042559db499535e8f6a076 | [
"BSD-2-Clause"
] | null | null | null | import logging
import time
import numpy as np
import matplotlib.pyplot as pt
from keysight_fpga.sd1.fpga_utils import \
print_fpga_info, config_fpga_debug_log, print_fpga_log
from keysight_fpga.sd1.dig_iq import load_iq_image
from keysight_fpga.qcodes.M3202A_fpga import M3202A_fpga
from core_tools.drivers.M3102A import SD_DIG, MODES
from core_tools.HVI2.hvi2_schedule_loader import Hvi2ScheduleLoader
from core_tools.HVI2.hvi2_video_mode import Hvi2VideoMode
from core_tools.GUI.keysight_videomaps.data_getter.scan_generator_Keysight import construct_1D_scan_fast
from pulse_lib.base_pulse import pulselib
from pulse_lib.virtual_channel_constructors import virtual_gates_constructor
import qcodes
import qcodes.logger as logger
from qcodes.logger import start_all_logging
# Capture qcodes logs to file at debug level.
start_all_logging()
logger.get_file_handler().setLevel(logging.DEBUG)
# close objects still active since previous run (IPython)
# On the first run `oldLoader` is undefined; the bare except swallows the
# resulting NameError as well as any close failure.
try:
    oldLoader.close_all()
except: pass
oldLoader = Hvi2ScheduleLoader
try:
    qcodes.Instrument.close_all()
except: pass
def create_pulse_lib(awgs):
    """Create and initialize a pulselib object for the given AWGs.

    Each AWG contributes 4 channels named '<awg name>_<ch>'.  A set of
    virtual gates vP1..vPn is layered on top of the real channels; the
    matrix below is constructed so that the data of the first AWG's
    channels is copied to the matching channel of every other AWG.

    Args:
        awgs: iterable of M3202A(_fpga) instrument instances.

    Returns:
        initialized pulselib object.
    """
    pulse = pulselib(backend='M3202A')
    channels = []
    # add to pulse_lib
    for i, awg in enumerate(awgs):
        pulse.add_awgs(awg.name, awg)
        # define channels
        for ch in range(1,5):
            channel_name = f'{awg.name}_{ch}'
            pulse.define_channel(channel_name, awg.name, ch)
            channels.append(channel_name)
    n_ch = len(channels)
    # set a virtual gate matrix
    virtual_gate_set_1 = virtual_gates_constructor(pulse)
    virtual_gate_set_1.add_real_gates(*channels)
    virtual_gate_set_1.add_virtual_gates(*[f'vP{i+1}' for i in range(n_ch)])
    # copy data of AWG1 to all other AWG
    inv_matrix = np.zeros((n_ch,)*2)
    for i in range(4):
        inv_matrix[i::4,i] = 1.0
    for i in range(4, n_ch):
        inv_matrix[i,i] = 1.0
    virtual_gate_set_1.add_virtual_gate_matrix(np.linalg.inv(inv_matrix))
    pulse.finish_init()
    return pulse
# --- Hardware / measurement configuration ---
awg_slots = [3,7]
dig_slot = 5
dig_channels = [1,2,3,4]
full_scale = 2.0
dig_mode = 1
t_measure = 380 #20
lo_f = 0e6
acquisition_delay_ns = 0 #160
# Open one M3202A AWG per configured slot; HVI queue control lets the HVI
# schedule drive waveform queueing.
awgs = []
for i, slot in enumerate(awg_slots):
    awg = M3202A_fpga(f'AWG{slot}', 1, slot)
    awgs.append(awg)
    awg.set_hvi_queue_control(True)
# Digitizer: load the IQ demodulation FPGA image and select the acquisition
# mode configured above.
dig = SD_DIG('DIG1', 1, dig_slot)
load_iq_image(dig.SD_AIN)
print_fpga_info(dig.SD_AIN)
dig.set_acquisition_mode(dig_mode)
## add to pulse lib.
p = create_pulse_lib(awgs)
for ch in dig_channels:
    dig.set_lo(ch, lo_f, 0, input_channel=ch)
## create 1D scan
gate1, swing1, n_pt1 = 'vP1', 500, 15
biasT_corr=False
dig_param = construct_1D_scan_fast(
        gate1, swing1, n_pt1, t_measure, biasT_corr, p,
        dig, dig_channels, 500e6,
        acquisition_delay_ns=acquisition_delay_ns,
        dig_vmax=full_scale,
        pulse_gates={
            'vP3':200,
            'vP2':-100,
            },
        line_margin=1,
        )
config_fpga_debug_log(dig.SD_AIN,
                      enable_mask=0xC000_0000,
                      )
## run
start = time.perf_counter()
data = dig_param()
duration = time.perf_counter() - start
print(f'duration {duration*1000:5.1f} ms')
print_fpga_log(dig.SD_AIN)
for awg in awgs:
    print(f'AWG: {awg.name}')
    print_fpga_log(awg.awg, clock200=True)
# Flatten the returned per-channel data to 1D traces; channel numbers are
# 1-based, list indices 0-based.
dig_data = [None]*4
index = 0
for ch in dig_channels:
    c = ch-1
    dig_data[c] = data[index].flatten()
    index += 1
    print(f'ch{ch}: {len(dig_data[c])}')
### plots
#colors = ['k', 'b','r', 'c', 'y']
#colors = ['k', 'tab:blue', 'k', 'yellow', 'tomato']
colors = ['k', 'tab:blue', 'tab:orange', 'tab:green', 'tab:red']
# plot direct data
# (mode 0: raw samples; time axis in units of 2 — presumably the 2 ns
# sample period at 500 MSa/s, TODO confirm)
if dig_mode == 0:
    pt.figure(5)
    pt.clf()
    for ch in dig_channels:
        pt.figure(ch)
        c = ch-1
        t = (np.arange(len(dig_data[c])) + 0.5) * 2
        pt.plot(t, dig_data[c])
        pt.figure(5)
        pt.plot(t, dig_data[c], '-', ms=4, label=f'ch{ch}', color=colors[ch])
    pt.legend()
# mode 1: averaged value per measurement point.
if dig_mode == 1:
    pt.figure(5)
    pt.clf()
    # plot averages
    for ch in dig_channels:
        c = ch-1
        t = (np.arange(len(dig_data[c])) + 0.5) * t_measure
        pt.figure(ch)
        pt.plot(t, dig_data[c], '-')
#        pt.ylim(-0.8, 0.8)
#        pt.legend()
        pt.figure(5)
        pt.plot(t, dig_data[c], '-', ms=4, color=colors[ch], label=f'ch{ch}')
    pt.legend()
#    pt.ylim(-0.8, 0.8)
# modes 2/3: complex IQ data — plot I, Q, magnitude and phase.
if dig_mode in [2,3]:
    ## plot IQ
    for ch in dig_channels:
        c = ch-1
        t = (np.arange(len(dig_data[c])) + 0.5) * t_measure
        pt.figure(20)
        pt.plot(t, dig_data[c].real, label=f'ch{ch} I')
        pt.legend()
        pt.figure(10+ch)
        pt.plot(t, dig_data[c].real, label=f'ch{ch} I')
        if dig_mode == 2:
            pt.plot(t, dig_data[c].imag, label=f'ch{ch} Q')
            pt.legend()
            pt.figure(30+ch)
            pt.plot(t, dig_data[c].imag, label=f'ch{ch} Q')
            pt.legend()
        pt.figure(7)
        pt.plot(t, np.abs(dig_data[c]), label=f'ch{ch}')
        pt.legend()
        pt.figure(8)
        pt.plot(t, np.angle(dig_data[c], deg=True), label=f'ch{ch}')
        pt.legend()
# Release hardware.
dig_param.stop()
for awg in awgs:
    awg.close()
dig.close()
| 24.311628 | 104 | 0.632868 |
acdef3fee64a3a6cf04cb353fe8f54ab61458964 | 1,647 | py | Python | python/neural_srl/shared/inference.py | wangtong106/deep_srl | 544217ccb68c363abe98a2a5835ca1d215864841 | [
"Apache-2.0"
] | 354 | 2017-06-01T03:35:56.000Z | 2022-03-16T02:50:27.000Z | python/neural_srl/shared/inference.py | wangtong106/deep_srl | 544217ccb68c363abe98a2a5835ca1d215864841 | [
"Apache-2.0"
] | 30 | 2017-10-15T05:48:57.000Z | 2021-12-22T18:50:55.000Z | python/neural_srl/shared/inference.py | wangtong106/deep_srl | 544217ccb68c363abe98a2a5835ca1d215864841 | [
"Apache-2.0"
] | 96 | 2017-06-16T10:05:04.000Z | 2022-03-16T13:02:36.000Z | import numpy
def get_transition_params(label_strs):
  '''Construct transition scores (0 for allowed, -inf for invalid).

  Args:
    label_strs: A [num_tags,] sequence of BIO-tags.
  Returns:
    A [num_tags, num_tags] float32 matrix of transition scores, where entry
    [i, j] scores a transition from tag i to tag j.
  '''
  num_tags = len(label_strs)
  transition_params = numpy.zeros([num_tags, num_tags], dtype=numpy.float32)
  for i, prev_label in enumerate(label_strs):
    for j, label in enumerate(label_strs):
      # An I-x tag may only follow B-x (or itself, covered by i == j);
      # every other incoming transition into an I- tag is forbidden.
      if i != j and label[0] == 'I' and not prev_label == 'B' + label[1:]:
        # numpy.NINF was removed in NumPy 2.0; -numpy.inf is the portable spelling.
        transition_params[i, j] = -numpy.inf
  return transition_params
def viterbi_decode(score, transition_params):
  """Decode the highest scoring tag sequence outside of TensorFlow.

  Intended for test time only (adapted from the TensorFlow implementation).

  Args:
    score: A [seq_len, num_tags] matrix of unary potentials.
    transition_params: A [num_tags, num_tags] matrix of binary potentials.
  Returns:
    viterbi: A [seq_len] list of integer tag indices with the highest score.
    viterbi_score: A float, the score of the Viterbi sequence.
  """
  seq_len = score.shape[0]
  trellis = numpy.zeros_like(score)
  backpointers = numpy.zeros_like(score, dtype=numpy.int32)
  trellis[0] = score[0]
  # Forward pass: best score of any path ending in each tag at each step.
  for step in range(1, seq_len):
    candidates = numpy.expand_dims(trellis[step - 1], 1) + transition_params
    trellis[step] = score[step] + candidates.max(axis=0)
    backpointers[step] = candidates.argmax(axis=0)
  # Backward pass: start from the best final tag and follow the backpointers.
  best_path = [numpy.argmax(trellis[-1])]
  for pointers in backpointers[:0:-1]:
    best_path.append(pointers[best_path[-1]])
  best_path.reverse()
  return best_path, numpy.max(trellis[-1])
| 37.431818 | 76 | 0.707347 |
acdef47c9610f215ffeec558a371c5ab2738defd | 808 | py | Python | SAPRON-backend/service_SAPRON/urls.py | ig-cardoso/SAPRON | d61fbde1ec98acae3052cb52f0f8c617c1d1b209 | [
"MIT"
] | null | null | null | SAPRON-backend/service_SAPRON/urls.py | ig-cardoso/SAPRON | d61fbde1ec98acae3052cb52f0f8c617c1d1b209 | [
"MIT"
] | null | null | null | SAPRON-backend/service_SAPRON/urls.py | ig-cardoso/SAPRON | d61fbde1ec98acae3052cb52f0f8c617c1d1b209 | [
"MIT"
] | null | null | null | """service_SAPRON URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
# URL routes: Django admin plus the userCalendar app mounted at the site root.
urlpatterns = [
    path('admin/', admin.site.urls),  # built-in admin interface
    path('', include('userCalendar.urls'))  # all app routes are defined in userCalendar
]
| 35.130435 | 77 | 0.707921 |
acdef5efb9ca50ba2415d3812ab2053c9c78c622 | 815 | py | Python | 1/1.py | hd264/python_challenge | efcf4975fabab78220df382d15ad27827e2d15a3 | [
"Apache-2.0"
] | null | null | null | 1/1.py | hd264/python_challenge | efcf4975fabab78220df382d15ad27827e2d15a3 | [
"Apache-2.0"
] | null | null | null | 1/1.py | hd264/python_challenge | efcf4975fabab78220df382d15ad27827e2d15a3 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
########################################################################
#
# Copyright (c) 2017 cifangyiquan.com, Inc. All Rights Reserved
#
########################################################################
"""
File: 2.py
Author: work(work@cifangyiquan)
Date: 2017/08/11 00:45:45
"""
import string  # NOTE(review): imported but unused -- the decoded hint recommends string.maketrans()
# Python Challenge level 1: Caesar cipher, every letter shifted forward by 2.
# Decoding the ciphertext below yields a hint; the shift is then applied to 'map'.
input = 'g fmnc wms bgblr rpylqjyrc gr zw fylb. rfyrq ufyr amknsrcpq ypc dmp. bmgle gr gl zw fylb gq glcddgagclr ylb rfyr\'q ufw rfgq rcvr gq qm jmle. sqgle qrpgle.kyicrpylq() gq pcamkkclbcb. lmu ynnjw ml rfc spj.'
# Intentionally overwrites the ciphertext above with the URL word to decode.
# NOTE(review): shadows the builtin input().
input = 'map'
ret = ''
# Shift each lowercase letter forward by 2, wrapping y/z back to a/b;
# any other character passes through unchanged.
for i in input:
    num = ord(i)
    if ord('a') <= num <= ord('x'):
        char = chr(num + 2)
    elif ord('y') <= num <= ord('z'):
        char = chr(num - 26 + 2)
    else:
        char = i
    ret += char
print ret  # Python 2 print statement
| 27.166667 | 214 | 0.496933 |
acdef63b263bc2d10f72d3eeb442d4b4a89025f2 | 1,576 | py | Python | jdcloud_sdk/services/cdn/apis/SetLiveDomainIpBlackListRequest.py | Tanc009/jdcloud-sdk-python | 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | [
"Apache-2.0"
] | 14 | 2018-04-19T09:53:56.000Z | 2022-01-27T06:05:48.000Z | jdcloud_sdk/services/cdn/apis/SetLiveDomainIpBlackListRequest.py | Tanc009/jdcloud-sdk-python | 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | [
"Apache-2.0"
] | 15 | 2018-09-11T05:39:54.000Z | 2021-07-02T12:38:02.000Z | jdcloud_sdk/services/cdn/apis/SetLiveDomainIpBlackListRequest.py | Tanc009/jdcloud-sdk-python | 8b045c99bc5b73ca7348e950b6f01e03a27982f5 | [
"Apache-2.0"
] | 33 | 2018-04-20T05:29:16.000Z | 2022-02-17T09:10:05.000Z | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class SetLiveDomainIpBlackListRequest(JDCloudRequest):
    """
    Set the IP black/white list for a live-streaming domain.

    Issues a POST to /liveDomain/{domain}/ipBlackList using the fields
    supplied by a SetLiveDomainIpBlackListParameters instance.
    """
    def __init__(self, parameters, header=None, version="v1"):
        # parameters provides the {domain} path variable and body fields.
        super(SetLiveDomainIpBlackListRequest, self).__init__(
            '/liveDomain/{domain}/ipBlackList', 'POST', header, version)
        self.parameters = parameters
class SetLiveDomainIpBlackListParameters(object):
    """Parameters for SetLiveDomainIpBlackListRequest (auto-generated style)."""
    def __init__(self, domain, ):
        """
        :param domain: the user's live-streaming domain
        """
        self.domain = domain
        # Optional body fields; populate via the setters below.
        self.ips = None
        self.ipListType = None

    def setIps(self, ips):
        """
        :param ips: (Optional) list of IPs; at most 50 entries are allowed
        """
        self.ips = ips

    def setIpListType(self, ipListType):
        """
        :param ipListType: (Optional) list type: "black" for blacklist, "white" for whitelist
        """
        self.ipListType = ipListType
| 28.142857 | 75 | 0.679569 |
acdef66ebd4150c3cc60e3ca8c7b1b480b4a83ed | 7,621 | py | Python | sparkify/emr/cluster.py | bopopescu/sparkify-data-lake | 5ed6772a9a699aeb24ab306bad475b2033ac13ab | [
"Apache-2.0"
] | null | null | null | sparkify/emr/cluster.py | bopopescu/sparkify-data-lake | 5ed6772a9a699aeb24ab306bad475b2033ac13ab | [
"Apache-2.0"
] | null | null | null | sparkify/emr/cluster.py | bopopescu/sparkify-data-lake | 5ed6772a9a699aeb24ab306bad475b2033ac13ab | [
"Apache-2.0"
] | 1 | 2020-07-24T07:02:05.000Z | 2020-07-24T07:02:05.000Z | import time
from datetime import datetime
import pandas as pd
import boto3
import json
import configparser
def get_config():
    """Load deployment settings from the local ``dwh.cfg`` file.

    Returns:
        configparser.ConfigParser: the parsed configuration.

    Raises:
        FileNotFoundError: if ``dwh.cfg`` does not exist in the working directory.
    """
    config = configparser.ConfigParser()
    # Use a context manager so the file handle is closed promptly
    # (the previous bare open() call leaked the handle).
    with open('dwh.cfg') as config_file:
        config.read_file(config_file)
    return config
def create_emr_role(config):
    """Create the EMR service IAM role named in DWH_IAM_EMR_ROLE_NAME.

    Creates the role with a trust policy for elasticmapreduce.amazonaws.com,
    attaches the AWS-managed AmazonElasticMapReduceRole policy, then prints
    the resulting role ARN. Errors are printed, not raised (best-effort).
    """
    iam = iam_client(config)
    try:
        print('Creating a new IAM Role')
        iam.create_role(
            Path='/',
            RoleName=config.get("DWH", "DWH_IAM_EMR_ROLE_NAME"),
            Description='Default policy for the Amazon Elastic MapReduce service role.',
            # Trust policy: allow the EMR service to assume this role.
            AssumeRolePolicyDocument=json.dumps(
                {
                    'Statement': [{
                        'Action': 'sts:AssumeRole',
                        'Effect': 'Allow',
                        'Principal': {'Service': 'elasticmapreduce.amazonaws.com'}
                    }],
                    'Version': '2012-10-17'
                }
            )
        )
        print('Attaching Policy')
        attaching_policy_result = iam.attach_role_policy(
            RoleName=config.get("DWH", "DWH_IAM_EMR_ROLE_NAME"),
            PolicyArn="arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceRole"
        )['ResponseMetadata']['HTTPStatusCode']
        print(attaching_policy_result)
        print('Get the IAM role ARN')
        role_arn = iam_role(config.get("DWH", "DWH_IAM_EMR_ROLE_NAME"), iam)
        print(role_arn['Role']['Arn'])
    except Exception as e:
        # Best-effort: creation fails harmlessly if the role already exists.
        print(e)
def create_ec2_role(config):
    """Create the EC2 instance IAM role + instance profile for EMR nodes.

    Creates the role named in DWH_IAM_EC2_ROLE_NAME with a trust policy for
    ec2.amazonaws.com, attaches AmazonElasticMapReduceforEC2Role, then
    creates a same-named instance profile and adds the role to it.
    Errors in either step are printed, not raised (best-effort).
    """
    iam = iam_client(config)
    try:
        print('Creating a new EC2 IAM Role')
        iam.create_role(
            Path='/',
            RoleName=config.get("DWH", "DWH_IAM_EC2_ROLE_NAME"),
            Description='Amazon EC2 service role...',
            # Trust policy: allow EC2 instances to assume this role.
            AssumeRolePolicyDocument=json.dumps(
                {
                    'Statement': [{
                        'Sid': '',
                        'Action': 'sts:AssumeRole',
                        'Effect': 'Allow',
                        'Principal': {'Service': 'ec2.amazonaws.com'}
                    }],
                    'Version': '2008-10-17'
                }
            )
        )
        print('Attaching Policy')
        attaching_policy_result = iam.attach_role_policy(
            RoleName=config.get("DWH", "DWH_IAM_EC2_ROLE_NAME"),
            PolicyArn="arn:aws:iam::aws:policy/service-role/AmazonElasticMapReduceforEC2Role"
        )['ResponseMetadata']['HTTPStatusCode']
        print(attaching_policy_result)
        print('Get the IAM role ARN')
        role_arn = iam_role(config.get("DWH", "DWH_IAM_EC2_ROLE_NAME"), iam)
        print(role_arn['Role']['Arn'])
    except Exception as e:
        print(e)
    # Second best-effort step: the instance profile EMR attaches to EC2 nodes.
    try:
        iam.create_instance_profile(
            InstanceProfileName=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME'),
            Path='/'
        )
        instance_profile = iam.get_instance_profile(
            InstanceProfileName=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME')
        )
        print(instance_profile)
        iam.add_role_to_instance_profile(
            InstanceProfileName=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME'),
            RoleName=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME')
        )
        print('-------')
        instance_profile = iam.get_instance_profile(
            InstanceProfileName=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME')
        )
        print(instance_profile)
    except Exception as e:
        print(e)
def iam_role(role_name, iam):
    """Look up an IAM role by name via the given IAM client.

    Args:
        role_name: name of the IAM role to fetch.
        iam: a boto3 IAM client (or compatible object exposing get_role).

    Returns:
        The raw ``get_role`` response dict.
    """
    response = iam.get_role(RoleName=role_name)
    return response
def iam_client(config):
    """Build a boto3 IAM client from the [DWH]/[AWS] config sections."""
    iam = boto3.client('iam',
                       region_name=config.get('DWH', 'REGION'),
                       aws_access_key_id=config.get('AWS', 'KEY'),
                       aws_secret_access_key=config.get('AWS', 'SECRET'))
    return iam
def emr_client(config):
    """Build a boto3 EMR client from the [DWH]/[AWS] config sections."""
    return boto3.client('emr',
                        region_name=config.get('DWH', 'REGION'),
                        aws_access_key_id=config.get('AWS', 'KEY'),
                        aws_secret_access_key=config.get('AWS', 'SECRET'))
def emr_cluster(config):
    """Launch the EMR Spark cluster described by the [DWH] config section.

    Starts one on-demand master and DWH_NUM_SLAVES core nodes of
    DWH_NODE_TYPE, kept alive with no initial steps. Errors are printed,
    not raised (best-effort).

    NOTE(review): LogUri='s3://LOGS', Ec2KeyName='spark-cluster' and
    Ec2SubnetId='subnet-3815fa53' are hard-coded placeholders -- confirm
    whether these should come from the config file instead.
    """
    emr = emr_client(config)
    try:
        response = emr.run_job_flow(
            Name=config.get("DWH", "DWH_CLUSTER_IDENTIFIER"), LogUri='s3://LOGS', ReleaseLabel='emr-5.29.0',
            Applications=[
                {
                    'Name': 'Spark'
                },
            ],
            Instances={
                'InstanceGroups': [
                    {
                        'Name': "Master",
                        'Market': 'ON_DEMAND',
                        'InstanceRole': 'MASTER',
                        'InstanceType': config.get("DWH", "DWH_NODE_TYPE"),
                        'InstanceCount': 1,
                    },
                    {
                        'Name': "Slave",
                        'Market': 'ON_DEMAND',
                        'InstanceRole': 'CORE',
                        'InstanceType': config.get("DWH", "DWH_NODE_TYPE"),
                        'InstanceCount': int(config.get("DWH", "DWH_NUM_SLAVES")),
                    }
                ],
                'Ec2KeyName': 'spark-cluster',
                # Keep the cluster running after the (empty) step list finishes.
                'KeepJobFlowAliveWhenNoSteps': True,
                'TerminationProtected': False,
                'Ec2SubnetId': 'subnet-3815fa53',
            },
            Steps=[],
            VisibleToAllUsers=True,
            # Roles created by create_ec2_role / create_emr_role above.
            JobFlowRole=config.get('DWH', 'DWH_IAM_EC2_ROLE_NAME'),
            ServiceRole=config.get('DWH', 'DWH_IAM_EMR_ROLE_NAME'),
            Tags=[
                {
                    'Key': 'tag_name_1',
                    'Value': 'tab_value_1',
                },
                {
                    'Key': 'tag_name_2',
                    'Value': 'tag_value_2',
                },
            ],
        )
        print(response['ResponseMetadata']['HTTPStatusCode'])
    except Exception as e:
        print(e)
def pretty_redshift_properties(props):
    """Print a key/value summary of an EMR ``describe_cluster`` response.

    Args:
        props: dict as returned by ``emr.describe_cluster`` -- must contain
            a top-level ``'Cluster'`` mapping. Only Id, Name, Status and
            MasterPublicDnsName are shown.
    """
    # ``-1`` for max_colwidth was deprecated (and later rejected) by pandas;
    # ``None`` is the supported way to disable column-width truncation.
    pd.set_option('display.max_colwidth', None)
    keys_to_show = ["Id", "Name", "Status", "MasterPublicDnsName"]
    rows = [(k, v) for k, v in props['Cluster'].items() if k in keys_to_show]
    print(pd.DataFrame(data=rows, columns=["Key", "Value"]))
def cluster_status():
    """Print a short status summary of the current EMR cluster."""
    pretty_redshift_properties(cluster_properties())
def cluster_properties():
    """Return the ``describe_cluster`` response for the configured cluster."""
    config = get_config()
    emr = emr_client(config)
    return emr.describe_cluster(ClusterId=cluster_id(config, emr))
def cluster_id(config, emr):
    """Return the Id of the cluster whose name matches DWH_CLUSTER_IDENTIFIER.

    Raises IndexError if no matching cluster is listed.
    """
    # NOTE(review): CreatedAfter=datetime.today() filters on the current
    # timestamp, which may exclude clusters created earlier -- confirm intent.
    target_name = config.get("DWH", "DWH_CLUSTER_IDENTIFIER")
    listing = emr.list_clusters(
        CreatedAfter=datetime.today(),
    )
    matches = [entry for entry in listing['Clusters'] if entry['Name'] == target_name]
    return matches[0]['Id']
def create_cluster():
    """Create the IAM roles and launch the EMR cluster, then print its status.

    Sleeps 20 seconds after role creation so IAM changes can propagate
    before run_job_flow references the roles.
    """
    config = get_config()
    print('init')
    create_emr_role(config)
    create_ec2_role(config)
    time.sleep(20)
    print('ending')
    emr_cluster(config)
    cluster_status()
def delete_cluster():
    """Terminate the EMR cluster named by DWH_CLUSTER_IDENTIFIER.

    The IAM role cleanup below is intentionally left commented out so the
    roles can be reused by subsequent create_cluster() runs.
    """
    config = get_config()
    emr = emr_client(config)
    cluster = cluster_id(config, emr)
    print("Deleting EMR cluster " + str(cluster))
    emr.terminate_job_flows(
        JobFlowIds=[
            cluster,
        ]
    )
    # iam = iam_client(config)
    # print("Detaching policy...")
    # iam.detach_role_policy(RoleName=config.get("DWH", "DWH_IAM_EMR_ROLE_NAME"),
    #                        PolicyArn="arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess")
    # print("Deleting role...")
    # iam.delete_role(RoleName=config.get("DWH", "DWH_IAM_EMR_ROLE_NAME"), )
def help():
    """Print the available maintenance commands for this module.

    NOTE: shadows the builtin help() when star-imported; kept for the
    module's interactive CLI usage.
    """
    menu = (
        "1. create_cluster()",
        "2. cluster_status()",
        "4. delete_cluster()",
    )
    for entry in menu:
        print(entry)
| 29.886275 | 119 | 0.539168 |
acdef7c1191d666577dd6d7f3a126aa0dfb24abd | 1,564 | py | Python | ams/utilities/target.py | CPFL/AMS | bb685024b1c061e7144dc2ef93e09d6d6c830af8 | [
"Apache-2.0"
] | 26 | 2018-02-16T10:49:19.000Z | 2022-03-23T16:42:48.000Z | ams/utilities/target.py | CPFL/Autoware-Management-System | bb685024b1c061e7144dc2ef93e09d6d6c830af8 | [
"Apache-2.0"
] | 10 | 2018-11-13T08:16:49.000Z | 2019-01-09T04:59:24.000Z | ams/utilities/target.py | CPFL/AMS | bb685024b1c061e7144dc2ef93e09d6d6c830af8 | [
"Apache-2.0"
] | 19 | 2018-03-28T07:38:45.000Z | 2022-01-27T05:18:21.000Z | #!/usr/bin/env python
# coding: utf-8
from ams.structures import TARGET
from ams.structures import Target as Structure
from ams.structures import Targets as Structures
class Target(object):
    """Helpers for building and comparing Target structures (id + group)."""
    CONST = TARGET
    @staticmethod
    def new_target(_id, group):
        """Build a Target structure from an id and a group name."""
        return Structure.new_data(
            id=_id,
            group=group
        )
    @staticmethod
    def new_node_target(node):
        """Build a Target for a node: a class yields (None, class name);
        an instance yields (its event_loop_id, its class name)."""
        if isinstance(node, type):
            return Target.new_target(None, node.__name__)
        else:
            return Target.new_target(node.event_loop_id, node.__class__.__name__)
    # Re-exported validators from the underlying Structure type.
    validate_target = Structure.validate_data
    get_errors = Structure.get_errors
    @staticmethod
    def new_targets(targets):
        """Wrap a list of targets into a Targets structure."""
        return Structures.new_data(targets)
    @staticmethod
    def is_same_id(target1, target2):
        # False if either target is None; otherwise compare ids.
        return None not in [target1, target2] and target1.id == target2.id
    @staticmethod
    def is_same_group(target1, target2):
        # False if either target is None; otherwise compare groups.
        return None not in [target1, target2] and target1.group == target2.group
    @staticmethod
    def is_same(target1, target2):
        """True when both id and group match (and neither target is None)."""
        return Target.is_same_id(target1, target2) and Target.is_same_group(target1, target2)
    @staticmethod
    def get_same_group_targets_in_targets(group, targets):
        """Return the non-None targets in *targets* whose group equals *group*."""
        return list(filter(lambda x: x is not None and x.group == group, targets))
    @staticmethod
    def get_code(target):
        """Serialize a target as '<group><DELIMITER><id>' (None fields -> '')."""
        return TARGET.DELIMITER.join([
            target.group if target.group is not None else "",
            target.id if target.id is not None else ""
        ])
| 27.928571 | 93 | 0.672634 |
acdef9d4f49f3d936b2f1777baadb51986ca1a3e | 6,703 | py | Python | cliport/tasks/packing_boxes_pairs.py | wx-b/cliport | c29b0c4b6b1c4e4da5bda6c7f8c718e36f28a6e8 | [
"Apache-2.0"
] | 110 | 2021-09-24T19:48:07.000Z | 2022-03-25T12:14:14.000Z | cliport/tasks/packing_boxes_pairs.py | wx-b/cliport | c29b0c4b6b1c4e4da5bda6c7f8c718e36f28a6e8 | [
"Apache-2.0"
] | 9 | 2021-10-20T19:34:49.000Z | 2022-03-23T15:20:33.000Z | cliport/tasks/packing_boxes_pairs.py | wx-b/cliport | c29b0c4b6b1c4e4da5bda6c7f8c718e36f28a6e8 | [
"Apache-2.0"
] | 14 | 2021-09-25T04:59:22.000Z | 2022-03-08T11:17:12.000Z | """Packing Box Pairs task."""
import os
import numpy as np
from cliport.tasks.task import Task
from cliport.utils import utils
import pybullet as p
class PackingBoxesPairsUnseenColors(Task):
    """Packing Box Pairs task: pack the two named colors of blocks into a box,
    evaluated with colors held out from training (unseen at train time)."""
    def __init__(self):
        super().__init__()
        self.max_steps = 20
        self.lang_template = "pack all the {colors} blocks into the brown box" # should have called it boxes :(
        self.task_completed_desc = "done packing blocks."
        # Tight z-bound (0.0525) to discourage stuffing everything into the brown box
        self.zone_bounds = np.array([[0.25, 0.75], [-0.5, 0.5], [0, 0.0525]])
    def reset(self, env):
        """Build a scene: a container box, randomly partitioned blocks of two
        'relevant' colors scattered outside it (goal: pack them back), plus
        distractor blocks of other colors. Also sets the language goal."""
        super().reset(env)
        # Add container box.
        zone_size = self.get_random_size(0.05, 0.3, 0.05, 0.3, 0.05, 0.05)
        zone_pose = self.get_random_pose(env, zone_size)
        container_template = 'container/container-template.urdf'
        half = np.float32(zone_size) / 2
        replace = {'DIM': zone_size, 'HALF': half}
        container_urdf = self.fill_template(container_template, replace)
        env.add_object(container_urdf, zone_pose, 'fixed')
        if os.path.exists(container_urdf):
            os.remove(container_urdf)
        margin = 0.01
        min_object_dim = 0.05
        bboxes = []
        class TreeNode:
            # Node of the KD partition; leaves become block bounding boxes.
            def __init__(self, parent, children, bbox):
                self.parent = parent
                self.children = children
                self.bbox = bbox  # min x, min y, min z, max x, max y, max z
        def KDTree(node):
            # Recursively split the container volume until no axis can fit
            # two objects of min_object_dim; leaves are appended to bboxes.
            size = node.bbox[3:] - node.bbox[:3]
            # Choose which axis to split.
            split = size > 2 * min_object_dim
            if np.sum(split) == 0:
                bboxes.append(node.bbox)
                return
            split = np.float32(split) / np.sum(split)
            split_axis = np.random.choice(range(len(split)), 1, p=split)[0]
            # Split along chosen axis and create 2 children
            cut_ind = np.random.rand() * \
                (size[split_axis] - 2 * min_object_dim) + \
                node.bbox[split_axis] + min_object_dim
            child1_bbox = node.bbox.copy()
            child1_bbox[3 + split_axis] = cut_ind - margin / 2.
            child2_bbox = node.bbox.copy()
            child2_bbox[split_axis] = cut_ind + margin / 2.
            node.children = [
                TreeNode(node, [], bbox=child1_bbox),
                TreeNode(node, [], bbox=child2_bbox)
            ]
            KDTree(node.children[0])
            KDTree(node.children[1])
        # Split container space with KD trees.
        stack_size = np.array(zone_size)
        stack_size[0] -= 0.01
        stack_size[1] -= 0.01
        root_size = (0.01, 0.01, 0) + tuple(stack_size)
        root = TreeNode(None, [], bbox=np.array(root_size))
        KDTree(root)
        # Pick up to two 'relevant' colors; all other colors are distractors.
        all_color_names = [c for c in self.get_colors()]
        relevant_color_names = np.random.choice(all_color_names, min(2, len(bboxes)), replace=False)
        distractor_color_names = [c for c in all_color_names if c not in relevant_color_names]
        pack_colors = [utils.COLORS[c] for c in relevant_color_names]
        distractor_colors = [utils.COLORS[c] for c in distractor_color_names]
        # Add objects in container.
        object_points = {}
        object_ids = []
        bboxes = np.array(bboxes)
        object_template = 'box/box-template.urdf'
        for bbox in bboxes:
            size = bbox[3:] - bbox[:3]
            position = size / 2. + bbox[:3]
            position[0] += -zone_size[0] / 2
            position[1] += -zone_size[1] / 2
            pose = (position, (0, 0, 0, 1))
            pose = utils.multiply(zone_pose, pose)
            urdf = self.fill_template(object_template, {'DIM': size})
            box_id = env.add_object(urdf, pose)
            if os.path.exists(urdf):
                os.remove(urdf)
            object_ids.append((box_id, (0, None)))
            icolor = np.random.choice(range(len(pack_colors)), 1).squeeze()
            p.changeVisualShape(box_id, -1, rgbaColor=pack_colors[icolor] + [1])
            object_points[box_id] = self.get_box_object_points(box_id)
        # Randomly select object in box and save ground truth pose.
        object_volumes = []
        true_poses = []
        for object_id, _ in object_ids:
            # Remember the in-container pose as the goal, then scatter the
            # block to a random pose in the workspace.
            true_pose = p.getBasePositionAndOrientation(object_id)
            object_size = p.getVisualShapeData(object_id)[0][3]
            object_volumes.append(np.prod(np.array(object_size) * 100))
            pose = self.get_random_pose(env, object_size)
            p.resetBasePositionAndOrientation(object_id, pose[0], pose[1])
            true_poses.append(true_pose)
        # Add distractor objects
        num_distractor_objects = 4
        distractor_bbox_idxs = np.random.choice(len(bboxes), num_distractor_objects)
        for bbox_idx in distractor_bbox_idxs:
            bbox = bboxes[bbox_idx]
            size = bbox[3:] - bbox[:3]
            position = size / 2. + bbox[:3]
            position[0] += -zone_size[0] / 2
            position[1] += -zone_size[1] / 2
            pose = self.get_random_pose(env, size)
            urdf = self.fill_template(object_template, {'DIM': size})
            box_id = env.add_object(urdf, pose)
            if os.path.exists(urdf):
                os.remove(urdf)
            icolor = np.random.choice(range(len(distractor_colors)), 1).squeeze()
            if box_id:
                p.changeVisualShape(box_id, -1, rgbaColor=distractor_colors[icolor] + [1])
        # Some scenes might contain just one relevant block that fits in the box.
        if len(relevant_color_names) > 1:
            relevant_desc = f'{relevant_color_names[0]} and {relevant_color_names[1]}'
        else:
            relevant_desc = f'{relevant_color_names[0]}'
        self.goals.append((
            object_ids, np.eye(len(object_ids)), true_poses,
            False, True, 'zone',
            (object_points, [(zone_pose, zone_size)]), 1))
        self.lang_goals.append(self.lang_template.format(
            colors=relevant_desc,
        ))
    def get_colors(self):
        """Color pool: train colors in train mode, held-out colors otherwise."""
        return utils.TRAIN_COLORS if self.mode == 'train' else utils.EVAL_COLORS
class PackingBoxesPairsSeenColors(PackingBoxesPairsUnseenColors):
    """Variant that always samples from the training color set."""
    def __init__(self):
        super().__init__()
    def get_colors(self):
        return utils.TRAIN_COLORS
class PackingBoxesPairsFull(PackingBoxesPairsUnseenColors):
    """Variant that samples from the union of train and eval color sets."""
    def __init__(self):
        super().__init__()
    def get_colors(self):
        # NOTE(review): set-union iteration order depends on string hashing,
        # so the list order is not reproducible across runs -- confirm whether
        # a sorted() wrapper is wanted for seeded reproducibility.
        all_colors = list(set(utils.TRAIN_COLORS) | set(utils.EVAL_COLORS))
        return all_colors
acdef9e10b6ffe796b5aecc726a89ef67cba3076 | 547 | py | Python | tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py | anandagopal6/azure-functions-python-worker | e4adb351e5454c093fcefbf0fb84f200af32f386 | [
"MIT"
] | 277 | 2018-01-25T23:13:03.000Z | 2022-02-22T06:12:04.000Z | tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py | anandagopal6/azure-functions-python-worker | e4adb351e5454c093fcefbf0fb84f200af32f386 | [
"MIT"
] | 731 | 2018-01-18T18:54:38.000Z | 2022-03-29T00:01:46.000Z | tests/unittests/log_filtering_functions/sdk_submodule_logging/__init__.py | YunchuWang/azure-functions-python-worker | 1f23e038a506c6412e4efbf07eb471a6afab0c2a | [
"MIT"
] | 109 | 2018-01-18T02:22:57.000Z | 2022-02-15T18:59:54.000Z | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
import logging
import azure.functions as func
sdk_submodule_logger = logging.getLogger('azure.functions.submodule')
def main(req: func.HttpRequest) -> str:
    """Emit one record at each level through the 'azure.functions.submodule'
    logger; used by worker tests to verify SDK sub-logger filtering."""
    sdk_submodule_logger.info('sdk_submodule_logger info')
    sdk_submodule_logger.warning('sdk_submodule_logger warning')
    sdk_submodule_logger.debug('sdk_submodule_logger debug')
    # exc_info=True attaches current exception info (none here) to the record.
    sdk_submodule_logger.error('sdk_submodule_logger error', exc_info=True)
    return 'OK-sdk-submodule-logging'
| 32.176471 | 75 | 0.798903 |
acdefa7db332cdc5a8f7e7855834d1dda842064f | 678 | py | Python | zdpapi_modbus/libs/modbus_tk/defines.py | zhangdapeng520/zdpapi_modbus | 729fe8fef1c1268a2efefdb5020a15eda2bda1ea | [
"MIT"
] | 1 | 2022-02-10T03:16:46.000Z | 2022-02-10T03:16:46.000Z | zdpapi_modbus/libs/modbus_tk/defines.py | zhangdapeng520/zdpapi_modbus | 729fe8fef1c1268a2efefdb5020a15eda2bda1ea | [
"MIT"
] | null | null | null | zdpapi_modbus/libs/modbus_tk/defines.py | zhangdapeng520/zdpapi_modbus | 729fe8fef1c1268a2efefdb5020a15eda2bda1ea | [
"MIT"
] | 1 | 2021-12-21T06:46:40.000Z | 2021-12-21T06:46:40.000Z | # modbus异常码
ILLEGAL_FUNCTION = 1
ILLEGAL_DATA_ADDRESS = 2
ILLEGAL_DATA_VALUE = 3
SLAVE_DEVICE_FAILURE = 4
COMMAND_ACKNOWLEDGE = 5
SLAVE_DEVICE_BUSY = 6
MEMORY_PARITY_ERROR = 8
# 支持的读写func码
# 常用读数据
READ_COILS = 1 # 读线圈
READ_DISCRETE_INPUTS = 2 # 读离散输入
READ_HOLDING_REGISTERS = 3 # 读寄存器
READ_INPUT_REGISTERS = 4 # 读输入寄存器
# 常用写数据
WRITE_SINGLE_COIL = 5 # 写单一线圈
WRITE_SINGLE_REGISTER = 6 # 写单一寄存器
WRITE_MULTIPLE_COILS = 15 # 写多个线圈
WRITE_MULTIPLE_REGISTERS = 16 # 写多寄存器
# 其他
READ_EXCEPTION_STATUS = 7
DIAGNOSTIC = 8
REPORT_SLAVE_ID = 17
READ_WRITE_MULTIPLE_REGISTERS = 23
DEVICE_INFO = 43
# 支持的block类型
COILS = 1
DISCRETE_INPUTS = 2
HOLDING_REGISTERS = 3
ANALOG_INPUTS = 4
| 19.371429 | 38 | 0.778761 |
acdefb2906e2077061f10020da45742457c88a0e | 258 | py | Python | django_deployer/paas_templates/dotcloud/mkadmin.py | natea/django-deployer | 5ce7d972db2f8500ec53ad89e7eb312d3360d074 | [
"MIT"
] | 19 | 2015-02-06T06:14:39.000Z | 2021-01-06T22:27:03.000Z | django_deployer/paas_templates/dotcloud/mkadmin.py | natea/django-deployer | 5ce7d972db2f8500ec53ad89e7eb312d3360d074 | [
"MIT"
] | null | null | null | django_deployer/paas_templates/dotcloud/mkadmin.py | natea/django-deployer | 5ce7d972db2f8500ec53ad89e7eb312d3360d074 | [
"MIT"
] | 2 | 2015-12-22T17:22:15.000Z | 2016-03-02T12:15:01.000Z | #!/usr/bin/env python
from wsgi import *
from django.contrib.auth.models import User
# Idempotent bootstrap: create the 'admin' superuser only on first run.
u, created = User.objects.get_or_create(username='admin')
if created:
    # '{{ admin_password }}' is a template placeholder substituted at deploy time.
    u.set_password('{{ admin_password }}')
    u.is_superuser = True
    u.is_staff = True
    u.save()
acdefc29ea23dcb442105017e00fbb7654503c37 | 783 | py | Python | test/linux/ldflags-duplicates/check-ldflags.py | cclauss/GYP | 6a5d2545c6de9db64d6dc98882008666ea6056e8 | [
"BSD-3-Clause"
] | 11 | 2019-07-30T14:15:13.000Z | 2020-08-17T10:03:46.000Z | test/linux/ldflags-duplicates/check-ldflags.py | refack/GYP | 6a5d2545c6de9db64d6dc98882008666ea6056e8 | [
"BSD-3-Clause"
] | 25 | 2018-10-18T15:39:07.000Z | 2019-07-03T00:40:01.000Z | test/linux/ldflags-duplicates/check-ldflags.py | refack/GYP | 6a5d2545c6de9db64d6dc98882008666ea6056e8 | [
"BSD-3-Clause"
] | 15 | 2018-10-18T14:40:18.000Z | 2019-07-02T16:25:55.000Z | #!/usr/bin/env python
# Copyright (c) 2015 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies duplicate ldflags are not removed.
"""
from __future__ import print_function
import sys
def CheckContainsFlags(args, substring):
  """Return True iff *substring* occurs in *args*; print an error otherwise."""
  if substring in args:
    return True
  print('ERROR: Linker arguments "%s" are missing in "%s"' % (substring,
                                                              args))
  return False
if __name__ == '__main__':
  # The test harness invokes this script as a fake linker; the full command
  # line arrives via argv. Exit 1 unless BOTH duplicated flag pairs survive.
  args = " ".join(sys.argv)
  print("args = " + args)
  if (not CheckContainsFlags(args, 'lib1.a -Wl,--no-whole-archive')
      or not CheckContainsFlags(args, 'lib2.a -Wl,--no-whole-archive')):
    sys.exit(1)
  sys.exit(0)
| 27 | 74 | 0.634738 |
acdefc6a4aa4e6e07d3b332f3dd7a261e047745f | 9,644 | py | Python | core/web/driver/relationOpt.py | Chras-fu/LiuMa-engine | 4d52160d8a5c81d31311ab75d6cf274bfa74269e | [
"MIT"
] | 1 | 2022-03-31T12:29:14.000Z | 2022-03-31T12:29:14.000Z | core/web/driver/relationOpt.py | Chras-fu/LiuMa-engine | 4d52160d8a5c81d31311ab75d6cf274bfa74269e | [
"MIT"
] | null | null | null | core/web/driver/relationOpt.py | Chras-fu/LiuMa-engine | 4d52160d8a5c81d31311ab75d6cf274bfa74269e | [
"MIT"
] | null | null | null | from selenium.common.exceptions import NoSuchElementException
from core.web.driver.operation import Operation
class Relation(Operation):
"""关联类操作"""
def get_page_title(self, save_name):
"""获取页面标题"""
try:
actual = self.driver.title
self.test.debugLog("成功获取title:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取title")
raise e
else:
self.test.context[save_name] = actual
def get_page_url(self, save_name):
"""获取页面url"""
try:
actual = self.driver.current_url
self.test.debugLog("成功获取url:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取url")
raise e
else:
self.test.context[save_name] = actual
def get_ele_text(self, element, save_name):
"""获取元素文本"""
try:
actual = self.find_element(element).text
self.test.debugLog("成功获取元素text:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素text")
raise e
else:
self.test.context[save_name] = actual
def get_ele_tag(self, element, save_name):
"""获取元素tag"""
try:
actual = self.find_element(element).tag_name
self.test.debugLog("成功获取元素tag name:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素tag name")
raise e
else:
self.test.context[save_name] = actual
def get_ele_size(self, element, save_name):
"""获取元素尺寸"""
try:
actual = self.find_element(element).size
self.test.debugLog("成功获取元素size:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素size")
raise e
else:
self.test.context[save_name] = actual
def get_ele_height(self, element, save_name):
"""获取元素高度"""
try:
actual = self.find_element(element).size.get("height")
self.test.debugLog("成功获取元素height:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素height")
raise e
else:
self.test.context[save_name] = actual
def get_ele_width(self, element, save_name):
"""获取元素宽度"""
try:
actual = self.find_element(element).size.get("width")
self.test.debugLog("成功获取元素width:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素width")
raise e
else:
self.test.context[save_name] = actual
def get_ele_location(self, element, save_name):
"""获取元素位置"""
try:
actual = self.find_element(element).location
self.test.debugLog("成功获取元素location:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素location")
raise e
else:
self.test.context[save_name] = actual
def get_ele_x(self, element, save_name):
"""获取元素X坐标"""
try:
actual = self.find_element(element).location.get("x")
self.test.debugLog("成功获取元素location x:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素location x")
raise e
else:
self.test.context[save_name] = actual
def get_ele_y(self, element, save_name):
"""获取元素Y坐标"""
try:
actual = self.find_element(element).location.get("y")
self.test.debugLog("成功获取元素location y:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素location y")
raise e
else:
self.test.context[save_name] = actual
def get_ele_attribute(self, element, name, save_name):
"""获取元素属性"""
try:
actual = self.find_element(element).get_attribute(name)
self.test.debugLog("成功获取元素attribute:%s" % str(actual))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素attribute")
raise e
else:
self.test.context[save_name] = actual
def get_ele_css(self, element, name, save_name):
"""获取元素css样式"""
try:
actual = self.find_element(element).value_of_css_property(name)
self.test.debugLog("成功获取元素css %s:%s" % (name, str(actual)))
except NoSuchElementException as e:
raise e
except Exception as e:
self.test.errorLog("无法获取元素css %s" % name)
raise e
else:
self.test.context[save_name] = actual
def get_window_position(self, save_name):
"""获取窗口位置"""
try:
actual = self.driver.get_window_position()
self.test.debugLog("成功获取窗口position:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口position")
raise e
else:
self.test.context[save_name] = actual
def get_window_x(self, save_name):
"""获取窗口X坐标"""
try:
actual = self.driver.get_window_position().get("x")
self.test.debugLog("成功获取窗口position x:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口position x")
raise e
else:
self.test.context[save_name] = actual
def get_window_y(self, save_name):
"""获取窗口Y坐标"""
try:
actual = self.driver.get_window_position().get("y")
self.test.debugLog("成功获取窗口position y:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口position y")
raise e
else:
self.test.context[save_name] = actual
def get_window_size(self, save_name):
"""获取窗口大小"""
try:
actual = self.driver.get_window_size()
self.test.debugLog("成功获取窗口size:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口size")
raise e
else:
self.test.context[save_name] = actual
def get_window_width(self, save_name):
"""获取窗口宽度"""
try:
actual = self.driver.get_window_size().get("width")
self.test.debugLog("成功获取窗口width:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口width")
raise e
else:
self.test.context[save_name] = actual
def get_window_height(self, save_name):
"""获取窗口高度"""
try:
actual = self.driver.get_window_size().get("height")
self.test.debugLog("成功获取窗口height:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取窗口height")
raise e
else:
self.test.context[save_name] = actual
def get_current_handle(self, save_name):
"""获取当前窗口句柄"""
try:
actual = self.driver.current_window_handle
self.test.debugLog("成功获取当前窗口handle:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取当前窗口handle")
raise e
else:
self.test.context[save_name] = actual
def get_all_handle(self, save_name):
"""获取所有窗口句柄"""
try:
actual = self.driver.window_handles
self.test.debugLog("成功获取所有窗口handle:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取所有窗口handle")
raise e
else:
self.test.context[save_name] = actual
def get_cookies(self, save_name):
"""获取cookies"""
try:
actual = self.driver.get_cookies()
self.test.debugLog("成功获取cookies:%s" % str(actual))
except Exception as e:
self.test.errorLog("无法获取cookies")
raise e
else:
self.test.context[save_name] = actual
def get_cookie(self, name, save_name):
"""获取cookie"""
try:
actual = self.driver.get_cookie(name)
self.test.debugLog("成功获取cookie %s:%s" % (name, str(actual)))
except Exception as e:
self.test.errorLog("无法获取cookie:%s" % name)
raise e
else:
self.test.context[save_name] = actual
def custom(self, **kwargs):
    """自定义 (custom step): execute a user-supplied Python snippet.

    kwargs:
        code:    source text run via exec(); the snippet may call
                 sys_return(value) to hand back the value to save.
        element: locator info exposed to the snippet as ``element``.
        data:    parameter dict exposed as ``data``; data["save_name"] is
                 the context key the returned value is stored under.
        trans:   human-readable step name used only in log messages.
    """
    code = kwargs["code"]
    # NOTE(review): aliasing locals() and mutating it so exec(code) can see
    # element/data/driver/test and write _exec_result relies on CPython frame
    # behaviour — confirm before running on another interpreter.
    names = locals()
    names["element"] = kwargs["element"]
    names["data"] = kwargs["data"]
    names["driver"] = self.driver
    names["test"] = self.test
    try:
        """关联操作需要返回被断言的值 以sys_return(value)返回"""
        def sys_return(res):
            names["_exec_result"] = res
        exec(code)
        self.test.debugLog("成功执行 %s" % kwargs["trans"])
    except NoSuchElementException as e:
        # Element-not-found is re-raised untouched so callers can treat it specially.
        raise e
    except Exception as e:
        self.test.errorLog("无法执行 %s" % kwargs["trans"])
        raise e
    else:
        self.test.context[kwargs["data"]["save_name"]] = names["_exec_result"]
| 33.027397 | 82 | 0.557549 |
acdefdb8dfb9f0c374372c29ddb385bf5875e354 | 1,609 | py | Python | jejuRSScrawler.py | LiveCoronaDetector/Crawler | dc089f2e1e340ce9a3f7fbef1024db2e38cc6344 | [
"MIT"
] | 8 | 2020-08-07T03:23:04.000Z | 2022-03-25T12:01:13.000Z | jejuRSScrawler.py | LiveCoronaDetector/Crawler | dc089f2e1e340ce9a3f7fbef1024db2e38cc6344 | [
"MIT"
] | null | null | null | jejuRSScrawler.py | LiveCoronaDetector/Crawler | dc089f2e1e340ce9a3f7fbef1024db2e38cc6344 | [
"MIT"
] | 7 | 2020-08-11T10:57:31.000Z | 2022-02-01T04:54:40.000Z | """
제주특별자치도 보건서비스 현황 및 브리핑자료
http://www.jeju.go.kr/wel/healthCare/corona/coronaNotice.htm
Author: Eunhak Lee (@return0927)
"""
import re
import requests
from bs4 import BeautifulSoup as Soup
from bs4.element import Tag
from datetime import datetime
# Preferences
url = "http://www.jeju.go.kr/wel/healthCare/corona/coronaNotice.htm?act=rss"
# Model
def parse():
    """Fetch the Jeju COVID-19 notice RSS feed and return it as a dict.

    Returns:
        dict with keys:
            title:       feed title (or 'Empty Title' when missing).
            description: feed description (or 'Empty Description').
            items:       list of dicts with title/link/description/pubDate/author.
    """
    # Fix: a request without a timeout can hang the crawler forever if the
    # server stops responding.
    req = requests.get(url, timeout=30)
    soup = Soup(req.text, 'html.parser')

    title = getattr(soup.find("title"), 'text', 'Empty Title')
    description = getattr(soup.find('description'), 'text', 'Empty Description')

    # html.parser breaks <item><link> (see original TODO), so the last
    # URL-looking string is pulled out of the raw item text instead.
    # Compile the pattern once rather than once per item.
    link_pattern = re.compile(
        r'((http|ftp|https)://([\w_-]+(?:(?:\.[\w_-]+)+))([\w.,@?^=%&:/~+#-]*[\w@?^=%&/~+#-])?)')

    items = []
    for elem in soup.findAll("item"):
        elem_title = getattr(elem.find("title"), 'text', '')
        elem_link = link_pattern.findall(elem.text)[-1][0]
        elem_description = getattr(elem.find("description"), 'text', '')
        elem_author = getattr(elem.find("author"), 'text', '')

        _bare_date = getattr(elem.find("pubdate"), 'text', '')
        elem_pubDate = datetime.strptime(_bare_date, "%a, %d %b %Y %H:%M:%S GMT")

        items.append({
            "title": elem_title,
            "link": elem_link,
            "description": elem_description,
            "pubDate": elem_pubDate,
            "author": elem_author
        })

    return {
        'title': title,
        'description': description,
        'items': items
    }
# Allow running this crawler module directly from the command line.
if __name__ == "__main__":
    parse()
| 28.22807 | 107 | 0.576756 |
acdefe9827593516cb4205128eac9456ecc19dcc | 1,343 | py | Python | ENV/lib/python3.5/site-packages/pyrogram/api/types/send_message_cancel_action.py | block1o1/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 4 | 2021-10-14T21:22:25.000Z | 2022-03-12T19:58:48.000Z | ENV/lib/python3.5/site-packages/pyrogram/api/types/send_message_cancel_action.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | null | null | null | ENV/lib/python3.5/site-packages/pyrogram/api/types/send_message_cancel_action.py | inevolin/CryptoPredicted | 7f660cdc456fb8252b3125028f31fd6f5a3ceea5 | [
"MIT"
] | 1 | 2022-03-15T22:52:53.000Z | 2022-03-15T22:52:53.000Z | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2018 Dan Tès <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.api.core import *
class SendMessageCancelAction(Object):
"""Attributes:
ID: ``0xfd5ec8f5``
No parameters required.
"""
ID = 0xfd5ec8f5
def __init__(self):
pass
@staticmethod
def read(b: BytesIO, *args) -> "SendMessageCancelAction":
# No flags
return SendMessageCancelAction()
def write(self) -> bytes:
b = BytesIO()
b.write(Int(self.ID, False))
# No flags
return b.getvalue()
| 27.408163 | 74 | 0.687267 |
acdefeb2ccf2a7366dca07965b8f5268144be3a6 | 4,527 | py | Python | automatedtest_backend/projectmanagent/models/general.py | zhousheng0325/AutomationPlatform | e7e41e4a26c63fc8151b191bccb0f4a412a3388d | [
"MIT"
] | null | null | null | automatedtest_backend/projectmanagent/models/general.py | zhousheng0325/AutomationPlatform | e7e41e4a26c63fc8151b191bccb0f4a412a3388d | [
"MIT"
] | 10 | 2020-06-06T01:30:29.000Z | 2022-03-12T00:16:52.000Z | automatedtest_backend/projectmanagent/models/general.py | zhousheng0325/AutomationPlatform | e7e41e4a26c63fc8151b191bccb0f4a412a3388d | [
"MIT"
] | null | null | null | from django.db import models
from user.models import ApiUser, User
class ProjectManager(models.Model):
"""
项目管理
"""
level_status = ((1, '低'), (2, '中'), (3, '高'))
name = models.CharField(max_length=20, verbose_name="项目名称",help_text='项目名称')
start_time = models.CharField(max_length=50, verbose_name="项目开始时间",help_text="开始时间")
end_time = models.CharField(max_length=50, verbose_name="项目结束时间",help_text="结束时间")
status = models.BooleanField(default=True,help_text="项目状态 ")
level = models.IntegerField(choices=level_status, default=3, verbose_name="项目优先级",help_text="项目优先级")
desc = models.TextField(max_length=400, verbose_name="项目描述", null=True, blank=True,help_text="项目描述")
person_charge = models.ForeignKey(User, null=True, blank=True, on_delete=models.SET_NULL, verbose_name="项目负责人",help_text="负责人")
is_delete = models.BooleanField(default=False,verbose_name="是否删除",help_text="删除")
def __str__(self):
return self.name
class Meta:
verbose_name = "项目管理"
verbose_name_plural = verbose_name
db_table = "project_manager"
ordering = ["id"]
@property
def get_server_test_env(self):
return self.servers_env.filter(env=1)
@property
def get_server_development_env(self):
return self.servers_env.filter(env=2)
@property
def get_server_preprouduct_env(self):
return self.servers_env.filter(env=3)
@property
def get_server_product_evn(self):
return self.servers_env.filter(env=4)
@property
def get_data_test_env(self):
return self.data_env.filter(env=1)
@property
def get_data_development_env(self):
return self.data_env.filter(env=2)
@property
def get_data_preprouduct_env(self):
return self.data_env.filter(env=3)
@property
def get_data_product_evn(self):
return self.data_env.filter(env=4)
class ProjectServer(models.Model):
    """
    Server configuration for one project environment (服务器).
    """
    # Environment codes; the same scheme is used by ProjectDataServer.
    TEST_ENV = 1  # test environment
    DEVELOPMENT_ENV = 2  # development environment
    PREPRODUCT_ENV = 3  # pre-production environment
    PRODUCT_ENV = 4  # production environment
    env_choice = [
        (TEST_ENV, "测试环境"),
        (DEVELOPMENT_ENV, "开发环境"),
        (PREPRODUCT_ENV, "预生产环境"),
        (PRODUCT_ENV, "生产环境"),
    ]
    project = models.ForeignKey(ProjectManager, on_delete=models.CASCADE, related_name="servers_env", verbose_name="项目",help_text="项目Id")
    env = models.IntegerField(choices=env_choice, default=TEST_ENV, verbose_name="环境", help_text="项目环境")
    name = models.CharField(max_length=30, verbose_name="服务器名",help_text="服务器名")
    # Host address: either an IP or a URL.
    ip_url = models.CharField(max_length=50, verbose_name="ip_or_url",help_text="ip_or_url")
    desc = models.CharField(max_length=200, verbose_name="环境描述",help_text="环境描述")
    custom_variable = models.CharField(max_length=30, verbose_name="自定义变量",help_text="自定义变量")

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = "服务器"
        verbose_name_plural = verbose_name
        db_table = "project_server"
        ordering = ["id"]
        # At most one server row per (project, environment) pair.
        unique_together = ("project", "env")
class ProjectDataServer(models.Model):
    """
    Database configuration for one project environment (数据库).
    """
    # Environment codes; the same scheme is used by ProjectServer.
    TEST_ENV = 1  # test environment
    DEVELOPMENT_ENV = 2  # development environment
    PREPRODUCT_ENV = 3  # pre-production environment
    PRODUCT_ENV = 4  # production environment
    env_choice = [
        (TEST_ENV, "测试环境"),
        (DEVELOPMENT_ENV, "开发环境"),
        (PREPRODUCT_ENV, "预生产环境"),
        (PRODUCT_ENV, "生产环境"),
    ]
    project = models.ForeignKey(ProjectManager, on_delete=models.CASCADE, related_name="data_env", verbose_name="项目",
                                help_text="项目Id")
    env = models.IntegerField(choices=env_choice, default=TEST_ENV, verbose_name="环境", help_text="项目环境")
    name = models.CharField(max_length=30, verbose_name="数据库名", blank=True,help_text="数据名")
    custom_variable = models.CharField(max_length=30, verbose_name="自定义变量",help_text="自定义变量")
    ip = models.GenericIPAddressField(null=True, blank=True, verbose_name="ip",help_text="数据库IP")
    port = models.IntegerField(verbose_name="端口",help_text="端口",default=8080)
    # NOTE(review): the password is stored as plain text — confirm whether
    # encryption at rest is required for this deployment.
    pwd = models.CharField(max_length=20, verbose_name="密码",help_text="密码")
    username = models.CharField(max_length=30,verbose_name="用户名",blank=True,help_text="用户名")

    class Meta:
        verbose_name = "数据库"
        verbose_name_plural = verbose_name
        db_table = "project_db"
        ordering = ["id"]
        # At most one database row per (project, environment) pair.
        unique_together = ("project", "env")
| 38.692308 | 138 | 0.656285 |
acdeff0fde048adbf70af2f99e540fa54cf6c397 | 27,358 | py | Python | NCRFpp/utils/data.py | aghie/parsing-as-pretraining | 652ce965546515787f4d2132c16221f6d36be01a | [
"MIT"
] | 13 | 2020-02-07T12:54:40.000Z | 2021-04-07T09:38:29.000Z | NCRFpp/utils/data.py | aghie/parsing-as-pretraining | 652ce965546515787f4d2132c16221f6d36be01a | [
"MIT"
] | 1 | 2021-10-20T20:25:59.000Z | 2021-10-20T20:25:59.000Z | NCRFpp/utils/data.py | aghie/parsing-as-pretraining | 652ce965546515787f4d2132c16221f6d36be01a | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author: Jie
# @Date: 2017-06-14 17:34:32
# @Last Modified by: Jie Yang, Contact: jieynlp@gmail.com
# @Last Modified time: 2019-01-25 20:25:59
from __future__ import print_function
from __future__ import absolute_import
import sys
from .alphabet import Alphabet
from .functions import *
try:
import cPickle as pickle
except ImportError:
import pickle as pickle
START = "</s>"
UNKNOWN = "</unk>"
PADDING = "</pad>"
class Data:
def __init__(self):
    """Initialize every configuration default used across the NCRF++ pipeline.

    Attributes are grouped as: task/alphabet settings, I/O paths, cached
    instances and embeddings, network choices, training setup,
    hyperparameters, and parsing-/ELMo-specific switches. read_config()
    later overrides any of these from a config file.
    """
    self.sentence_classification = False
    self.MAX_SENTENCE_LENGTH = 250
    self.MAX_WORD_LENGTH = -1
    self.number_normalized = True
    self.norm_word_emb = False
    self.norm_char_emb = False
    self.word_alphabet = Alphabet('word')
    self.char_alphabet = Alphabet('character')
    self.feature_name = []
    self.feature_alphabets = []
    self.feature_num = len(self.feature_alphabets)
    self.feat_config = None

    self.label_alphabet = Alphabet('label',True)
    self.tagScheme = "NoSeg" ## BMES/BIO
    self.split_token = ' ||| '
    self.seg = True

    ### I/O
    self.train_dir = None
    self.dev_dir = None
    self.test_dir = None
    self.raw_dir = None

    self.decode_dir = None
    self.dset_dir = None ## data vocabulary related file
    self.model_dir = None ## model save file
    self.load_model_dir = None ## model load file

    self.word_emb_dir = None
    self.char_emb_dir = None
    self.feature_emb_dirs = []

    # Instances as raw text, one list per split.
    self.train_texts = []
    self.dev_texts = []
    self.test_texts = []
    self.raw_texts = []

    # Instances as alphabet-index ids, parallel to the *_texts lists.
    self.train_Ids = []
    self.dev_Ids = []
    self.test_Ids = []
    self.raw_Ids = []

    self.pretrain_word_embedding = None
    self.pretrain_char_embedding = None
    self.pretrain_feature_embeddings = []

    self.label_size = 0
    self.word_alphabet_size = 0
    self.char_alphabet_size = 0
    self.label_alphabet_size = 0
    self.feature_alphabet_sizes = []
    self.feature_emb_dims = []
    self.norm_feature_embs = []
    self.word_emb_dim = 50
    self.char_emb_dim = 30

    ###Networks
    self.word_feature_extractor = "LSTM" ## "LSTM"/"CNN"/"GRU"/
    self.use_char = True
    self.char_feature_extractor = "CNN" ## "LSTM"/"CNN"/"GRU"/None
    self.use_crf = True
    self.nbest = None

    ## Training
    self.average_batch_loss = False
    self.optimizer = "SGD" ## "SGD"/"AdaGrad"/"AdaDelta"/"RMSProp"/"Adam"
    self.status = "train"

    ### Hyperparameters
    self.HP_cnn_layer = 4
    self.HP_iteration = 100
    self.HP_batch_size = 10
    self.HP_char_hidden_dim = 50
    self.HP_hidden_dim = 200
    self.HP_dropout = 0.5
    self.HP_lstm_layer = 1
    self.HP_bilstm = True
    self.HP_gpu = False
    self.HP_lr = 0.015
    self.HP_lr_decay = 0.05
    self.HP_clip = None
    self.HP_momentum = 0
    self.HP_l2 = 1e-8

    #To optimize the models based on the evalb score
    self.optimize_with_evalb = False
    self.optimize_with_las = False
    #Run ELMo embeddings
    self.use_elmo = False
    self.elmo_size = 1024
    self.contextualize = False
    self.fine_tune_emb = True
    self.use_features=False
def show_data_summary(self):
    """Print a human-readable summary of paths, alphabet sizes, model
    choices and hyperparameters, then flush stdout so the banner appears
    before any training output."""
    print("++"*50)
    print("DATA SUMMARY START:")
    print(" I/O:")
    if self.sentence_classification:
        print(" Start Sentence Classification task...")
    else:
        print(" Start Sequence Labeling task...")
    print(" Tag scheme: %s"%(self.tagScheme))
    print(" Split token: %s"%(self.split_token))
    print(" MAX SENTENCE LENGTH: %s"%(self.MAX_SENTENCE_LENGTH))
    print(" MAX WORD LENGTH: %s"%(self.MAX_WORD_LENGTH))
    print(" Number normalized: %s"%(self.number_normalized))
    print(" Word alphabet size: %s"%(self.word_alphabet_size))
    print(" Char alphabet size: %s"%(self.char_alphabet_size))
    print(" Label alphabet size: %s"%(self.label_alphabet_size))
    print(" Word embedding dir: %s"%(self.word_emb_dir))
    print(" Char embedding dir: %s"%(self.char_emb_dir))
    print(" Word embedding size: %s"%(self.word_emb_dim))
    print(" Char embedding size: %s"%(self.char_emb_dim))
    print(" Norm word emb: %s"%(self.norm_word_emb))
    print(" Norm char emb: %s"%(self.norm_char_emb))
    print(" Train file directory: %s"%(self.train_dir))
    print(" Dev file directory: %s"%(self.dev_dir))
    print(" Test file directory: %s"%(self.test_dir))
    print(" Raw file directory: %s"%(self.raw_dir))
    print(" Dset file directory: %s"%(self.dset_dir))
    print(" Model file directory: %s"%(self.model_dir))
    print(" Loadmodel directory: %s"%(self.load_model_dir))
    print(" Decode file directory: %s"%(self.decode_dir))
    print(" Train instance number: %s"%(len(self.train_texts)))
    print(" Dev instance number: %s"%(len(self.dev_texts)))
    print(" Test instance number: %s"%(len(self.test_texts)))
    print(" Raw instance number: %s"%(len(self.raw_texts)))
    if self.use_features:
        # One summary line per discrete feature column.
        print(" FEATURE num: %s"%(self.feature_num))
        for idx in range(self.feature_num):
            print(" Fe: %s alphabet size: %s"%(self.feature_alphabets[idx].name, self.feature_alphabet_sizes[idx]))
            print(" Fe: %s embedding dir: %s"%(self.feature_alphabets[idx].name, self.feature_emb_dirs[idx]))
            print(" Fe: %s embedding size: %s"%(self.feature_alphabets[idx].name, self.feature_emb_dims[idx]))
            print(" Fe: %s norm emb: %s"%(self.feature_alphabets[idx].name, self.norm_feature_embs[idx]))
    print(" "+"++"*20)
    print(" Model Network:")
    print(" Model use_crf: %s"%(self.use_crf))
    print(" Model word extractor: %s"%(self.word_feature_extractor))
    print(" Model use_char: %s"%(self.use_char))
    if self.use_char:
        print(" Model char extractor: %s"%(self.char_feature_extractor))
        print(" Model char_hidden_dim: %s"%(self.HP_char_hidden_dim))
    print(" "+"++"*20)
    print(" Training:")
    print(" Optimizer: %s"%(self.optimizer))
    print(" Iteration: %s"%(self.HP_iteration))
    print(" BatchSize: %s"%(self.HP_batch_size))
    print(" Average batch loss: %s"%(self.average_batch_loss))
    print(" "+"++"*20)
    print(" Hyperparameters:")
    print(" Hyper lr: %s"%(self.HP_lr))
    print(" Hyper lr_decay: %s"%(self.HP_lr_decay))
    print(" Hyper HP_clip: %s"%(self.HP_clip))
    print(" Hyper momentum: %s"%(self.HP_momentum))
    print(" Hyper l2: %s"%(self.HP_l2))
    print(" Hyper hidden_dim: %s"%(self.HP_hidden_dim))
    print(" Hyper dropout: %s"%(self.HP_dropout))
    print(" Hyper lstm_layer: %s"%(self.HP_lstm_layer))
    print(" Hyper bilstm: %s"%(self.HP_bilstm))
    print(" Hyper GPU: %s"%(self.HP_gpu))
    print("DATA SUMMARY END.")
    print("++"*50)
    print("Contextualizing word embeddings with LSTMs: ", self.contextualize)
    sys.stdout.flush()
def initial_feature_alphabets(self):
    """Create one alphabet per feature column found in the train file header.

    Reads only the first line of self.train_dir. Every column between the
    word (first) and the label (last) is treated as a "[NAME]value" feature;
    per-feature embedding settings are then taken from self.feat_config
    when present.
    """
    if self.sentence_classification:
        ## if sentence classification data format, splited by '\t'
        items = open(self.train_dir,'r').readline().strip('\n').split('\t')
    else:
        ## if sequence labeling data format i.e. CoNLL 2003, split by ' '
        items = open(self.train_dir,'r').readline().strip('\n').split()
    total_column = len(items)
    #if total_column > 2 and self.use_features:
    if total_column > 2:
        for idx in range(1, total_column-1):
            # Feature columns look like "[POS]NN"; the "[POS]" prefix names them.
            feature_prefix = items[idx].split(']',1)[0]+"]"
            self.feature_alphabets.append(Alphabet(feature_prefix))
            self.feature_name.append(feature_prefix)
    self.feature_num = len(self.feature_alphabets)
    self.pretrain_feature_embeddings = [None]*self.feature_num
    self.feature_emb_dims = [20]*self.feature_num
    self.feature_emb_dirs = [None]*self.feature_num
    self.norm_feature_embs = [False]*self.feature_num
    self.feature_alphabet_sizes = [0]*self.feature_num
    if self.feat_config:
        # Override per-feature embedding size/dir/normalization from the config.
        for idx in range(self.feature_num):
            if self.feature_name[idx] in self.feat_config:
                self.feature_emb_dims[idx] = self.feat_config[self.feature_name[idx]]['emb_size']
                self.feature_emb_dirs[idx] = self.feat_config[self.feature_name[idx]]['emb_dir']
                self.norm_feature_embs[idx] = self.feat_config[self.feature_name[idx]]['emb_norm']
    # exit(0)
def build_alphabet(self, input_file):
    """Scan *input_file* once and register every word/char/label/feature value.

    Also refreshes the cached alphabet sizes and infers the tagging scheme
    (BMES vs BIO) from the label prefixes seen in the data.
    """
    in_lines = open(input_file,'r').readlines()
    for line in in_lines:
        # Lines shorter than 3 chars are treated as sentence separators.
        if len(line) > 2:
            ## if sentence classification data format, splited by \t
            if self.sentence_classification:
                pairs = line.strip().split(self.split_token)
                sent = pairs[0]
                if sys.version_info[0] < 3:
                    sent = sent.decode('utf-8')
                words = sent.split()
                for word in words:
                    if self.number_normalized:
                        word = normalize_word(word)
                    self.word_alphabet.add(word)
                    for char in word:
                        self.char_alphabet.add(char)
                label = pairs[-1]
                self.label_alphabet.add(label)
                ## build feature alphabet
                for idx in range(self.feature_num):
                    feat_idx = pairs[idx+1].split(']',1)[-1]
                    self.feature_alphabets[idx].add(feat_idx)
            ## if sequence labeling data format i.e. CoNLL 2003
            else:
                pairs = line.strip().split()
                word = pairs[0]
                if sys.version_info[0] < 3:
                    word = word.decode('utf-8')
                if self.number_normalized:
                    word = normalize_word(word)
                label = pairs[-1]
                self.label_alphabet.add(label)
                self.word_alphabet.add(word)
                ## build feature alphabet
                for idx in range(self.feature_num):
                    feat_idx = pairs[idx+1].split(']',1)[-1]
                    self.feature_alphabets[idx].add(feat_idx)
                for char in word:
                    self.char_alphabet.add(char)
    self.word_alphabet_size = self.word_alphabet.size()
    self.char_alphabet_size = self.char_alphabet.size()
    self.label_alphabet_size = self.label_alphabet.size()
    for idx in range(self.feature_num):
        self.feature_alphabet_sizes[idx] = self.feature_alphabets[idx].size()
    # Infer the tagging scheme: any "S-" label implies BMES; otherwise a
    # "B-" label alone implies BIO.
    startS = False
    startB = False
    for label,_ in self.label_alphabet.iteritems():
        if "S-" in label.upper():
            startS = True
        elif "B-" in label.upper():
            startB = True
    if startB:
        if startS:
            self.tagScheme = "BMES"
        else:
            self.tagScheme = "BIO"
    if self.sentence_classification:
        self.tagScheme = "Not sequence labeling task"
def fix_alphabet(self):
self.word_alphabet.close()
self.char_alphabet.close()
self.label_alphabet.close()
for idx in range(self.feature_num):
self.feature_alphabets[idx].close()
def build_pretrain_emb(self):
    """Load pretrained word/char/feature embeddings for every configured dir.

    Updates the corresponding *_emb_dim attributes to the dimensionality
    actually found in each embedding file.
    """
    if self.word_emb_dir:
        print("Load pretrained word embedding, norm: %s, dir: %s"%(self.norm_word_emb, self.word_emb_dir))
        self.pretrain_word_embedding, self.word_emb_dim = build_pretrain_embedding(self.word_emb_dir, self.word_alphabet, self.word_emb_dim, self.norm_word_emb)
    if self.char_emb_dir:
        print("Load pretrained char embedding, norm: %s, dir: %s"%(self.norm_char_emb, self.char_emb_dir))
        self.pretrain_char_embedding, self.char_emb_dim = build_pretrain_embedding(self.char_emb_dir, self.char_alphabet, self.char_emb_dim, self.norm_char_emb)
    for idx in range(self.feature_num):
        if self.feature_emb_dirs[idx]:
            print("Load pretrained feature %s embedding:, norm: %s, dir: %s"%(self.feature_name[idx], self.norm_feature_embs[idx], self.feature_emb_dirs[idx]))
            self.pretrain_feature_embeddings[idx], self.feature_emb_dims[idx] = build_pretrain_embedding(self.feature_emb_dirs[idx], self.feature_alphabets[idx], self.feature_emb_dims[idx], self.norm_feature_embs[idx])
def generate_instance(self, name):
    """Build (texts, Ids) instances for one split: train/dev/test/raw."""
    self.fix_alphabet()
    split_dirs = {
        "train": self.train_dir,
        "dev": self.dev_dir,
        "test": self.test_dir,
        "raw": self.raw_dir,
    }
    if name in split_dirs:
        texts, ids = read_instance(split_dirs[name], self.word_alphabet, self.char_alphabet, self.feature_alphabets, self.label_alphabet, self.number_normalized, self.MAX_SENTENCE_LENGTH, self.sentence_classification, self.split_token)
        setattr(self, name + "_texts", texts)
        setattr(self, name + "_Ids", ids)
    else:
        print("Error: you can only generate train/dev/test instance! Illegal input:%s"%(name))
def write_decoded_results(self, predict_results, name):
    """Write predictions for one split (train/dev/test/raw) to self.decode_dir.

    Sentence classification emits one "<sentence>TAB<label>" line per
    instance; sequence labeling emits one tab-separated "<inputs...> <tag>"
    line per token with a blank line between sentences.
    """
    sent_num = len(predict_results)
    content_list = []
    if name == 'raw':
        content_list = self.raw_texts
    elif name == 'test':
        content_list = self.test_texts
    elif name == 'dev':
        content_list = self.dev_texts
    elif name == 'train':
        content_list = self.train_texts
    else:
        print("Error: illegal name during writing predict result, name should be within train/dev/test/raw !")
    assert(sent_num == len(content_list))
    fout = open(self.decode_dir,'w')
    for idx in range(sent_num):
        if self.sentence_classification:
            fout.write(" ".join(content_list[idx][0])+"\t"+predict_results[idx]+ '\n')
        else:
            sent_length = len(predict_results[idx]) #Index 0 to know the length of the input sentence
            for idy in range(sent_length):
                inputs = []
                # Re-emit every original input column (the last two entries of
                # content_list[idx] are skipped; list-valued columns are flattened).
                for id_input in range(len(content_list[idx])-2):
                    if content_list[idx][id_input][0] != []:
                        if type(content_list[idx][id_input][idy]) == type([]):
                            for feature in content_list[idx][id_input][idy]:
                                inputs.append(feature)
                        else:
                            inputs.append(content_list[idx][id_input][idy])
                fout.write( "\t".join(inputs) + "\t" + predict_results[idx][idy] + '\n')
            fout.write('\n')
    print("Predict %s result has been written into file. %s"%(name, self.decode_dir))
def load(self,data_file):
f = open(data_file, 'rb')
tmp_dict = pickle.load(f)
f.close()
self.__dict__.update(tmp_dict)
def save(self,save_file):
f = open(save_file, 'wb')
pickle.dump(self.__dict__, f, 2)
f.close()
def write_nbest_decoded_results(self, predict_results, pred_scores, name):
    """Write n-best predictions plus their scores to self.decode_dir.

    predict_results: [whole_sent_num, nbest, each_sent_length]
    pred_scores:     [whole_sent_num, nbest]
    Each sentence starts with a "# score1 score2 ..." line, followed by one
    line per token holding the token and its nbest candidate labels.
    """
    ## predict_results : [whole_sent_num, nbest, each_sent_length]
    ## pred_scores: [whole_sent_num, nbest]
    fout = open(self.decode_dir,'w')
    sent_num = len(predict_results)
    content_list = []
    if name == 'raw':
        content_list = self.raw_texts
    elif name == 'test':
        content_list = self.test_texts
    elif name == 'dev':
        content_list = self.dev_texts
    elif name == 'train':
        content_list = self.train_texts
    else:
        print("Error: illegal name during writing predict result, name should be within train/dev/test/raw !")
    assert(sent_num == len(content_list))
    assert(sent_num == len(pred_scores))
    for idx in range(sent_num):
        sent_length = len(predict_results[idx][0])
        nbest = len(predict_results[idx])
        score_string = "# "
        for idz in range(nbest):
            score_string += format(pred_scores[idx][idz], '.4f')+" "
        fout.write(score_string.strip() + "\n")
        for idy in range(sent_length):
            try: # Will fail with python3
                label_string = content_list[idx][0][idy].encode('utf-8') + " "
            except:
                # Python 3 path: str + bytes concatenation above raises TypeError.
                label_string = content_list[idx][0][idy] + " "
            for idz in range(nbest):
                label_string += predict_results[idx][idz][idy]+" "
            label_string = label_string.strip() + "\n"
            fout.write(label_string)
        fout.write('\n')
    fout.close()
    print("Predict %s %s-best result has been written into file. %s"%(name,nbest, self.decode_dir))
def read_config(self, config_file):
    """Populate this Data object from a key=value config file.

    Only keys present in the file are applied; anything absent keeps the
    default from __init__. Boolean values are decoded with str2bool and
    numeric hyperparameters are cast to int/float, exactly as before — the
    repetitive ``the_item = ...; if the_item in config:`` blocks are
    replaced by one table-driven helper.
    """
    config = config_file_to_dict(config_file)

    def assign(item, attr, convert=None):
        # Copy config[item] onto self.<attr>, converting the raw string first.
        if item in config:
            value = config[item]
            setattr(self, attr, convert(value) if convert else value)

    ## read data (I/O paths and preprocessing switches):
    for key in ('train_dir', 'dev_dir', 'test_dir', 'raw_dir', 'decode_dir',
                'dset_dir', 'model_dir', 'load_model_dir', 'word_emb_dir',
                'char_emb_dir'):
        assign(key, key)
    assign('MAX_SENTENCE_LENGTH', 'MAX_SENTENCE_LENGTH', int)
    assign('MAX_WORD_LENGTH', 'MAX_WORD_LENGTH', int)
    for key in ('norm_word_emb', 'norm_char_emb', 'number_normalized',
                'sentence_classification', 'seg'):
        assign(key, key, str2bool)
    assign('word_emb_dim', 'word_emb_dim', int)
    assign('char_emb_dim', 'char_emb_dim', int)

    ## read network:
    assign('use_crf', 'use_crf', str2bool)
    assign('use_char', 'use_char', str2bool)
    assign('word_seq_feature', 'word_feature_extractor')
    assign('char_seq_feature', 'char_feature_extractor')
    assign('nbest', 'nbest', int)
    assign('feature', 'feat_config')  ## feat_config is a dict

    ## read training setting:
    assign('optimizer', 'optimizer')
    assign('ave_batch_loss', 'average_batch_loss', str2bool)
    assign('status', 'status')

    ## read Hyperparameters:
    assign('cnn_layer', 'HP_cnn_layer', int)
    assign('iteration', 'HP_iteration', int)
    assign('batch_size', 'HP_batch_size', int)
    assign('char_hidden_dim', 'HP_char_hidden_dim', int)
    assign('hidden_dim', 'HP_hidden_dim', int)
    assign('dropout', 'HP_dropout', float)
    assign('lstm_layer', 'HP_lstm_layer', int)
    assign('bilstm', 'HP_bilstm', str2bool)
    assign('gpu', 'HP_gpu', str2bool)
    assign('learning_rate', 'HP_lr', float)
    assign('lr_decay', 'HP_lr_decay', float)
    assign('clip', 'HP_clip', float)
    assign('momentum', 'HP_momentum', float)
    assign('l2', 'HP_l2', float)

    ## no seg (and no CRF) for sentence classification
    if self.sentence_classification:
        self.seg = False
        self.use_crf = False

    # Stuff specific for constituency/dependency parsing. Attributes such as
    # en2mt/evalb only come into existence when the key is present, matching
    # the original behaviour.
    for key in ('en2mt', 'evaluate', 'evalb', 'conll_ud', 'gold_dev_trees',
                'tree2labels', 'dep2labels'):
        assign(key, key)
    for key in ('optimize_with_evalb', 'optimize_with_las', 'use_features',
                'use_elmo', 'contextualize', 'fine_tune_emb'):
        assign(key, key, str2bool)
def config_file_to_dict(input_file):
    """Parse a key=value config file into a dict.

    Lines starting with '#' are skipped; '#' after the value starts an
    inline comment; lines without '=' are ignored. The special key
    "feature" may appear multiple times and accumulates a nested dict:
    {feature_name: {"emb_dir": ..., "emb_size": ..., "emb_norm": ...}}.
    """
    config = {}
    fins = open(input_file,'r').readlines()
    for line in fins:
        if len(line) > 0 and line[0] == "#":
            continue
        if "=" in line:
            # Strip any inline comment, then split on the first '=' only.
            pair = line.strip().split('#',1)[0].split('=',1)
            item = pair[0]
            if item=="feature":
                if item not in config:
                    feat_dict = {}
                    config[item]= feat_dict
                feat_dict = config[item]
                # Value format: "<name> [emb_dir=..] [emb_size=..] [emb_norm=..]"
                new_pair = pair[-1].split()
                feat_name = new_pair[0]
                one_dict = {}
                one_dict["emb_dir"] = None
                one_dict["emb_size"] = 10
                one_dict["emb_norm"] = False
                if len(new_pair) > 1:
                    for idx in range(1,len(new_pair)):
                        conf_pair = new_pair[idx].split('=')
                        if conf_pair[0] == "emb_dir":
                            one_dict["emb_dir"]=conf_pair[-1]
                        elif conf_pair[0] == "emb_size":
                            one_dict["emb_size"]=int(conf_pair[-1])
                        elif conf_pair[0] == "emb_norm":
                            one_dict["emb_norm"]=str2bool(conf_pair[-1])
                feat_dict[feat_name] = one_dict
                # print "feat",feat_dict
            else:
                if item in config:
                    print("Warning: duplicated config item found: %s, updated."%(pair[0]))
                config[item] = pair[-1]
    return config
def str2bool(string):
    """Map the literal spellings 'True'/'true'/'TRUE' to True; anything else is False."""
    return string in ("True", "true", "TRUE")
| 40.650817 | 259 | 0.567037 |
acdf000354b7d9e7ed2542f7e04573d9c4c9a841 | 3,415 | py | Python | src/spaceone/inventory/connector/aws_secrets_manager_connector/schema/service_type.py | spaceone-dev/plugin-aws-cloud-service-inven-collector | aa252a41940e0941d4b0f7be7fc05d152da654dd | [
"Apache-2.0"
] | null | null | null | src/spaceone/inventory/connector/aws_secrets_manager_connector/schema/service_type.py | spaceone-dev/plugin-aws-cloud-service-inven-collector | aa252a41940e0941d4b0f7be7fc05d152da654dd | [
"Apache-2.0"
] | 1 | 2022-02-10T04:38:11.000Z | 2022-02-10T04:38:11.000Z | src/spaceone/inventory/connector/aws_secrets_manager_connector/schema/service_type.py | spaceone-dev/plugin-aws-cloud-service-inven-collector | aa252a41940e0941d4b0f7be7fc05d152da654dd | [
"Apache-2.0"
] | 1 | 2021-11-15T05:19:44.000Z | 2021-11-15T05:19:44.000Z | import os
from spaceone.inventory.libs.common_parser import *
from spaceone.inventory.libs.schema.dynamic_widget import ChartWidget, CardWidget
from spaceone.inventory.libs.schema.dynamic_field import TextDyField, DateTimeDyField, SearchField, ListDyField
from spaceone.inventory.libs.schema.resource import CloudServiceTypeResource, CloudServiceTypeResponse, \
CloudServiceTypeMeta
current_dir = os.path.abspath(os.path.dirname(__file__))
secret_count_per_region_conf = os.path.join(current_dir, 'widget/secret_count_per_region.yaml')
secret_count_per_account_conf = os.path.join(current_dir, 'widget/secret_count_per_account.yaml')
# Cloud-service-type descriptor for AWS Secrets Manager "Secret" resources.
cst_secret = CloudServiceTypeResource()
cst_secret.name = 'Secret'
cst_secret.provider = 'aws'
cst_secret.group = 'SecretsManager'
cst_secret.labels = ['Security']
# Marks this as the primary (default-listed) type of the SecretsManager group.
cst_secret.is_primary = True
cst_secret.service_code = 'AWSSecretsManager'
cst_secret.tags = {
    'spaceone:icon': 'https://spaceone-custom-assets.s3.ap-northeast-2.amazonaws.com/console-assets/icons/cloud-services/aws/AWS-Secrets-Manager.svg',
}
cst_secret._metadata = CloudServiceTypeMeta.set_meta(
fields=[
TextDyField.data_source('Name', 'name'),
TextDyField.data_source('Description', 'data.description'),
DateTimeDyField.data_source('Last Retrieved', 'data.last_accessed_date'),
TextDyField.data_source('ARN', 'data.arn', options={
'is_optional': True
}),
TextDyField.data_source('KMS Key ID', 'data.kms_key_id', options={
'is_optional': True
}),
TextDyField.data_source('Rotation Enabled', 'data.rotation_enabled', options={
'is_optional': True
}),
TextDyField.data_source('Rotation Lambda ARN', 'data.rotation_lambda_arn', options={
'is_optional': True
}),
ListDyField.data_source('Rotation Rule: Automatically After Days', 'data.rotation_rules', options={
'sub_key': 'automatically_after_days',
'delimiter': '<br>',
'is_optional': True
}),
DateTimeDyField.data_source('Last Rotated Date', 'data.last_rotated_date', options={
'is_optional': True
}),
DateTimeDyField.data_source('Last Changed Date', 'data.last_changed_date', options={
'is_optional': True
}),
DateTimeDyField.data_source('Last Accessed Date', 'data.last_accessed_date', options={
'is_optional': True
}),
DateTimeDyField.data_source('Last Deleted Date', 'data.deleted_date', options={
'is_optional': True
}),
TextDyField.data_source('Owning Service', 'data.owning_service', options={
'is_optional': True
})
],
search=[
SearchField.set(name='Name', key='name'),
SearchField.set(name='ARN', key='data.arn'),
SearchField.set(name='Last Changed Time', key='data.last_changed_date', data_type='datetime'),
SearchField.set(name='Last Accessed Time', key='data.last_accessed_date', data_type='datetime'),
SearchField.set(name='Rotation Enabled', key='data.rotation_enabled', data_type='boolean')
],
widget=[
ChartWidget.set(**get_data_from_yaml(secret_count_per_region_conf)),
ChartWidget.set(**get_data_from_yaml(secret_count_per_account_conf)),
]
)
CLOUD_SERVICE_TYPES = [
CloudServiceTypeResponse({'resource': cst_secret}),
]
| 43.782051 | 150 | 0.692533 |
acdf005b75d8402ed630a0d4b3b88f768eb10564 | 2,975 | py | Python | simple_amqp_rpc/gevent.py | rudineirk/py-simple-amqp-rpc | 823b6efe271732495d4e3ccdcb9f4d85138c1d42 | [
"MIT"
] | null | null | null | simple_amqp_rpc/gevent.py | rudineirk/py-simple-amqp-rpc | 823b6efe271732495d4e3ccdcb9f4d85138c1d42 | [
"MIT"
] | 1 | 2021-06-01T22:28:43.000Z | 2021-06-01T22:28:43.000Z | simple_amqp_rpc/gevent.py | rudineirk/py-simple-amqp-rpc | 823b6efe271732495d4e3ccdcb9f4d85138c1d42 | [
"MIT"
] | null | null | null | import traceback
from gevent.event import AsyncResult
from simple_amqp import AmqpMsg, AmqpParameters
from simple_amqp.gevent import GeventAmqpConnection
from simple_amqp_rpc import RpcCall, RpcResp
from simple_amqp_rpc.base import BaseAmqpRpc
from simple_amqp_rpc.consts import (
CALL_ARGS_MISMATCH,
CALL_ERROR,
OK,
RPC_CALL_TIMEOUT
)
class GeventAmqpRpc(BaseAmqpRpc):
    """Gevent-based AMQP RPC endpoint.

    Outgoing calls publish a message and block the greenlet on a
    ``gevent.event.AsyncResult`` until the matching response message
    (correlated by ``correlation_id``) arrives or the timeout expires.
    """

    def __init__(
            self,
            conn: GeventAmqpConnection = None,
            params: AmqpParameters = None,
            route: str='service.name',
            call_timeout: int=RPC_CALL_TIMEOUT,
            logger=None,
    ):
        super().__init__(
            conn=conn,
            params=params,
            route=route,
            call_timeout=call_timeout,
            # Bug fix: previously ``logger=None`` was passed unconditionally,
            # silently discarding a caller-supplied logger.
            logger=logger,
        )
        # Maps correlation_id -> (AsyncResult, route) for in-flight calls.
        self._response_futures = {}

    def start(self, auto_reconnect: bool=True):
        """Register the setup/listen stages and start the AMQP connection."""
        self.conn.add_stage(self.setup_stage)
        self.conn.add_stage(self.listen_stage)
        self.conn.start(auto_reconnect)

    def stop(self):
        """Stop the underlying AMQP connection."""
        self.conn.stop()

    def _create_conn(self, params: AmqpParameters):
        """Factory hook: build the gevent flavour of the AMQP connection."""
        return GeventAmqpConnection(params)

    def recv_call(self, call: RpcCall) -> RpcResp:
        """Dispatch an incoming RPC call to the registered service method.

        Returns an OK response with the method's result, an args-mismatch
        response when the call signature does not fit, or a generic error
        response for any other exception (which is either printed or passed
        to the registered error handlers).
        """
        self.log_call_recv(call)
        method, error = self._get_method(call.service, call.method)
        if error:
            return error

        resp = None
        try:
            resp = method(*call.args)
        except TypeError:
            # NOTE(review): this also catches TypeErrors raised *inside* the
            # method body, not only signature mismatches — kept as-is to
            # preserve existing behaviour.
            return RpcResp(
                status=CALL_ARGS_MISMATCH,
                body='Invalid call arguments',
            )
        except Exception as e:
            if not self._recv_error_handlers:
                traceback.print_exc()
            else:
                for handler in self._recv_error_handlers:
                    handler(e)

            return RpcResp(
                status=CALL_ERROR,
            )

        return RpcResp(
            status=OK,
            body=resp,
        )

    def _send_call_msg(
            self,
            reply_id: str,
            timeout: int,
            msg: AmqpMsg,
            route: str,
    ) -> RpcResp:
        """Publish a call message and block until its response arrives.

        Raises gevent's timeout error if no response is set within
        ``timeout`` seconds.
        """
        self._rpc_call_channel.publish(msg)
        future = AsyncResult()
        self._response_futures[reply_id] = (future, route)
        return future.get(timeout=timeout)

    def _on_call_message(self, msg: AmqpMsg) -> bool:
        """Handle an incoming call message: execute it and publish the reply."""
        call = self._decode_call(msg)
        resp = self.recv_call(call)
        resp_msg = self._encode_resp(resp)
        resp_msg = resp_msg.replace(
            topic=msg.reply_to,
            correlation_id=msg.correlation_id,
        )
        self.conn.publish(self._rpc_call_channel, resp_msg)
        return True

    def _on_resp_message(self, msg: AmqpMsg):
        """Handle a response message: wake up the greenlet waiting on it.

        Unknown/late correlation ids are acknowledged and dropped.
        """
        try:
            (future, route) = self._response_futures.pop(msg.correlation_id)
        except KeyError:
            return True
        resp = self._decode_resp(msg, route)
        future.set(resp)
        return True
| 27.293578 | 76 | 0.585546 |
acdf00be5aa0ac44f86dcc33bd87442d06ec0d4c | 777 | py | Python | env/lib/python3.8/site-packages/ContactsUI/__init__.py | evilcomputer12/macOSAutoJoinAndRecordZoomSession | 2157de803c1d67fe493ff330f1558892507e8e49 | [
"MIT"
] | 2 | 2021-08-11T15:50:12.000Z | 2021-09-03T17:53:47.000Z | env/lib/python3.8/site-packages/ContactsUI/__init__.py | evilcomputer12/macOSAutoJoinAndRecordZoomSession | 2157de803c1d67fe493ff330f1558892507e8e49 | [
"MIT"
] | null | null | null | env/lib/python3.8/site-packages/ContactsUI/__init__.py | evilcomputer12/macOSAutoJoinAndRecordZoomSession | 2157de803c1d67fe493ff330f1558892507e8e49 | [
"MIT"
] | null | null | null | """
Python mapping for the ContactsUI framework.
This module does not contain docstrings for the wrapped code, check Apple's
documentation for details on how to use these functions and classes.
"""
import sys
import AppKit
import Contacts
import objc
from ContactsUI import _metadata
from ContactsUI import _ContactsUI
# Replace this package's entry in sys.modules with a lazy proxy module that
# resolves Objective-C names from the ContactsUI framework on first access.
sys.modules["ContactsUI"] = mod = objc.ObjCLazyModule(
    "ContactsUI",
    "com.apple.ContactsUI.framework",
    objc.pathForFramework("/System/Library/Frameworks/ContactsUI.framework"),
    _metadata.__dict__,
    None,
    {
        "__doc__": __doc__,
        "objc": objc,
        "__path__": __path__,
        "__loader__": globals().get("__loader__", None),
    },
    (_ContactsUI, AppKit, Contacts),
)

# The metadata module was only needed to construct the lazy module above.
del sys.modules["ContactsUI._metadata"]
acdf0105c7cc3389c821e212490709e05b9cfbb0 | 374 | py | Python | tests/test_nnb.py | thatch/bace | 218850f80601e63d75a2aaa5fddd8dff2dd0df8a | [
"MIT"
] | 8 | 2017-04-10T07:59:07.000Z | 2018-02-12T15:27:31.000Z | tests/test_nnb.py | thatch/bace | 218850f80601e63d75a2aaa5fddd8dff2dd0df8a | [
"MIT"
] | 4 | 2019-07-02T22:54:03.000Z | 2020-05-21T02:41:53.000Z | tests/test_nnb.py | thatch/bace | 218850f80601e63d75a2aaa5fddd8dff2dd0df8a | [
"MIT"
] | 5 | 2017-04-08T15:03:49.000Z | 2018-06-22T09:02:05.000Z | import unittest
from bace import NegationNB
from tests.data_feed import get_data
class TestCNB(unittest.TestCase):
def test_cnb(self):
cnb = NegationNB()
X_train, y_train, X_test, y_test = get_data()
score = cnb.fit(X_train, y_train).accuracy_score(X_test, y_test)
assert score > 0.80
if __name__ == '__main__':
unittest.main()
| 23.375 | 72 | 0.681818 |
acdf0270fa0d62346ba6c548a1c2e27235273ca4 | 1,056 | py | Python | lib/Progress.py | Hitachi/selinux-te-lookup | d9534536e517b4cfd29ceab2001a06a74a806ec5 | [
"MIT"
] | 2 | 2017-11-14T05:47:22.000Z | 2020-02-29T04:11:58.000Z | lib/Progress.py | Hitachi/selinux-te-lookup | d9534536e517b4cfd29ceab2001a06a74a806ec5 | [
"MIT"
] | null | null | null | lib/Progress.py | Hitachi/selinux-te-lookup | d9534536e517b4cfd29ceab2001a06a74a806ec5 | [
"MIT"
] | null | null | null | # coding: utf-8
# Copyright (c) 2017 Hitachi, Ltd. All Rights Reserved.
#
# Licensed under the MIT License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/MIT
#
# This file is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OF ANY KIND.
import sys
import time
import threading
class Progress(threading.Thread):
def __init__(self):
super(Progress, self).__init__()
self.setDaemon(True)
self.out = sys.stdout
self.time = 0
self.count = 0
self.active = True
def tick(self):
next_time = time.time()
if next_time - self.time > 0.25:
self.time = next_time
self.count = (self.count + 1 ) % 10
if self.count == 0:
self.out.write('\r\033[K')
else:
self.out.write("+")
self.out.flush()
    def stop(self):
        """Signal the run() loop to exit, then wait for the thread to finish.

        The flag must be cleared *before* join(), otherwise join() would
        wait forever on the still-spinning run() loop.
        """
        self.active = False
        self.join()
def silent(self):
self.out = open('/dev/null', 'w')
def run(self):
while self.active:
self.tick()
time.sleep(0.1)
self.out.write('\r\033[K')
self.out.flush() | 20.705882 | 55 | 0.614583 |
acdf028fee79f9f2e68bd3ec38a6136c48ca3dd0 | 6,336 | py | Python | nuplan/planning/simulation/observation/smart_agents/idm_agents/idm_policy.py | MCZhi/nuplan-devkit | 3c4f5b8dcd517b27cfd258915ca5fe5c54e3cb0c | [
"Apache-2.0"
] | null | null | null | nuplan/planning/simulation/observation/smart_agents/idm_agents/idm_policy.py | MCZhi/nuplan-devkit | 3c4f5b8dcd517b27cfd258915ca5fe5c54e3cb0c | [
"Apache-2.0"
] | null | null | null | nuplan/planning/simulation/observation/smart_agents/idm_agents/idm_policy.py | MCZhi/nuplan-devkit | 3c4f5b8dcd517b27cfd258915ca5fe5c54e3cb0c | [
"Apache-2.0"
] | null | null | null | from math import sqrt
from typing import Any, List
import numpy as np
from nuplan.planning.simulation.observation.smart_agents.idm_agents.idm_states import IDMAgentState, IDMLeadAgentState
from scipy.integrate import odeint, solve_ivp
class IDMPolicy:
    """
    An agent policy that describes the agent's behaviour w.r.t. a lead agent. The policy only controls the
    longitudinal states (progress, velocity) of the agent. These longitudinal states are used to propagate the agent
    along a given path.
    """

    def __init__(self, target_velocity: float,
                 min_gap_to_lead_agent: float,
                 headway_time: float,
                 accel_max: float,
                 decel_max: float):
        """
        Constructor for IDMPolicy
        :param target_velocity: Desired velocity in free traffic [m/s]
        :param min_gap_to_lead_agent: Minimum relative distance to lead vehicle [m]
        :param headway_time: Desired time headway. The minimum possible time to the vehicle in front [s]
        :param accel_max: maximum acceleration [m/s^2]
        :param decel_max: maximum deceleration (positive value) [m/s^2]
        """
        self._target_velocity = target_velocity
        self._min_gap_to_lead_agent = min_gap_to_lead_agent
        self._headway_time = headway_time
        self._accel_max = accel_max
        self._decel_max = decel_max

    @property
    def idm_params(self) -> List[float]:
        """ Returns the policy parameters as a list """
        return [self._target_velocity,
                self._min_gap_to_lead_agent,
                self._headway_time,
                self._accel_max,
                self._decel_max]

    @staticmethod
    def idm_model(time_points: List[float], state_variables: List[float],
                  lead_agent: List[float], params: List[float]) -> List[Any]:
        """
        Defines the differential equations for IDM.

        :param state_variables: vector of the state variables:
                  state_variables = [x_agent: progress,
                                     v_agent: velocity]
        :param time_points: time A sequence of time points for which to solve for the state variables
                  (unused by the model itself; present to match the ODE-solver callback signature)
        :param lead_agent: vector of the state variables for the lead vehicle:
                  lead_agent = [x_lead: progress,
                                v_lead: velocity,
                                l_r_lead: half length of the leading vehicle]
        :param params: vector of the parameters:
                  params = [target_velocity: desired velocity in free traffic,
                            min_gap_to_lead_agent: minimum relative distance to lead vehicle,
                            headway_time: desired time headway. The minimum possible time to the vehicle in front,
                            accel_max: maximum acceleration,
                            decel_max: maximum deceleration (positive value)]

        :return: system of differential equations [x_dot, v_agent_dot]
        """
        # state variables
        x_agent, v_agent = state_variables
        x_lead, v_lead, l_r_lead = lead_agent

        # parameters
        target_velocity, min_gap_to_lead_agent, headway_time, accel_max, decel_max = params
        acceleration_exponent = 4  # Usually set to 4

        # convenience definitions
        # s_star: desired dynamic gap; grows with speed and closing rate.
        s_star = min_gap_to_lead_agent + v_agent * headway_time + \
            (v_agent * (v_agent - v_lead)) / (2 * sqrt(accel_max * decel_max))
        s_alpha = max(x_lead - x_agent - l_r_lead, min_gap_to_lead_agent)  # clamp to avoid zero division

        # differential equations
        x_dot = v_agent
        v_agent_dot = accel_max * (1 - (v_agent / target_velocity)**acceleration_exponent - (s_star / s_alpha)**2)

        return [x_dot, v_agent_dot]

    def solve_forward_euler_idm_policy(self, agent: IDMAgentState, lead_agent: IDMLeadAgentState,
                                       sampling_time: float) -> IDMAgentState:
        """
        Solves an initial value problem for a system of ODEs using a single forward Euler step.
        This has the benefit of being differentiable.

        :param agent: the agent of interest
        :param lead_agent: the lead vehicle
        :param sampling_time: interval of integration
        :return: solution to the differential equations
        """
        params = self.idm_params
        x_dot, v_agent_dot = self.idm_model([], agent.to_array(), lead_agent.to_array(), params)
        # Acceleration is clamped to [-decel_max, accel_max] before integrating.
        return IDMAgentState(
            agent.progress + sampling_time * x_dot,
            agent.velocity + sampling_time * min(max(-self._decel_max, v_agent_dot), self._accel_max)
        )

    def solve_odeint_idm_policy(self, agent: IDMAgentState, lead_agent: IDMLeadAgentState,
                                sampling_time: float, solve_points: int = 10) -> IDMAgentState:
        """
        Solves an initial value problem for a system of ODEs using scipy odeint

        :param agent: the agent of interest
        :param lead_agent: the lead vehicle
        :param sampling_time: interval of integration
        :param solve_points: number of points for temporal resolution
        :return: solution to the differential equations
        """
        t = np.linspace(0, sampling_time, solve_points)
        solution = odeint(self.idm_model, agent.to_array(), t, args=(
            lead_agent.to_array(), self.idm_params,), tfirst=True)

        # return the last solution (state at t = sampling_time)
        return IDMAgentState(
            solution[-1][0],
            solution[-1][1]
        )

    def solve_ivp_idm_policy(self, agent: IDMAgentState,
                             lead_agent: IDMLeadAgentState,
                             sampling_time: float) -> IDMAgentState:
        """
        Solves an initial value problem for a system of ODEs using scipy RK45

        :param agent: the agent of interest
        :param lead_agent: the lead vehicle
        :param sampling_time: interval of integration
        :return: solution to the differential equations
        """
        t = (0, sampling_time)
        solution = solve_ivp(self.idm_model, t, agent.to_array(), args=(
            lead_agent.to_array(), self.idm_params,), method='RK45')

        # return the last solution (state at t = sampling_time)
        return IDMAgentState(
            solution.y[0][-1],
            solution.y[1][-1]
        )
| 41.960265 | 118 | 0.627367 |
acdf034690cca732f0a36a7063826af48186a7b0 | 81 | py | Python | IRs/apps.py | ttrung149/reslife-portal | a00fa917d0e7adfdcfff8fc244dcc5727e97153d | [
"MIT"
] | null | null | null | IRs/apps.py | ttrung149/reslife-portal | a00fa917d0e7adfdcfff8fc244dcc5727e97153d | [
"MIT"
] | 9 | 2020-06-05T21:26:21.000Z | 2022-03-11T23:50:10.000Z | IRs/apps.py | ttrung149/reslife-portal | a00fa917d0e7adfdcfff8fc244dcc5727e97153d | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class IrsConfig(AppConfig):
name = 'IRs'
| 13.5 | 33 | 0.728395 |
acdf039c7e9f8cacf255446c3394c0a6163161fa | 472 | py | Python | MileStone_2/utils/database_connection.py | python-pro/Learn-Python-by-Doing-by-Jose-Espanol | 5a4a6a6843fec6bbe231900aa0053021c69649d9 | [
"BSD-3-Clause"
] | null | null | null | MileStone_2/utils/database_connection.py | python-pro/Learn-Python-by-Doing-by-Jose-Espanol | 5a4a6a6843fec6bbe231900aa0053021c69649d9 | [
"BSD-3-Clause"
] | null | null | null | MileStone_2/utils/database_connection.py | python-pro/Learn-Python-by-Doing-by-Jose-Espanol | 5a4a6a6843fec6bbe231900aa0053021c69649d9 | [
"BSD-3-Clause"
] | null | null | null | # B_R_R
# M_S_A_W
import sqlite3
class DatabaseConnection():
def __init__(self,host):
self.connection=None
self.host=host
def __enter__(self):
self.connection=sqlite3.connect(self.host)
return self.connection
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type or exc_tb or exc_val:
self.connection.close()
else:
self.connection.commit()
self.connection.close() | 19.666667 | 50 | 0.622881 |
acdf04f4c8fcf9403eba54d419d83ff3f9d89e05 | 1,166 | py | Python | Python36ReleaseParty/async/async_iter_ticker.py | t2y/python-study | 52a132ea600d4696164e540d8a8f8f5fc58e097a | [
"Apache-2.0"
] | 18 | 2016-08-15T00:24:44.000Z | 2020-11-30T15:11:52.000Z | Python36ReleaseParty/async/async_iter_ticker.py | t2y/python-study | 52a132ea600d4696164e540d8a8f8f5fc58e097a | [
"Apache-2.0"
] | null | null | null | Python36ReleaseParty/async/async_iter_ticker.py | t2y/python-study | 52a132ea600d4696164e540d8a8f8f5fc58e097a | [
"Apache-2.0"
] | 6 | 2016-09-28T10:47:03.000Z | 2020-10-14T10:20:06.000Z | # -*- coding: utf-8 -*-
"""
PEP 525 -- Asynchronous Generators
the difference of implementation for asynchronous generators
https://www.python.org/dev/peps/pep-0525/#rationale-and-goals
"""
import asyncio
class Ticker:
    """Asynchronous iterator yielding 0 .. `to`-1, sleeping `delay` seconds between items.

    The first value is produced immediately; each subsequent value is
    preceded by an ``asyncio.sleep(delay)``.
    """

    def __init__(self, delay, to):
        self.delay = delay
        self.to = to
        self.i = 0

    def __aiter__(self):
        return self

    async def __anext__(self):
        current = self.i
        if current >= self.to:
            raise StopAsyncIteration
        self.i = current + 1
        if current > 0:
            await asyncio.sleep(self.delay)
        return current
async def ticker(delay, to):
    """Async generator yielding 0 .. `to`-1, sleeping `delay` seconds after each value."""
    n = 0
    while n < to:
        yield n
        await asyncio.sleep(delay)
        n += 1
async def run(delay, to):
    """Print the values from both ticker implementations, separated by a rule of dashes."""
    async for value in Ticker(delay, to):
        print(value)
    print('-' * 36)
    async for value in ticker(delay, to):
        print(value)
def main():
    """Drive the demo coroutine on the default event loop, closing the loop on exit."""
    loop = asyncio.get_event_loop()
    try:
        loop.run_until_complete(run(0.1, 3))
    finally:
        loop.close()
if __name__ == '__main__':
main()
| 19.433333 | 61 | 0.585763 |
acdf05a29a4f5ef925dc3bc07f43e71660782adf | 4,572 | py | Python | octavia-cli/unit_tests/test_entrypoint.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | 2 | 2022-03-02T13:46:05.000Z | 2022-03-05T12:31:28.000Z | octavia-cli/unit_tests/test_entrypoint.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | 2 | 2022-03-16T16:17:37.000Z | 2022-03-24T19:00:41.000Z | octavia-cli/unit_tests/test_entrypoint.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | null | null | null | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
import click
import pytest
from click.testing import CliRunner
from octavia_cli import entrypoint
# No-op click sub-command: registered on the octavia group so the tests can
# invoke the group without triggering any real sub-command behaviour.
# (Deliberately no docstring: click would surface it as --help text.)
@click.command()
@click.pass_context
def dumb(ctx):
    pass
def test_octavia(mocker):
    """Happy path: workspace resolved from the API, project initialized, context populated."""
    mocker.patch.object(entrypoint, "click")
    mocker.patch.object(entrypoint, "get_api_client")
    mocker.patch.object(entrypoint, "get_workspace_id", mocker.Mock(return_value="api-defined-workspace-id"))
    mocker.patch.object(entrypoint, "check_is_initialized", mocker.Mock(return_value=True))
    context_object = {}
    entrypoint.octavia.add_command(dumb)
    runner = CliRunner()
    # No --workspace-id flag, so get_workspace_id is expected to be called with None.
    result = runner.invoke(entrypoint.octavia, ["--airbyte-url", "test-airbyte-url", "dumb"], obj=context_object)
    entrypoint.get_api_client.assert_called()
    entrypoint.get_workspace_id.assert_called_with(entrypoint.get_api_client.return_value, None)
    expected_message = "🐙 - Octavia is targetting your Airbyte instance running at test-airbyte-url on workspace api-defined-workspace-id."
    entrypoint.click.style.assert_called_with(expected_message, fg="green")
    entrypoint.click.echo.assert_called_with(entrypoint.click.style.return_value)
    # The group callback must populate the click context object for sub-commands.
    assert context_object == {
        "API_CLIENT": entrypoint.get_api_client.return_value,
        "WORKSPACE_ID": entrypoint.get_workspace_id.return_value,
        "PROJECT_IS_INITIALIZED": entrypoint.check_is_initialized.return_value,
    }
    assert result.exit_code == 0
def test_octavia_not_initialized(mocker):
    """When the project is not initialized, a red warning is echoed but the CLI still succeeds."""
    mocker.patch.object(entrypoint, "click")
    mocker.patch.object(entrypoint, "get_api_client")
    mocker.patch.object(entrypoint, "get_workspace_id", mocker.Mock(return_value="api-defined-workspace-id"))
    mocker.patch.object(entrypoint, "check_is_initialized", mocker.Mock(return_value=False))
    context_object = {}
    entrypoint.octavia.add_command(dumb)
    runner = CliRunner()
    result = runner.invoke(entrypoint.octavia, ["--airbyte-url", "test-airbyte-url", "dumb"], obj=context_object)
    entrypoint.click.style.assert_called_with("🐙 - Project is not yet initialized.", fg="red", bold=True)
    entrypoint.click.echo.assert_called_with(entrypoint.click.style.return_value)
    assert result.exit_code == 0
def test_get_api_client(mocker):
    """get_api_client should configure a client against <url>/api and health-check it."""
    mocker.patch.object(entrypoint, "check_api_health")
    mocker.patch.object(entrypoint, "airbyte_api_client")
    mocked_client_lib = entrypoint.airbyte_api_client
    returned_client = entrypoint.get_api_client("test-url")
    mocked_client_lib.Configuration.assert_called_with(host="test-url/api")
    mocked_client_lib.ApiClient.assert_called_with(mocked_client_lib.Configuration.return_value)
    entrypoint.check_api_health.assert_called_with(mocked_client_lib.ApiClient.return_value)
    assert returned_client == mocked_client_lib.ApiClient.return_value
def test_get_workspace_id_user_defined(mocker):
    """A user-supplied workspace id is validated and returned unchanged."""
    mock_api_client = mocker.Mock()
    mocker.patch.object(entrypoint, "workspace_api")
    mocker.patch.object(entrypoint, "check_workspace_exists")
    workspace_id = entrypoint.get_workspace_id(mock_api_client, "user-defined-workspace-id")
    assert workspace_id == "user-defined-workspace-id"
    entrypoint.check_workspace_exists.assert_called_with(mock_api_client, "user-defined-workspace-id")
def test_get_workspace_id_api_defined(mocker):
    """Without a user-supplied id, the first workspace listed by the API is used."""
    mock_api_client = mocker.Mock()
    mocker.patch.object(entrypoint, "check_workspace_exists")
    mocker.patch.object(entrypoint, "workspace_api")
    mock_api_instance = entrypoint.workspace_api.WorkspaceApi.return_value
    mock_api_instance.list_workspaces.return_value = mocker.Mock(workspaces=[{"workspaceId": "api-defined-workspace-id"}])
    assert entrypoint.get_workspace_id(mock_api_client, None) == "api-defined-workspace-id"
    entrypoint.workspace_api.WorkspaceApi.assert_called_with(mock_api_client)
    # _check_return_type=False mirrors the production call into the generated client.
    mock_api_instance.list_workspaces.assert_called_with(_check_return_type=False)
def test_commands_in_octavia_group():
    """Every advertised command must actually be registered on the octavia group."""
    octavia_commands = entrypoint.octavia.commands.values()
    for command in entrypoint.AVAILABLE_COMMANDS:
        assert command in octavia_commands
@pytest.mark.parametrize(
    "command",
    [entrypoint.apply, entrypoint.create, entrypoint.delete, entrypoint._import],
)
def test_not_implemented_commands(command):
    """Stub commands must exit non-zero and report themselves as not yet implemented."""
    runner = CliRunner()
    result = runner.invoke(command)
    assert result.exit_code == 1
    assert result.output.endswith("not yet implemented.\n")
def test_available_commands():
assert entrypoint.AVAILABLE_COMMANDS == [entrypoint.list_commands._list, entrypoint.init_commands.init]
| 45.267327 | 139 | 0.780184 |
acdf0610a65efd0f889c4f553b4bc57fc67ef916 | 10,250 | py | Python | lale/search/PGO.py | mfeffer/lale | 57b58843c7c14dc2e5658244280f2c1918bf030b | [
"Apache-2.0"
] | 265 | 2019-08-06T14:45:43.000Z | 2022-03-30T23:57:48.000Z | lale/search/PGO.py | mfeffer/lale | 57b58843c7c14dc2e5658244280f2c1918bf030b | [
"Apache-2.0"
] | 467 | 2019-08-08T02:01:21.000Z | 2022-03-25T16:12:00.000Z | lale/search/PGO.py | mfeffer/lale | 57b58843c7c14dc2e5658244280f2c1918bf030b | [
"Apache-2.0"
] | 81 | 2019-08-07T19:59:31.000Z | 2022-03-31T09:11:58.000Z | # Copyright 2019 IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import random
from enum import Enum
from typing import (
Any,
Dict,
Generic,
Iterable,
Iterator,
List,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
cast,
overload,
)
import jsonschema
import numpy as np
Freqs = Dict[str, int]
PGO = Dict[str, Dict[str, Freqs]]
class DefaultValue(Enum):
    """Sentinel type marking the "default" bucket of a PGO histogram."""
    # Only member; the numeric value is irrelevant, identity is what matters.
    token = 0


# Module-wide singleton sentinel; always compared with ``is``.
_default_value = DefaultValue.token
Def = TypeVar("Def")
Defaultable = Union[DefaultValue, Def]
XDK = TypeVar("XDK")
XDV = TypeVar("XDV")
def remove_defaults_dict(d: Dict[XDK, Union[DefaultValue, XDV]]) -> Dict[XDK, XDV]:
    """Return a copy of *d* with every entry whose value is the default sentinel removed."""
    filtered: Dict[XDK, XDV] = {}
    for key, value in d.items():
        if value is _default_value:
            continue
        # DefaultValue has a single member, so a non-sentinel value
        # cannot be a DefaultValue at all.
        assert not isinstance(value, DefaultValue)
        filtered[key] = value  # type: ignore
    return filtered
# utilites to load a pgo from json-ish
def load_pgo_file(filepath) -> PGO:
    """Read a PGO histogram from a JSON file, validate it, and return the normalized form."""
    with open(filepath) as json_file:
        raw = json.load(json_file)
    return load_pgo_data(raw)
def load_pgo_data(json_data) -> PGO:
    """Validate raw PGO JSON against the input schema and return the normalized form.

    Any jsonschema validation failure propagates to the caller.
    """
    jsonschema.validate(json_data, _input_schema, jsonschema.Draft4Validator)
    return normalize_pgo_type(json_data)
# TODO: Add support for falling back on an underlying distribution
# with some probability
T = TypeVar("T")
class FrequencyDistribution(Generic[T]):
    """Represents the distribution implied by a histogram.

    Values are stored alongside their counts; sampling picks a uniform
    position in [0, total_count) and maps it back to a value via a binary
    search over the cumulative frequencies.
    """

    freq_dist: np.ndarray  # Array[T,int] — structured array of (value, frequency)
    vals: np.ndarray  # Array[T] — the distinct values, sorted
    cumulative_freqs: np.ndarray  # Array[int] — running sum of frequencies

    @classmethod
    def asIntegerValues(
        cls,
        freqs: Iterable[Tuple[Any, int]],
        inclusive_min: Optional[float] = None,
        inclusive_max: Optional[float] = None,
    ) -> "FrequencyDistribution[int]":
        """Build a distribution from string-keyed counts parsed as integers."""
        freqs = freqsAsIntegerValues(
            freqs, inclusive_min=inclusive_min, inclusive_max=inclusive_max
        )
        return FrequencyDistribution[int](list(freqs), dtype=int)

    @classmethod
    def asFloatValues(
        cls,
        freqs: Iterable[Tuple[Any, int]],
        inclusive_min: Optional[float] = None,
        inclusive_max: Optional[float] = None,
    ) -> "FrequencyDistribution[float]":
        """Build a distribution from string-keyed counts parsed as floats."""
        freqs = freqsAsFloatValues(
            freqs, inclusive_min=inclusive_min, inclusive_max=inclusive_max
        )
        return FrequencyDistribution[float](list(freqs), dtype=float)

    @classmethod
    def asEnumValues(
        cls, freqs: Iterable[Tuple[Any, int]], values: List[Any]
    ) -> "FrequencyDistribution[Any]":
        """Build a distribution keeping only counts whose key names one of *values*."""
        freqs = freqsAsEnumValues(freqs, values=values)
        return FrequencyDistribution[Any](list(freqs), dtype=object)

    def __init__(self, freqs: Iterable[Tuple[Defaultable[T], int]], dtype=object):
        # we need them to be sorted for locality
        # (the sentinel sorts last; None placeholder keeps the key comparable)
        sorted_freq_list = sorted(
            freqs,
            key=(
                lambda k: (
                    k[0] is _default_value,
                    None if k[0] is _default_value else k[0],
                )
            ),
        )
        freqs_array = np.array(
            sorted_freq_list, dtype=[("value", object), ("frequency", int)]
        )
        # freqs_array.sort(order='value')
        self.freq_dist = freqs_array
        self.vals = freqs_array["value"]
        self.cumulative_freqs = np.cumsum(freqs_array["frequency"])

    def __len__(self) -> int:
        """Total number of observations (sum of all frequencies)."""
        return cast(int, np.int_(self.cumulative_freqs[-1]))

    @overload
    def __getitem__(self, key: int) -> T:
        ...

    @overload
    def __getitem__(self, key: Sequence[int]) -> Sequence[T]:
        ...

    @overload
    def __getitem__(self, key: slice) -> Sequence[T]:
        ...

    def __getitem__(
        self, key: Union[int, Sequence[int], slice]
    ) -> Union[T, Sequence[T]]:
        """Map observation index/indices in [0, len(self)) to their value(s).

        An index i corresponds to the value whose cumulative-frequency
        bucket contains i (right-sided searchsorted).
        """
        indices: Sequence[int]
        single = False
        if isinstance(key, (int, float)):
            single = True
            indices = [key]
        elif isinstance(key, slice):
            # TODO: this could be made more efficient
            indices = range(key.start or 0, key.stop or len(self), key.step or 1)
        else:
            indices = key

        val_indices = np.searchsorted(self.cumulative_freqs, indices, side="right")
        values = self.vals[val_indices].tolist()
        if single:
            assert len(values) == 1
            return values[0]
        else:
            return values

    def sample(self) -> T:
        """Draw one value, weighted by the histogram frequencies."""
        ll = len(self)
        i = random.randrange(ll)
        return self[i]

    def samples(self, count: int) -> Sequence[T]:
        """Draw *count* independent weighted samples."""
        ll = len(self)
        i: Sequence[int] = [random.randrange(ll) for _ in range(count)]
        return self[i]
# utiltities to convert and sample from a PGO frequency distribution
DEFAULT_STR = "default"
def freqsAsIntegerValues(
    freqs: Iterable[Tuple[Any, int]],
    inclusive_min: Optional[float] = None,
    inclusive_max: Optional[float] = None,
) -> Iterator[Tuple[Defaultable[int], int]]:
    """Parse string histogram keys as integers.

    Yields ``(value, count)`` pairs; the literal "default" key maps to the
    default sentinel.  Keys that do not parse as integers, or whose value
    falls outside the optional [inclusive_min, inclusive_max] range, are
    silently dropped.
    """
    for raw_value, count in freqs:
        if raw_value == DEFAULT_STR:
            yield _default_value, count
            continue
        try:
            parsed = int(raw_value)
        except ValueError:
            continue
        if inclusive_min is not None and parsed < inclusive_min:
            continue
        if inclusive_max is not None and parsed > inclusive_max:
            continue
        yield parsed, count
def freqsAsFloatValues(
    freqs: Iterable[Tuple[Any, int]],
    inclusive_min: Optional[float] = None,
    inclusive_max: Optional[float] = None,
) -> Iterator[Tuple[Defaultable[float], int]]:
    """Parse string histogram keys as floats.

    Yields ``(value, count)`` pairs; the literal "default" key maps to the
    default sentinel.  Keys that do not parse as floats, or whose value
    falls outside the optional [inclusive_min, inclusive_max] range, are
    silently dropped.
    """
    for raw_value, count in freqs:
        if raw_value == DEFAULT_STR:
            yield _default_value, count
            continue
        try:
            parsed = float(raw_value)
        except ValueError:
            continue
        if inclusive_min is not None and parsed < inclusive_min:
            continue
        if inclusive_max is not None and parsed > inclusive_max:
            continue
        yield parsed, count
# TODO: we can get a dictionary from freqs (before items() was called)
# and then lookup values in it (since values is likely smaller then freqs)
# or, of course, check which one is smaller and iterate through it
def freqsAsEnumValues(
    freqs: Iterable[Tuple[Any, int]], values: List[Any]
) -> Iterator[Tuple[Defaultable[Any], int]]:
    """Keep only histogram keys whose string form names one of *values*.

    Matching keys are converted back to the corresponding enumeration
    value; the literal "default" key maps to the default sentinel;
    everything else is silently dropped.
    """

    def as_str(v) -> str:
        """PGO files spell the singletons differently from Python's str()."""
        if v is None:
            return "none"
        if v is True:
            return "true"
        if v is False:
            return "false"
        return str(v)

    lookup = {as_str(value): value for value in values}
    for key, count in freqs:
        if key == DEFAULT_STR:
            yield _default_value, count
        elif key in lookup:
            yield lookup[key], count
_input_type = Dict[str, Dict[str, Union[int, Dict[str, Union[str, int]]]]]
# For now, we skip things of the form
# alg -> {default: number}
# (i.e. without parameters)
def normalize_pgo_type(data: _input_type) -> PGO:
    """Coerce every leaf count to ``int``, dropping parameter entries that are
    not dicts (i.e. algorithms recorded as a bare ``{default: number}`` with
    no parameters — see the note above).
    """
    result: PGO = {}
    for alg, param_map in data.items():
        normalized_params: Dict[str, Freqs] = {}
        for param_name, value_counts in param_map.items():
            if not isinstance(value_counts, dict):
                continue
            normalized_params[param_name] = {
                value: int(count) for value, count in value_counts.items()
            }
        result[alg] = normalized_params
    return result
_input_schema: Any = {
"$schema": "http://json-schema.org/draft-04/schema#",
"description": "Input format for pgo files. Keys are the name of the algorithm",
"type": "object",
"additionalProperties": {
"anyOf": [
{
"description": "Keys are the parameter names",
"type": "object",
"additionalProperties": {
"description": "Keys are value names",
"type": "object",
"additionalProperties": {
"anyOf": [
{
"description": "the number of times this value was found",
"type": "integer",
},
{
"description": "the number of times this value was found",
"type": "string",
},
]
},
},
},
{
"description": "default value for the optimizer",
"type": "object",
"additionalProperties": False,
"required": ["default"],
"properties": {
"default": {
"anyOf": [
{
"description": "the number of times the default was found",
"type": "integer",
},
{
"description": "the number of times the default was found",
"type": "string",
},
]
}
},
},
]
},
}
| 30.147059 | 96 | 0.557463 |
acdf069089f64b6ad46cb3560a6f6c54db797ba5 | 651 | py | Python | algorithms/dynamic_programming/best_sum_tab.py | claytod5/algorithms | 565b7aa3c9a8eff95175e6cd48da9f1cf9a6993f | [
"MIT"
] | null | null | null | algorithms/dynamic_programming/best_sum_tab.py | claytod5/algorithms | 565b7aa3c9a8eff95175e6cd48da9f1cf9a6993f | [
"MIT"
] | null | null | null | algorithms/dynamic_programming/best_sum_tab.py | claytod5/algorithms | 565b7aa3c9a8eff95175e6cd48da9f1cf9a6993f | [
"MIT"
] | 1 | 2021-10-02T19:56:25.000Z | 2021-10-02T19:56:25.000Z | def best_sum_tab(n, a):
table = [None for i in range(n + 1)]
table[0] = []
for i in range(n + 1):
if table[i] is not None:
for j in a:
if (i + j) < len(table):
temp = table[i] + [j]
if table[i + j] is None:
table[i + j] = temp
elif len(temp) < len(table[i + j]):
table[i + j] = temp
return table[n]
if __name__ == "__main__":
print(best_sum_tab(7, [5, 3, 4, 7]))
print(best_sum_tab(8, [2, 3, 5]))
print(best_sum_tab(8, [1, 4, 5]))
print(best_sum_tab(100, [1, 2, 5, 25]))
| 28.304348 | 55 | 0.43318 |
acdf0756d2bbd02c24ed5f99b685a55e9a4edfde | 9,556 | py | Python | lib/datasets/tables_old.py | rremani/faster-rcnn.pytorch | 34744c513f6b02da97c34061d619f8cfbf41c66c | [
"MIT"
] | null | null | null | lib/datasets/tables_old.py | rremani/faster-rcnn.pytorch | 34744c513f6b02da97c34061d619f8cfbf41c66c | [
"MIT"
] | null | null | null | lib/datasets/tables_old.py | rremani/faster-rcnn.pytorch | 34744c513f6b02da97c34061d619f8cfbf41c66c | [
"MIT"
] | null | null | null | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import os
import errno
from datasets.imdb import imdb
import xml.dom.minidom as minidom
import numpy as np
import scipy.sparse
import scipy.io as sio
import utils.cython_bbox
import cPickle
import subprocess
import uuid
from tables_eval import tables_eval
#from __future__ import print
class tables(imdb):
    """imdb subclass for a table-detection dataset laid out like a VOC devkit.
    Expected layout under ``<devkit_path>/data``:
      - Images/<index>.jpg or .png
      - ImageSets/<image_set>.txt   (one image index per line)
      - Annotations/<index>.txt     (lines of "x1;y1;x2;y2;label")
    NOTE: this module is Python 2 code (cPickle, xrange).
    """
    def __init__(self, image_set, devkit_path):
        """image_set: split name (file stem under ImageSets);
        devkit_path: dataset root containing the 'data' directory."""
        imdb.__init__(self, image_set)
        self._image_set = image_set
        self._devkit_path = devkit_path
        self._data_path = os.path.join(self._devkit_path, 'data')
        # Two classes only: background plus a single foreground label '1'.
        self._classes = ('__background__','1')
        self._class_to_ind = dict(zip(self.classes, xrange(self.num_classes)))
        self._image_ext = ['.jpg', '.png']
        self._image_index = self._load_image_set_index()
        # Random salt keeps concurrent runs from clobbering result files.
        self._salt = str(uuid.uuid4())
        self._comp_id = 'comp4'
        # Specific config options
        self.config = {'cleanup' : True,
                       'use_salt' : True,
                       'top_k' : 2000,
                       'use_diff' : False,
                       'rpn_file' : None}
        assert os.path.exists(self._devkit_path), \
                'Devkit path does not exist: {}'.format(self._devkit_path)
        assert os.path.exists(self._data_path), \
                'Path does not exist: {}'.format(self._data_path)
    def image_path_at(self, i):
        """
        Return the absolute path to image i in the image sequence.
        """
        return self.image_path_from_index(self._image_index[i])
    def image_path_from_index(self, index):
        """
        Construct an image path from the image's "index" identifier.
        """
        # Try each known extension and keep the first path that exists.
        for ext in self._image_ext:
            image_path = os.path.join(self._data_path, 'Images',
                                      index + ext)
            if os.path.exists(image_path):
                break
        assert os.path.exists(image_path), \
                'Path does not exist: {}'.format(image_path)
        return image_path
    def _load_image_set_index(self):
        """
        Load the indexes listed in this dataset's image set file.
        """
        # Example path to image set file:
        # self._data_path + /ImageSets/val.txt
        image_set_file = os.path.join(self._data_path, 'ImageSets',
                                      self._image_set + '.txt')
        assert os.path.exists(image_set_file), \
                'Path does not exist: {}'.format(image_set_file)
        with open(image_set_file) as f:
            image_index = [x.strip() for x in f.readlines()]
        return image_index
    def gt_roidb(self):
        """
        Return the database of ground-truth regions of interest.
        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = cPickle.load(fid)
            print ('{} gt roidb loaded from {}'.format(self.name, cache_file))
            return roidb
        # Cache miss: parse every annotation file once and persist the result.
        gt_roidb = [self._load_tables_annotation(index)
                    for index in self.image_index]
        with open(cache_file, 'wb') as fid:
            cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
        print ('wrote gt roidb to {}'.format(cache_file))
        return gt_roidb
    def rpn_roidb(self):
        """Merge ground-truth ROIs with region proposals loaded from the
        file named in config['rpn_file']."""
        gt_roidb = self.gt_roidb()
        rpn_roidb = self._load_rpn_roidb(gt_roidb)
        roidb = imdb.merge_roidbs(gt_roidb, rpn_roidb)
        #roidb = self._load_rpn_roidb(None)
        return roidb
    def _load_rpn_roidb(self, gt_roidb):
        """Unpickle precomputed RPN proposal boxes and wrap them in a roidb."""
        filename = self.config['rpn_file']
        print ("--------------------------------------------",filename)
        print ('loading {}'.format(filename))
        assert os.path.exists(filename), \
               'rpn data not found at: {}'.format(filename)
        with open(filename, 'rb') as f:
            box_list = cPickle.load(f)
        return self.create_roidb_from_box_list(box_list, gt_roidb)
    def _load_tables_annotation(self, index):
        """
        Load image and bounding boxes info from txt files of invoice.
        """
        filename = os.path.join(self._data_path, 'Annotations', index + '.txt')
        # print 'Loading: {}'.format(filename)
        with open(filename) as f:
            data = f.read()
        import re
        # Each annotation line is "x1;y1;x2;y2;label" (label after the 4th ';').
        objs=re.findall('\d+;\d+;\d+;\d+;.+', data)
        num_objs = len(objs)
        boxes = np.zeros((num_objs, 4), dtype=np.uint16)
        gt_classes = np.zeros((num_objs), dtype=np.int32)
        overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
        # "Seg" area here is just the box area
        seg_areas = np.zeros((num_objs), dtype=np.float32)
        # Load object bounding boxes into a data frame.
        for ix, obj in enumerate(objs):
            # Make pixel indexes 0-based
            coor = obj.split(';')
            x1 = float(coor[0])
            y1 = float(coor[1])
            x2 = float(coor[2])
            y2 = float(coor[3])
            cls = self._class_to_ind[coor[4]]
            #print index, ix, coor
            boxes[ix, :] = [x1, y1, x2, y2]
            gt_classes[ix] = cls
            # Ground-truth boxes overlap their own class completely.
            overlaps[ix, cls] = 1.0
            seg_areas[ix] = (x2 - x1 + 1) * (y2 - y1 + 1)
        overlaps = scipy.sparse.csr_matrix(overlaps)
        return {'boxes' : boxes,
                'gt_classes': gt_classes,
                'gt_overlaps' : overlaps,
                'flipped' : False,
                'seg_areas' : seg_areas}
    def _write_table_results_file(self, all_boxes):
        """Write one VOC-style detection results file per foreground class.
        all_boxes[cls][image] holds the detections for that class/image."""
        for cls_ind, cls in enumerate(self.classes):
            if cls == '__background__':
                continue
            print ('Writing {} results file'.format(cls))
            filename = self._get_table_results_file_template().format(cls)
            with open(filename, 'wt') as f:
                for im_ind, index in enumerate(self.image_index):
                    dets = all_boxes[cls_ind][im_ind]
                    if dets == []:
                        continue
                    # the VOCdevkit expects 1-based indices
                    for k in xrange(dets.shape[0]):
                        f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
                                format(index, dets[k, -1],
                                       dets[k, 0] + 1, dets[k, 1] + 1,
                                       dets[k, 2] + 1, dets[k, 3] + 1))
    def evaluate_detections(self, all_boxes, output_dir):
        """Dump detections to results files, run the Python AP evaluation,
        then optionally delete the per-class results files."""
        self._write_table_results_file(all_boxes)
        self._do_python_eval(output_dir)
        if self.config['cleanup']:
            for cls in self._classes:
                if cls == '__background__':
                    continue
                filename = self._get_table_results_file_template().format(cls)
                os.remove(filename)
    def _get_comp_id(self):
        """Return the competition id, optionally salted to avoid collisions."""
        comp_id = (self._comp_id + '_' + self._salt if self.config['use_salt']
            else self._comp_id)
        return comp_id
    def _get_table_results_file_template(self):
        """Return a results-file path template with a '{:s}' class placeholder,
        creating <devkit_path>/results if needed."""
        # mmidevkit/results/comp4-44503_det_test_{%s}.txt
        filename = self._get_comp_id() + '_det_' + self._image_set + '_{:s}.txt'
        try:
            os.mkdir(self._devkit_path + '/results')
        except OSError as e:
            if e.errno == errno.EEXIST:
                pass
            else:
                raise e
        path = os.path.join(
            self._devkit_path,
            'results',
            filename)
        return path
    def _do_python_eval(self, output_dir = 'output'):
        """Compute per-class AP at IoU 0.5 via tables_eval, pickle the
        precision/recall curves, and print a summary report."""
        annopath = os.path.join(
            self._data_path,
            'Annotations',
            '{:s}.txt')
        imagesetfile = os.path.join(
            self._data_path,
            'ImageSets',
            self._image_set + '.txt')
        cachedir = os.path.join(self._devkit_path, 'annotations_cache')
        aps = []
        if not os.path.isdir(output_dir):
            os.mkdir(output_dir)
        for i, cls in enumerate(self._classes):
            if cls == '__background__':
                continue
            filename = self._get_table_results_file_template().format(cls)
            rec, prec, ap = tables_eval(
                filename, annopath, imagesetfile, cls, cachedir, ovthresh=0.5)
            aps += [ap]
            print('AP for {} = {:.4f}'.format(cls, ap))
            with open(os.path.join(output_dir, cls + '_pr.pkl'), 'w') as f:
                cPickle.dump({'rec': rec, 'prec': prec, 'ap': ap}, f)
        print('Mean AP = {:.4f}'.format(np.mean(aps)))
        print('~~~~~~~~')
        print('Results:')
        for ap in aps:
            print('{:.3f}'.format(ap))
        print('{:.3f}'.format(np.mean(aps)))
        print('~~~~~~~~')
        print('')
        print('--------------------------------------------------------------')
        print('Results computed with the **unofficial** Python eval code.')
        print('Results should be very close to the official MATLAB eval code.')
        print('Recompute with `./tools/reval.py --matlab ...` for your paper.')
        print('-- Thanks, The Management')
        print('--------------------------------------------------------------')
| 38.224 | 80 | 0.536626 |
acdf07afab17252963f9121d1e3711a6834e55ab | 672 | py | Python | simple_rl/apmdp/LTLGridWorldStateClass.py | yoonseon-oh/simple_rl | a6b9056ac05c29a259d616f2b289e8db2d984314 | [
"Apache-2.0"
] | 2 | 2020-02-13T22:37:31.000Z | 2021-02-03T02:35:42.000Z | simple_rl/apmdp/LTLGridWorldStateClass.py | yoonseon-oh/simple_rl | a6b9056ac05c29a259d616f2b289e8db2d984314 | [
"Apache-2.0"
] | null | null | null | simple_rl/apmdp/LTLGridWorldStateClass.py | yoonseon-oh/simple_rl | a6b9056ac05c29a259d616f2b289e8db2d984314 | [
"Apache-2.0"
] | null | null | null | from simple_rl.tasks.grid_world.GridWorldStateClass import GridWorldState
class LTLGridWorldState(GridWorldState):
    """Grid-world state extended with an LTL automaton state ``q``.

    The automaton state is appended to ``self.data`` so that hashing
    covers the full (x, y, q) triple.
    """

    def __init__(self, x, y, q):
        # Let the base class set up (x, y), then attach the automaton state.
        GridWorldState.__init__(self, x, y)
        self.q = q
        self.data.append(q)

    def __hash__(self):
        return hash(tuple(self.data))

    def __str__(self):
        return f"s: ({self.x},{self.y},{self.q})"

    def __repr__(self):
        return str(self)

    def __eq__(self, other):
        if not isinstance(other, LTLGridWorldState):
            return False
        return (self.x, self.y, self.q) == (other.x, other.y, other.q)
| 32 | 117 | 0.630952 |
acdf0883ebcb1ba7078d8cb8642ce198a67ce02a | 1,534 | py | Python | k_closest_points.py | pranavdave893/Leetcode | 1f30ea37af7b60585d168b15d9397143f53c92a1 | [
"MIT"
] | null | null | null | k_closest_points.py | pranavdave893/Leetcode | 1f30ea37af7b60585d168b15d9397143f53c92a1 | [
"MIT"
] | null | null | null | k_closest_points.py | pranavdave893/Leetcode | 1f30ea37af7b60585d168b15d9397143f53c92a1 | [
"MIT"
] | null | null | null | from math import sqrt
from heapq import heappush, heappop, heappushpop
class Solution(object):
    def kClosest(self, points, K):
        """
        :type points: List[List[int]]
        :type K: int
        :rtype: List[List[int]]
        Prints (rather than returns) the K points closest to the origin,
        computed two different ways for comparison. NOTE: heap_python uses
        a Python 2 print statement, so this file only runs under Python 2.
        """
        def stupid_python(points, K):
            # Bucket points by exact distance, then walk buckets in
            # increasing distance until K points have been collected.
            dct = {}
            answer = []
            for point in points:
                dist = sqrt((0-point[0]) * (0-point[0]) + (0 - point[1]) * (0-point[1]))
                if dist not in dct:
                    dct[dist] = [point]
                else:
                    dct[dist].append(point)
            count = 0
            for value in sorted(dct.items(), key=lambda x:x[0]):
                # NOTE(review): the loop variable shadows the outer
                # 'points' parameter; harmless here but confusing.
                for points in value[1]:
                    if count == K:
                        break
                    answer.append(points)
                    count += 1
            print(answer)
        def heap_python(points, K):
            # Size-K max-heap via negated squared distances (heapq is a
            # min-heap); sqrt is unnecessary since squaring preserves order.
            distance = []
            for (x,y) in points:
                dist = -(x*x + y*y)
                if len(distance) == K:
                    heappushpop(distance, (dist, x, y))
                else:
                    heappush(distance, (dist, x, y))
            print [[x,y] for (dist, x, y) in distance]
        stupid_python(points, K)
        heap_python(points, K)
# Ad-hoc demo: run both implementations on three sample inputs and
# compare the printed results by eye.
ip = [[1,3],[-2,2]]
k1 = 1
ip_2 = [[3,3],[5,-1],[-2,4]]
k2 = 2
ip3 = [[1,0],[0,1]]
k3 = 2
abc = Solution()
abc.kClosest(ip, k1)
abc.kClosest(ip_2, k2)
abc.kClosest(ip3, k3)
acdf08c3ca1dbed8d63e2d6a7a9159077b5e641e | 184 | py | Python | abstract-factory/AbstractFactory/autos/ford/fiesta.py | Tomvictor/python-design-patterns | 6b99607d721bbe03d26a0a451a10e88cd1c1d112 | [
"MIT"
] | null | null | null | abstract-factory/AbstractFactory/autos/ford/fiesta.py | Tomvictor/python-design-patterns | 6b99607d721bbe03d26a0a451a10e88cd1c1d112 | [
"MIT"
] | null | null | null | abstract-factory/AbstractFactory/autos/ford/fiesta.py | Tomvictor/python-design-patterns | 6b99607d721bbe03d26a0a451a10e88cd1c1d112 | [
"MIT"
] | null | null | null | from autos.abs_auto import AbsAuto
class FordFiesta(AbsAuto):
def start(self):
print('Ford Fiesta running cheaply.')
def stop(self):
print('Ford Fiestashutting down.')
| 20.444444 | 40 | 0.711957 |
acdf09fb67299b2a4d7753647937cced709f649f | 1,225 | py | Python | imouto/magicroute.py | bakalab/imouto | 01944746d4f7530a741bcb082866e18c48d07f3a | [
"BSD-3-Clause"
] | 9 | 2017-06-18T06:03:00.000Z | 2019-05-07T10:06:22.000Z | imouto/magicroute.py | bakalab/imouto | 01944746d4f7530a741bcb082866e18c48d07f3a | [
"BSD-3-Clause"
] | 3 | 2017-08-05T08:01:42.000Z | 2017-12-08T01:58:33.000Z | imouto/magicroute.py | bakalab/imouto | 01944746d4f7530a741bcb082866e18c48d07f3a | [
"BSD-3-Clause"
] | null | null | null | from imouto.web import Application
from imouto.route import URLSpec
class MagicRoute:
    """Slot-limited shim that stands in for a handler class: one attribute
    per HTTP verb plus the ``_magic_route`` marker flag set by Route."""
    __slots__ = ['_magic_route', 'get', 'post', 'head', 'put',
                 'patch', 'trace', 'options', 'connect', 'delete']
class Route:
    """Pairs an HTTP method name with a URL path; ``route > handler``
    registers *handler* on the (singleton) Application for that pair."""
    def __init__(self, method, path):
        self.method = method
        self.path = path
    def __gt__(self, handler):
        # TODO refactoring the ugly code, try to replace the Slot
        # '>' is (ab)used as the registration operator: VERB / path > handler.
        app = Application()
        if self.path in app._handlers:
            # Path already routed: attach this verb's handler onto the
            # existing handler class.
            setattr(app._handlers[self.path].handler_class,
                    self.method.lower(), handler)
        else:
            # First route for this path: wrap the handler in a MagicRoute
            # shim and register a fresh URLSpec for it.
            obj = MagicRoute()
            setattr(obj, '_magic_route', True)
            setattr(obj, self.method.lower(), handler)
            # Application is singleton
            app._handlers[self.path] = URLSpec(self.path, obj)
class HTTPMethod(type):
    """Metaclass that gives verb classes the ``VERB / path`` syntax."""
    def __truediv__(self, path):
        # Invoked on the class itself (e.g. GET / '/x'); the class name
        # doubles as the HTTP method string.
        return Route(self.__name__, path)
# Marker classes, one per HTTP verb. The HTTPMethod metaclass equips each
# with the ``VERB / path`` routing syntax; the class name is the method.
class GET(metaclass=HTTPMethod):
    pass
class POST(metaclass=HTTPMethod):
    pass
class HEAD(metaclass=HTTPMethod):
    pass
class OPTIONS(metaclass=HTTPMethod):
    pass
class DELETE(metaclass=HTTPMethod):
    pass
class PUT(metaclass=HTTPMethod):
    pass
| 21.12069 | 66 | 0.623673 |
acdf0a47256e6400d98c48ea82d2f8419ec45bba | 3,034 | py | Python | profiles_api/views.py | vikasagarwal2485/profiles-rest-api | eb7c9c8d856b0c780c238871704c0b2ffbb5ed78 | [
"MIT"
] | null | null | null | profiles_api/views.py | vikasagarwal2485/profiles-rest-api | eb7c9c8d856b0c780c238871704c0b2ffbb5ed78 | [
"MIT"
] | null | null | null | profiles_api/views.py | vikasagarwal2485/profiles-rest-api | eb7c9c8d856b0c780c238871704c0b2ffbb5ed78 | [
"MIT"
] | null | null | null | from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import viewsets
from profiles_api import serializers
class HelloApiView(APIView):
    """Test API View: one handler method per HTTP verb."""

    serializer_class = serializers.HelloSerializer

    def get(self, request, format=None):
        """Return a list of APIView features."""
        features = [
            'Uses HTTP methods as function (get, post, patch, put, delete)',
            'Is similar to a traditional Django View',
            'Gives you the most control over you application logic',
        ]
        return Response({'message': 'Hello!', 'an_apiview': features})

    def post(self, request):
        """Create a hello message greeting the submitted name."""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            # Invalid payload: echo the validation errors back.
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST
            )
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}'})

    def put(self, request, pk=None):
        """Handle updating an object."""
        return Response({'method': 'PUT'})

    def patch(self, request, pk=None):
        """Handle a partial update of an object."""
        return Response({'method': 'PATCH'})

    def delete(self, request, pk=None):
        """Delete an object."""
        return Response({'method': 'DELETE'})
class HelloViewSet(viewsets.ViewSet):
    """Test API ViewSet: one handler method per ViewSet action."""

    serializer_class = serializers.HelloSerializer

    def list(self, request):
        """Return a hello message."""
        viewset_features = [
            'Uses actions (list, create, retrieve, update, partial_update)',
            'Automatically maps to URLS using Routers',
            'Provides more functionality with less code',
        ]
        return Response({'message': 'Hello!', 'a_viewset': viewset_features})

    def create(self, request):
        """Create a new hello message."""
        serializer = self.serializer_class(data=request.data)
        if not serializer.is_valid():
            # Invalid payload: echo the validation errors back.
            return Response(
                serializer.errors,
                status=status.HTTP_400_BAD_REQUEST
            )
        name = serializer.validated_data.get('name')
        return Response({'message': f'Hello {name}!'})

    def retrieve(self, request, pk=None):
        """Handle getting an object by its ID."""
        return Response({'http_method': 'GET'})

    def update(self, request, pk=None):
        """Handle updating an object."""
        return Response({'http_method': 'PUT'})

    def partial_update(self, request, pk=None):
        """Handle updating part of an object."""
        return Response({'http_method': 'PATCH'})

    def destroy(self, request, pk=None):
        """Handle removing an object."""
        return Response({'http_method': 'DELETE'})
| 31.936842 | 76 | 0.60679 |
acdf0a8ac6a9f686c4e6f8306e30e973b2fb2e77 | 5,579 | py | Python | test_jacobi_diis.py | robertodr/solver | dced957cb3f2aa8c1f3ab085d8445c6dd7d3d284 | [
"MIT"
] | 2 | 2018-11-09T17:03:09.000Z | 2018-11-19T10:30:04.000Z | test_jacobi_diis.py | robertodr/solver | dced957cb3f2aa8c1f3ab085d8445c6dd7d3d284 | [
"MIT"
] | null | null | null | test_jacobi_diis.py | robertodr/solver | dced957cb3f2aa8c1f3ab085d8445c6dd7d3d284 | [
"MIT"
] | null | null | null | from functools import partial
from operator import ge, le
from typing import Callable, Dict, List, Tuple
import numpy as np
import zarr
from fixpoint import *
from numpy import linalg as LA
from utils import *
def jacobi_diis(A,
                b,
                rtol=1.0e-8,
                etol=1.0e-8,
                max_it=25,
                max_diis_hist=8,
                x_0=None):
    """Solve the linear system A x = b by Jacobi iteration with DIIS
    (Pulay) extrapolation.

    :param A: coefficient matrix
    :param b: right-hand side vector
    :param rtol: convergence threshold on the residual 2-norm
    :param etol: convergence threshold on the pseudoenergy change
    :param max_it: maximum number of iterations
    :param max_diis_hist: size of the DIIS extrapolation history
    :param x_0: optional starting guess (defaults to the zero vector)
    :return: the converged solution vector
    :raises RuntimeError: if max_it iterations pass without convergence
    """
    if x_0 is None:
        x = np.zeros_like(b)
    else:
        x = x_0
    # DIIS extrapolation
    # FIX: honor the max_diis_hist parameter; the history size used to be
    # hard-coded to 8, silently ignoring the caller's value.
    diis = DIIS(max_history=max_diis_hist)
    it = 0
    # Compute pseudoenergy
    E_old = 0.0
    E_new = quadratic_form(A, b, x)
    DeltaE = E_new - E_old
    rnorm = compute_residual_norm(A, b, x)
    print('Iteration # Residual norm Delta E')
    # Report at start
    print(' {:4d} {:.5E} {:.5E}'.format(
        it, rnorm, abs(DeltaE)))
    while it < max_it:
        # Update solution vector
        x = jacobi_step(A, b, x)
        # Compute residual
        r = compute_residual(A, b, x)
        # Collect DIIS history
        diis.append(x, r)
        extrapolated = False
        if it >= 1:
            # From the second iteration on, replace x by the DIIS combination.
            x = diis.extrapolate()
            extrapolated = True
            r = compute_residual(A, b, x)
        # Compute residual norm
        rnorm = LA.norm(r)
        # Compute new pseudoenergy
        E_new = quadratic_form(A, b, x)
        DeltaE = E_new - E_old
        E_old = E_new
        it += 1
        # Check convergence
        if rnorm < rtol and abs(DeltaE) < etol:
            # Print last statistics before breaking out
            print(' {:4d} {:.5E} {:.5E} {:s}'.format(
                it, rnorm, abs(DeltaE), 'DIIS' if extrapolated else ''))
            break
        # Report
        print(' {:4d} {:.5E} {:.5E} {:s}'.format(
            it, rnorm, abs(DeltaE), 'DIIS' if extrapolated else ''))
    else:
        # while-else: runs only if the loop exhausted max_it without break.
        raise RuntimeError(
            'Maximum number of iterations ({0:d}) exceeded, but residual norm {1:.5E} still greater than threshold {2:.5E}'.
            format(max_it, rnorm, rtol))
    return x
def jacobi_step(A, b, x):
    """Perform one Jacobi update: x' = D^{-1} (b - (A - D) x),
    where D is the diagonal of A."""
    diagonal = np.diag(A)
    off_diagonal = A - np.diagflat(diagonal)
    return (b - off_diagonal @ x) / diagonal
def stepper(A, b, diis, iterate: Iterate):
    """Advance the Jacobi-DIIS iteration by one step, mutating *iterate*.
    Takes one Jacobi step from iterate['x'], records (x, r) in the DIIS
    history, and from the second iteration on replaces x by the DIIS
    extrapolation. All convergence statistics are written back in place.
    """
    # Update vector and statistics
    x_new = jacobi_step(A, b, iterate['x'])
    r_new = compute_residual(A, b, x_new)
    diis.append(x_new, r_new)
    if iterate['iteration counter'] >= 1:
        # Extrapolation needs at least one previous (x, r) pair in history.
        x_new = diis.extrapolate()
        r_new = compute_residual(A, b, x_new)
    E_new = quadratic_form(A, b, x_new)
    rnorm = LA.norm(r_new)
    xdiffnorm = LA.norm(x_new - iterate['x'])
    denergy = abs(E_new - iterate['E'])
    # In-place update of dictionary
    iterate['iteration counter'] += 1
    iterate['x'] = x_new
    iterate['E'] = E_new
    iterate['2-norm of residual'] = rnorm
    iterate['2-norm of error'] = xdiffnorm
    iterate['absolute pseudoenergy difference'] = denergy
def checkpointer(iterate: Iterate):
    """Persist the current solution vector 'x' to data/jacobi.zarr.
    NOTE(review): appears to assume the 'data' directory exists -- confirm."""
    zarr.save('data/jacobi.zarr', iterate['x'])
def main():
    """Demo driver: build a random 50x50 symmetric, diagonally shifted
    system, solve it with the plain jacobi_diis loop, then again with the
    IterativeSolver/Stat machinery, comparing both against LA.solve."""
    print('Experiments with linear solvers')
    dim = 50
    M = np.random.randn(dim, dim)
    # Make sure our matrix is SPD
    # NOTE: '*' on ndarrays is elementwise, not a matrix product.
    A = 0.5 * (M + M.transpose())
    A = A * A.transpose()
    A += dim * np.eye(dim)
    b = np.random.rand(dim)
    # Direct-solve reference for measuring the iterative error.
    x_ref = LA.solve(A, b)
    # DIIS extrapolation
    diis = DIIS(max_history=8)
    # Jacobi method
    print('Jacobi-DIIS algorithm')
    x_jacobi = jacobi_diis(A, b, rtol=1.0e-4, etol=1.0e-5, max_it=25)
    print('Jacobi relative error to reference {:.5E}\n'.format(
        relative_error_to_reference(x_jacobi, x_ref)))
    # Jacobi method, with iterator
    # One Stat per monitored quantity; 'failure' criteria abort the run,
    # 'success' criteria must all hold for convergence, 'report' only prints.
    it_count = Stat(
        '# it.',
        '{:d}',
        kind='failure',
        criterion=Criterion(
            threshold=25,
            comparison=ge,
            message='Maximum number of iterations ({threshold:d}) exceeded'))
    rnorm = Stat(
        '||r||_2',
        '{:.5E}',
        kind='success',
        criterion=Criterion(
            threshold=1.0e-4,
            comparison=le,
            message='2-norm of residual below threshold {threshold:.1E}'))
    denergy = Stat(
        'abs(Delta E)',
        '{:.5E}',
        kind='success',
        criterion=Criterion(
            threshold=1.0e-5,
            comparison=le,
            message='Pseudoenergy variation below threshold {threshold:.1E}'))
    xdiffnorm = Stat(
        '||x_new - x_old||_2',
        '{:.5E}',
        kind='success',
        criterion=Criterion(
            threshold=1.0e-4,
            comparison=le,
            message='2-norm of error below threshold {threshold:.1E}'))
    energy = Stat('E', '{:.5E}', kind='report')
    # Keys must match what stepper() writes into the iterate dictionary.
    stats = {
        '2-norm of residual': rnorm,
        'absolute pseudoenergy difference': denergy,
        '2-norm of error': xdiffnorm,
        'E': energy,
        'iteration counter': it_count
    }
    x_0 = np.zeros_like(b)
    guess = Iterate({
        'x': x_0,
        'E': quadratic_form(A, b, x_0),
        '2-norm of residual': compute_residual_norm(A, b, x_0),
        'absolute pseudoenergy difference': 0.0,
        '2-norm of error': 0.0
    })
    jacobi_loose = IterativeSolver(
        partial(stepper, A, b, diis), guess, stats, RuntimeError, checkpointer)
    # Drive the solver to completion; each iteration checkpoints via zarr.
    for _ in jacobi_loose:
        pass
    print('\njacobi_loose.niterations ', jacobi_loose.niterations)
    print('Jacobi relative error to reference {:.5E}\n'.format(
        relative_error_to_reference(jacobi_loose.iterate['x'], x_ref)))
if __name__ == '__main__':
    # Run the demo only when executed as a script, not on import.
    main()
| 28.176768 | 124 | 0.555834 |
acdf0a94dd2b0cb4178bf54679e4addf0e6fe037 | 829 | py | Python | fizzbuzz.py | kdabuhanna/learning_python | ad897620371e16e00cf1db08f71a07aebc8decdf | [
"MIT"
] | null | null | null | fizzbuzz.py | kdabuhanna/learning_python | ad897620371e16e00cf1db08f71a07aebc8decdf | [
"MIT"
] | null | null | null | fizzbuzz.py | kdabuhanna/learning_python | ad897620371e16e00cf1db08f71a07aebc8decdf | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Write a program that prints the numbers from 1 to 100
# For multiples of 3 print “Fizz” instead of the number
# For the multiples of 5 print “Buzz”
# For numbers which are multiples of both 3 and 5 print “FizzBuzz”.
def fizzbuzz(i):
    """Return the FizzBuzz label for *i*: 'FizzBuzz' for multiples of both
    3 and 5, 'Buzz' for multiples of 5, 'Fizz' for multiples of 3, and the
    number itself (as a string) otherwise."""
    if i % 15 == 0:
        # FIX: was "Fizzbuzz"; the spec comment and the expected-output
        # listing below both say "FizzBuzz".
        return "FizzBuzz"
    if i % 5 == 0:
        return "Buzz"
    if i % 3 == 0:
        return "Fizz"
    return str(i)


# Print the labels for 1..100, matching the expected output below.
for i in range(1, 101):
    print(fizzbuzz(i))
"""
python3 fizzbuzz.py
1
2
Fizz
4
Buzz
Fizz
7
8
Fizz
Buzz
11
Fizz
13
14
FizzBuzz
16
17
Fizz
19
Buzz
Fizz
22
23
Fizz
Buzz
26
Fizz
28
29
FizzBuzz
31
32
Fizz
34
Buzz
Fizz
37
38
Fizz
Buzz
41
Fizz
43
44
FizzBuzz
46
47
Fizz
49
Buzz
Fizz
52
53
Fizz
Buzz
56
Fizz
58
59
FizzBuzz
61
62
Fizz
64
Buzz
Fizz
67
68
Fizz
Buzz
71
Fizz
73
74
FizzBuzz
76
77
Fizz
79
Buzz
Fizz
82
83
Fizz
Buzz
86
Fizz
88
89
FizzBuzz
91
92
Fizz
94
Buzz
Fizz
97
98
Fizz
Buzz
"""
| 6.908333 | 67 | 0.705669 |
acdf0ae30fa110ada0e0b6f896d0a32a0d7a77a5 | 5,213 | py | Python | demisto_client/demisto_api/models/audit_result.py | guytest/demisto-py | 8ca4f56a6177668151b5656cbe675a377003c0e9 | [
"Apache-2.0"
] | 59 | 2017-05-04T05:48:00.000Z | 2022-02-27T21:06:01.000Z | demisto_client/demisto_api/models/audit_result.py | guytest/demisto-py | 8ca4f56a6177668151b5656cbe675a377003c0e9 | [
"Apache-2.0"
] | 44 | 2017-05-09T17:42:43.000Z | 2022-03-30T05:55:44.000Z | demisto_client/demisto_api/models/audit_result.py | guytest/demisto-py | 8ca4f56a6177668151b5656cbe675a377003c0e9 | [
"Apache-2.0"
] | 37 | 2017-05-06T04:30:32.000Z | 2022-02-15T04:59:00.000Z | # coding: utf-8
"""
Demisto API
This is the public REST API to integrate with the demisto server. HTTP request can be sent using any HTTP-client. For an example dedicated client take a look at: https://github.com/demisto/demisto-py. Requests must include API-key that can be generated in the Demisto web client under 'Settings' -> 'Integrations' -> 'API keys' Optimistic Locking and Versioning\\: When using Demisto REST API, you will need to make sure to work on the latest version of the item (incident, entry, etc.), otherwise, you will get a DB version error (which not allow you to override a newer item). In addition, you can pass 'version\\: -1' to force data override (make sure that other users data might be lost). Assume that Alice and Bob both read the same data from Demisto server, then they both changed the data, and then both tried to write the new versions back to the server. Whose changes should be saved? Alice’s? Bob’s? To solve this, each data item in Demisto has a numeric incremental version. If Alice saved an item with version 4 and Bob trying to save the same item with version 3, Demisto will rollback Bob request and returns a DB version conflict error. Bob will need to get the latest item and work on it so Alice work will not get lost. Example request using 'curl'\\: ``` curl 'https://hostname:443/incidents/search' -H 'content-type: application/json' -H 'accept: application/json' -H 'Authorization: <API Key goes here>' --data-binary '{\"filter\":{\"query\":\"-status:closed -category:job\",\"period\":{\"by\":\"day\",\"fromValue\":7}}}' --compressed ``` # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from demisto_client.demisto_api.models.audit import Audit # noqa: F401,E501
class AuditResult(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps attribute name -> declared swagger type string.
    swagger_types = {
        'audits': 'list[Audit]',
        'total': 'int'
    }
    # Maps attribute name -> JSON key in the API payload.
    attribute_map = {
        'audits': 'audits',
        'total': 'total'
    }
    def __init__(self, audits=None, total=None):  # noqa: E501
        """AuditResult - a model defined in Swagger"""  # noqa: E501
        self._audits = None
        self._total = None
        self.discriminator = None
        # Route assignments through the property setters below.
        if audits is not None:
            self.audits = audits
        if total is not None:
            self.total = total
    @property
    def audits(self):
        """Gets the audits of this AuditResult.  # noqa: E501
        :return: The audits of this AuditResult.  # noqa: E501
        :rtype: list[Audit]
        """
        return self._audits
    @audits.setter
    def audits(self, audits):
        """Sets the audits of this AuditResult.
        :param audits: The audits of this AuditResult.  # noqa: E501
        :type: list[Audit]
        """
        self._audits = audits
    @property
    def total(self):
        """Gets the total of this AuditResult.  # noqa: E501
        :return: The total of this AuditResult.  # noqa: E501
        :rtype: int
        """
        return self._total
    @total.setter
    def total(self, total):
        """Sets the total of this AuditResult.
        :param total: The total of this AuditResult.  # noqa: E501
        :type: int
        """
        self._total = total
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model objects held in lists.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model objects held as dict values.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(AuditResult, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, AuditResult):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
acdf0cdab6a553349f9e7a1a201b8dd7f2facf82 | 3,927 | py | Python | tests/test_answer_patterns.py | FermiParadox/ipy_student_exercises | 9e74406255c01672516b2d06dabc6e5f6822e3ad | [
"MIT"
] | null | null | null | tests/test_answer_patterns.py | FermiParadox/ipy_student_exercises | 9e74406255c01672516b2d06dabc6e5f6822e3ad | [
"MIT"
] | null | null | null | tests/test_answer_patterns.py | FermiParadox/ipy_student_exercises | 9e74406255c01672516b2d06dabc6e5f6822e3ad | [
"MIT"
] | null | null | null | from unittest import TestCase
from re import compile, fullmatch
import answer_patterns
from answer_patterns import _PatternBase, PATTERNS
# ---------------------------------------------------------------------------------
class Test_total_matches_within_bounds(TestCase):
    """Exercise _PatternBase.total_matches_within_bounds with counts that
    satisfy, violate, or cannot be parsed against a bounds string."""

    def test_within_bounds(self):
        satisfying = {
            'm>2': 3, 'm<=1': 1, 'm>=10': 111,
            'm==45': 45, 'm!=5': 2, '2<=m<15': 3,
        }
        for bounds, count in satisfying.items():
            self.assertTrue(_PatternBase.total_matches_within_bounds(m=count, bounds_str=bounds))

    def test_out_of_bounds(self):
        violating = {
            'm>2': 2, 'm<=1': 34, 'm>=10': 8,
            'm==45': 2, 'm!=5': 5, '2<=m<15': 1,
        }
        for bounds, count in violating.items():
            self.assertFalse(_PatternBase.total_matches_within_bounds(m=count, bounds_str=bounds),
                             msg='m: {}, Pattern: {}'.format(count, bounds))

    def test_invalid_input(self):
        from random import randint
        malformed = {'2', '<=', '10>', '2>1', '4==4', '4!=5', '>x', '2<x<4', '1.3<m<4'}
        for bounds in malformed:
            self.assertRaises(ValueError, _PatternBase.total_matches_within_bounds,
                              randint(1, 1000), bounds)
class Test__check_duplicates_and_note_new_pattern(TestCase):
    # Filler positional args passed to _PatternBase after the compiled
    # pattern. NOTE: these tests mutate the module-global PATTERNS registry.
    FILLER_ARGS = ([''],[''],[''])
    def test_duplicate_detected(self):
        """Registering the same compiled pattern twice must raise ValueError."""
        _PatternBase(compile(r'1'), *self.FILLER_ARGS)
        self.assertRaises(ValueError, _PatternBase, compile(r'1'), *self.FILLER_ARGS)
    def test_new_pattern_noted(self):
        """A freshly created pattern instance must be recorded in PATTERNS."""
        inst = _PatternBase(compile(r'x'), *self.FILLER_ARGS)
        # Must be in either key or val
        self.assertIn(inst, PATTERNS)
class Test_PatternBase(TestCase):
    # Reuse the filler args declared on the sibling test case above.
    FILLER_ARGS = Test__check_duplicates_and_note_new_pattern.FILLER_ARGS
    def test_str_not_accepted_as_patt(self):
        """_PatternBase must reject plain strings; only compiled patterns pass."""
        self.assertRaises(TypeError, _PatternBase, r'\d+', *self.FILLER_ARGS)
        self.assertRaises(TypeError, _PatternBase, '\d+', *self.FILLER_ARGS)
# ---------------------------------------------------------------------------------
class TestEachPattern(TestCase):
    """Data-driven checks over every registered pattern. Each element of
    PATTERNS apparently carries its own example strings as attributes
    ('fullmatch', 'no_fullmatch', 'two_matches') -- confirm in answer_patterns."""
    # fullmatch test-base
    def _fullmatch_base_(self, search_patt, _y_n):
        # _y_n selects the polarity: 'y' -> examples must fullmatch,
        # anything else -> counter-examples must not.
        if _y_n == 'y':
            assert_func = self.assertTrue
            lst_of_strs = getattr(search_patt, 'fullmatch')
        else:
            assert_func = self.assertFalse
            lst_of_strs = getattr(search_patt, 'no_fullmatch')
        for s in lst_of_strs:
            assert_func(fullmatch(search_patt, s),
                        '\nPattern: {} \nexpression: {}'.format(search_patt, s))
    def _does_fullmatch(self, pattern):
        """Every positive example must fullmatch the pattern."""
        return self._fullmatch_base_(search_patt=pattern, _y_n='y')
    def _does_not_fullmatch(self, pattern):
        """No negative example may fullmatch the pattern."""
        return self._fullmatch_base_(search_patt=pattern, _y_n='n')
    # find_m_patterns test-base
    def _does_find2patterns(self, pattern):
        # Each 'two_matches' expression must contain the pattern exactly twice.
        m = 2
        m_str = 'm==2'
        for expr in getattr(pattern, 'two_matches'):
            self.assertTrue(answer_patterns.found_m_patterns(compile_obj=pattern, expr=expr, bounds_str=m_str),
                            'Did not find {} times \nthe pattern: {} \nin the string: {}'.format(m, pattern, expr))
    def test_all_patterns_examples_and_non_examples(self):
        """Run all three example-based checks for every registered pattern."""
        for p in PATTERNS:
            self._does_fullmatch(p)
            self._does_not_fullmatch(p)
            self._does_find2patterns(p)
| 38.126214 | 115 | 0.531194 |
acdf0f7c287ce3e519479e3ea8362635e0c46852 | 1,278 | py | Python | src/btc_assistant/dynamodb/read/core.py | nevtum/btc-assistant | 1995056276f0c640b72cc12f6f8d933994f8404d | [
"MIT"
] | null | null | null | src/btc_assistant/dynamodb/read/core.py | nevtum/btc-assistant | 1995056276f0c640b72cc12f6f8d933994f8404d | [
"MIT"
] | 1 | 2020-05-16T15:43:31.000Z | 2020-05-16T15:43:31.000Z | src/btc_assistant/dynamodb/read/core.py | nevtum/btc-assistant | 1995056276f0c640b72cc12f6f8d933994f8404d | [
"MIT"
] | null | null | null | from collections import deque
import boto3
from log import get_logger
# Module-level singletons: one logger and one DynamoDB client per process.
logger = get_logger(__name__)
_client = boto3.client("dynamodb")
def execute_query(**kwargs):
    """Run a single DynamoDB Query call with *kwargs* and return the raw
    response, logging the consumed capacity.
    NOTE(review): indexing resp["ConsumedCapacity"] assumes the caller sets
    ReturnConsumedCapacity in kwargs -- confirm against callers."""
    resp = _client.query(**kwargs)
    logger.info("Consumed capacity: {}".format(resp["ConsumedCapacity"]))
    return resp
class DynamoQueryPaginator:
    """Iterator that transparently pages through a DynamoDB Query.

    Repeatedly feeds ``query_kwargs`` to ``execute_query``, threading each
    response's LastEvaluatedKey back in as ExclusiveStartKey until the
    result set is exhausted, and yields individual items.
    """
    def __init__(self, query_kwargs):
        self.query_kwargs = query_kwargs
        self.no_more_records = False  # set once a page has no LastEvaluatedKey
        self.buffer = deque()         # items fetched but not yet yielded
    def _fetch_next_records(self):
        """Fetch one page, update pagination state, and return its items."""
        resp = execute_query(**self.query_kwargs)
        logger.info(f"Fetched records {resp}")
        if "LastEvaluatedKey" in resp:
            logger.debug("There is more data to retrieve!")
            self.query_kwargs["ExclusiveStartKey"] = resp["LastEvaluatedKey"]
        else:
            # Final page: drop the start key so the kwargs are clean again.
            if "ExclusiveStartKey" in self.query_kwargs:
                self.query_kwargs.pop("ExclusiveStartKey")
            self.no_more_records = True
        return resp.get("Items", [])
    def __iter__(self):
        return self
    def __next__(self):
        """Return the next item, fetching further pages on demand.

        BUG FIX: the original fell off the end after refilling the buffer
        (implicitly returning None), so every call that triggered a page
        fetch yielded a spurious None. Looping also handles pages that
        return no Items but still carry a LastEvaluatedKey.
        """
        while not self.buffer:
            if self.no_more_records:
                raise StopIteration("No more records!")
            self.buffer = deque(self._fetch_next_records())
        return self.buffer.popleft()
| 25.56 | 77 | 0.643975 |
acdf105e3f204a7d2878f6b43601962b06baf9bc | 501 | py | Python | resources/geometry_update_script.py | Aruut/CFD-Bayesian-Optimization | b11b6389278c8cca8c7c053d10dc190618c85f00 | [
"MIT"
] | 8 | 2020-02-16T10:53:31.000Z | 2022-03-28T10:09:00.000Z | resources/geometry_update_script.py | Aruut/CFD-Bayesian-Optimization | b11b6389278c8cca8c7c053d10dc190618c85f00 | [
"MIT"
] | null | null | null | resources/geometry_update_script.py | Aruut/CFD-Bayesian-Optimization | b11b6389278c8cca8c7c053d10dc190618c85f00 | [
"MIT"
] | 6 | 2020-04-02T07:18:19.000Z | 2021-12-20T06:29:33.000Z | for ii in range(len(expressions)):
for jj in range(len(expressions[0])):
if needsUpdates[ii][jj]:
designPoints[jj].SetParameterExpression(parameters[ii], expressions[ii][jj])
system1 = GetSystem(Name="CFX")
setupComponent1 = system1.GetComponent(Name="Setup")
for desi in designPoints:
if Parameters.GetBaseDesignPoint() != desi:
Parameters.SetBaseDesignPoint(DesignPoint=desi)
try:
setupComponent1.UpdateUpstreamComponents()
except:
pass
| 31.3125 | 88 | 0.698603 |
acdf14b8c6b9e2ba9b22261d261dde886c0ce24b | 686 | py | Python | orchestra/migrations/0008_auto_20150521_2011.py | ksbek/orchestra | 07556717feb57efcf8fb29a1e2e98eebe2313b8c | [
"Apache-2.0"
] | null | null | null | orchestra/migrations/0008_auto_20150521_2011.py | ksbek/orchestra | 07556717feb57efcf8fb29a1e2e98eebe2313b8c | [
"Apache-2.0"
] | null | null | null | orchestra/migrations/0008_auto_20150521_2011.py | ksbek/orchestra | 07556717feb57efcf8fb29a1e2e98eebe2313b8c | [
"Apache-2.0"
] | 1 | 2021-12-15T01:10:35.000Z | 2021-12-15T01:10:35.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Schema migration for the orchestra app.

    * Makes Certification.required_certifications a self-referential
      many-to-many field (it targets 'orchestra.Certification' itself).
    * Enforces uniqueness of (task, assignment_counter) on TaskAssignment.
    """

    # Must be applied after migration 0007 of the orchestra app.
    dependencies = [
        ('orchestra', '0007_auto_20150507_0204'),
    ]

    operations = [
        migrations.AlterField(
            model_name='certification',
            name='required_certifications',
            field=models.ManyToManyField(
                related_name='required_certifications_rel_+', to='orchestra.Certification'),
        ),
        migrations.AlterUniqueTogether(  # manually-reviewed
            name='taskassignment',
            unique_together=set([('task', 'assignment_counter')]),
        ),
    ]
| 27.44 | 92 | 0.626822 |
acdf152880ed70307f8350192b1c04480f64701a | 794 | py | Python | setup.py | openseat/notebook | 081ea4e76c4f36c8717406262f994799139c431d | [
"BSD-3-Clause"
] | null | null | null | setup.py | openseat/notebook | 081ea4e76c4f36c8717406262f994799139c431d | [
"BSD-3-Clause"
] | null | null | null | setup.py | openseat/notebook | 081ea4e76c4f36c8717406262f994799139c431d | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup

# Distribution metadata for the openseatnotebook package, gathered in one
# mapping so the setup() call itself stays trivial.
setup_args = dict(
    name='openseatnotebook',
    version='0.1.0',
    description='Conceptual Modeling in the IPython Notebook',
    author='Nicholas Bollweg',
    author_email='nicholas.bollweg@gtri.gatech.edu',
    license='New BSD License',
    url='https://github.com/openseat/notebook',
    keywords=('data visualization interactive interaction python ipython'
              ' widgets widget jupyter'),
    install_requires=[
        'ipywidgets',
        'notebook',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'License :: OSI Approved :: BSD License',
    ],
    packages=find_packages(),
    include_package_data=True,
)

setup(**setup_args)
| 26.466667 | 73 | 0.63728 |
acdf1588a9f8df54b1b019bae544ed87d1788a48 | 1,256 | py | Python | catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/permissions.py | yxiao1996/dev | e2181233aaa3d16c472b792b58fc4863983825bd | [
"CC-BY-2.0"
] | 2 | 2018-06-25T02:51:25.000Z | 2018-06-25T02:51:27.000Z | catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/permissions.py | yxiao1996/dev | e2181233aaa3d16c472b792b58fc4863983825bd | [
"CC-BY-2.0"
] | null | null | null | catkin_ws/src/00-infrastructure/what_the_duck/include/what_the_duck/checks/permissions.py | yxiao1996/dev | e2181233aaa3d16c472b792b58fc4863983825bd | [
"CC-BY-2.0"
] | 2 | 2018-09-04T06:44:21.000Z | 2018-10-15T02:30:50.000Z | import os
import stat
from what_the_duck.check import CheckFailed, CheckError, Check
from what_the_duck.resolution import Suggestion
from duckietown_utils import expand_all
class CheckPermissions(Check):
    """Checks that a file or directory has exactly the expected permissions.

    The expected mode is a 4-digit octal string such as "0700", i.e. the
    permission bits only (what `stat.S_IMODE` extracts from `st_mode`).
    """

    def __init__(self, filename, expected):
        if not isinstance(expected, str) or len(expected) != 4:
            msg = 'Expected "expected" to be a 4-digit string octal ("0700")'
            raise ValueError(msg)
        self.filename = filename
        self.expected = expected

    def check(self):
        """Raises CheckError if the path is missing, CheckFailed on mode mismatch."""
        fn = expand_all(self.filename)
        if not os.path.exists(fn):
            msg = 'Cannot check permissions if file or dir does not exist.'
            raise CheckError(msg)
        fstats = os.stat(fn)
        # Format the permission bits as a zero-padded 4-digit octal string.
        # The previous oct()+slice approach broke on Python 3, where
        # oct(0o700) is '0o700' and slicing [-4:] yields 'o700', which can
        # never equal an expected value like '0700'. '%04o' produces the
        # same '0700' form on both Python 2 and Python 3.
        filemode = '%04o' % stat.S_IMODE(fstats.st_mode)
        if filemode != self.expected:
            msg = ('Expected mode %r, obtained %r.' %
                   (self.expected, filemode))
            raise CheckFailed(msg)

    def get_suggestion(self):
        """Returns a Suggestion holding the chmod command that fixes the mode."""
        msg = """
You can fix the permissions using:

    $ chmod %s %s
""" % (self.expected, self.filename)
        return Suggestion(msg)
| 30.634146 | 77 | 0.589172 |
acdf15af8834ae6578f9c658702347ef1c605357 | 4,462 | py | Python | mm_mm/read_trace.py | 3upperm2n/cke_ovlp_rate | a6a117f4c77c4528e8a6f6437d26de6b8ef4da2b | [
"MIT"
] | null | null | null | mm_mm/read_trace.py | 3upperm2n/cke_ovlp_rate | a6a117f4c77c4528e8a6f6437d26de6b8ef4da2b | [
"MIT"
] | null | null | null | mm_mm/read_trace.py | 3upperm2n/cke_ovlp_rate | a6a117f4c77c4528e8a6f6437d26de6b8ef4da2b | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import operator
class transfer():
    """A single timed activity interval (copy or kernel), in milliseconds."""

    def __init__(self, start=0.0, end=0.0):
        # Explicit unit-suffixed attribute names; the defaults mark an
        # empty (zero-length) span at t=0.
        self.start_time_ms, self.end_time_ms = start, end
class streams():
    """Per-stream activity lists: h2d/d2h copies and kernel launches."""

    def __init__(self):
        # Three independent (non-shared) lists of `transfer` intervals,
        # one per API-call category.
        self.h2d, self.d2h, self.kernel = [], [], []
def time_coef_ms(df_trace):
    """Return factors that convert the Start/Duration columns to milliseconds.

    Profiler CSV traces (nvprof-style column layout) store each column's
    time unit ('s', 'ms' or 'us') in the first data row; read those units
    and map them to multiplicative conversion factors.

    :param df_trace: trace DataFrame whose row 0 holds the column units
    :returns: (start_coef, duration_coef) floats; 1.0 when the unit is 'ms'
    """
    # The unused `rows, cols = df_trace.shape` unpack was dropped.
    start_unit = df_trace['Start'].iloc[0]
    duration_unit = df_trace['Duration'].iloc[0]

    start_coef = 1.0
    if start_unit == 's':
        start_coef = 1e3
    if start_unit == 'us':
        start_coef = 1e-3

    duration_coef = 1.0
    if duration_unit == 's':
        duration_coef = 1e3
    if duration_unit == 'us':
        duration_coef = 1e-3

    return start_coef, duration_coef
# read data for the current row
def read_row(df_row, start_coef_ms, duration_coef_ms):
    """Extract (stream_id, api_type, start_ms, end_ms) from one trace row.

    :param df_row: single-row DataFrame slice, e.g. ``df.iloc[[i]]``
    :param start_coef_ms: factor converting the 'Start' value to ms
    :param duration_coef_ms: factor converting the 'Duration' value to ms
    :returns: (stream_id, api_type, start_time_ms, end_time_ms) where
        api_type is 'h2d', 'd2h' or 'kernel'
    """
    # Use scalar access (.iloc[0]) instead of float()/int() on a one-element
    # Series (deprecated/removed in modern pandas) and instead of
    # Series.to_string(), which prepends the index to the value.
    start_time_ms = float(df_row['Start'].iloc[0]) * start_coef_ms
    end_time_ms = start_time_ms + float(df_row['Duration'].iloc[0]) * duration_coef_ms
    stream_id = int(df_row['Stream'].iloc[0])
    api_name = str(df_row['Name'].iloc[0])

    # Classify the API call from the profiler's name string.
    if "DtoH" in api_name:
        api_type = 'd2h'
    elif "HtoD" in api_name:
        api_type = 'h2d'
    else:
        api_type = 'kernel'

    return stream_id, api_type, start_time_ms, end_time_ms
def trace2dataframe(trace_file):
    """Load a profiler CSV trace into a DataFrame, dropping the preamble.

    The file may start with free-form banner lines; the real CSV header is
    the row whose first field is the literal 'Start'. Locate it with a
    first permissive pass, then re-read the file from that row.
    """
    # There are max 17 columns in the output csv; naming them all lets the
    # first pass parse rows of any width.
    col_name = ["Start", "Duration", "Grid X", "Grid Y", "Grid Z", "Block X",
                "Block Y", "Block Z", "Registers Per Thread", "Static SMem",
                "Dynamic SMem", "Size", "Throughput", "Device", "Context",
                "Stream", "Name"]

    preview = pd.read_csv(trace_file, names=col_name, engine='python')

    # Scan for the row holding the real CSV header.
    rows_to_skip = 0
    for idx, row in preview.iterrows():
        if row['Start'] == 'Start':
            rows_to_skip = idx
            break

    # Second pass: skip the preamble so pandas picks up the proper header.
    return pd.read_csv(trace_file, skiprows=rows_to_skip)
def get_stream_info(df_trace):
    """
    read dataframe into stream list which contains the h2d/d2h/kernel star and end time in ms.

    Returns one `streams` record per unique stream id, in the order the ids
    first appear in the trace. NOTE: this module is Python 2 only (xrange,
    print statement).
    """
    streamList = []

    # read the number of unique streams
    # (NaN entries -- e.g. presumably from the units row -- are dropped)
    stream_id_list = df_trace['Stream'].unique()
    stream_id_list = stream_id_list[~np.isnan(stream_id_list)] # remove nan
    num_streams = len(stream_id_list)
    # one empty `streams` record per unique stream id
    for i in xrange(num_streams):
        streamList.append(streams())

    start_coef, duration_coef = time_coef_ms(df_trace)

    # read row by row, starting at 1 to skip the units row consumed above
    for rowID in xrange(1, df_trace.shape[0]):
        # extract info from the current row
        stream_id, api_type, start_time_ms, end_time_ms = read_row(df_trace.iloc[[rowID]], start_coef, duration_coef)

        # find out index of the stream
        # NOTE(review): `sid` is a length-1 ndarray used directly as a list
        # index below; this relies on NumPy's size-1-array __index__
        # conversion -- verify it still works on the installed NumPy.
        sid, = np.where(stream_id_list==stream_id)

        # add the start/end time for different api calls
        if api_type == 'h2d':
            streamList[sid].h2d.append(transfer(start_time_ms, end_time_ms))
        elif api_type == 'd2h':
            streamList[sid].d2h.append(transfer(start_time_ms, end_time_ms))
        elif api_type == 'kernel':
            streamList[sid].kernel.append(transfer(start_time_ms, end_time_ms))
        else:
            print "Unknown. Error."

    return streamList
def check_kernel_ovlprate(trace_file):
    """
    Read the trace file and figure out the overlapping rate for the two kernel execution.

    Assumes the trace contains at least two streams, each with at least one
    kernel. The overlap window is measured from the second stream's kernel
    start to the first stream's kernel end, and reported relative to the
    first stream's kernel runtime. The ratio can therefore be negative (no
    overlap) or exceed 1 -- callers must interpret it accordingly.
    """
    # read data from the trace file
    df_trace = trace2dataframe(trace_file)

    # extract stream info
    streamList = get_stream_info(df_trace)

    # check kernel overlapping: first kernel of each of the first two streams
    preK_start = streamList[0].kernel[0].start_time_ms
    preK_end = streamList[0].kernel[0].end_time_ms

    curK_start = streamList[1].kernel[0].start_time_ms
    curK_end = streamList[1].kernel[0].end_time_ms

    preK_runtime = preK_end - preK_start
    # curK_runtime is only used by the disabled diagnostic print below.
    curK_runtime = curK_end - curK_start

    # overlap window: later kernel's start to earlier kernel's end
    ovlp_duration = preK_end - curK_start
    ovlp_ratio = ovlp_duration / preK_runtime

    # if curK_start >= preK_start and curK_start <= preK_end:
    #     print('concurrent kernel execution :\n\t stream-prev {} ms \n\t stream-cur {} ms'
    #           '\n\t overlapping {} ms \n\t ovlp ratio (based on prev stream) {}%'\
    #           .format(preK_runtime, curK_runtime, ovlp_duration, ovlp_ratio))

    return ovlp_ratio
acdf176723dc158b7e6e5a365fdcfb75b2ac28bb | 16,255 | py | Python | tools/tcpconnect.py | swiftomkar/bcc | 070e6c8bc0a76341eeab50a000c0ea8342c78fab | [
"Apache-2.0"
] | 2 | 2021-06-08T06:11:27.000Z | 2021-08-03T16:13:23.000Z | tools/tcpconnect.py | swiftomkar/bcc | 070e6c8bc0a76341eeab50a000c0ea8342c78fab | [
"Apache-2.0"
] | 1 | 2020-12-17T10:43:39.000Z | 2020-12-17T10:43:39.000Z | tools/tcpconnect.py | swiftomkar/bcc | 070e6c8bc0a76341eeab50a000c0ea8342c78fab | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
# @lint-avoid-python-3-compatibility-imports
#
# tcpconnect Trace TCP connect()s.
# For Linux, uses BCC, eBPF. Embedded C.
#
# USAGE: tcpconnect [-h] [-c] [-t] [-p PID] [-P PORT [PORT ...]]
#
# All connection attempts are traced, even if they ultimately fail.
#
# This uses dynamic tracing of kernel functions, and will need to be updated
# to match kernel changes.
#
# Copyright (c) 2015 Brendan Gregg.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 25-Sep-2015 Brendan Gregg Created this.
# 14-Feb-2016 " " Switch to bpf_perf_output.
# 09-Jan-2019 Takuma Kume Support filtering by UID
# 30-Jul-2019 Xiaozhou Liu Count connects.
# 07-Oct-2020 Nabil Schear Correlate connects with DNS responses
from __future__ import print_function
from bcc import BPF
from bcc.containers import filter_by_containers
from bcc.utils import printb
import argparse
from socket import inet_ntop, ntohs, AF_INET, AF_INET6
from struct import pack
from time import sleep
from datetime import datetime
# arguments
examples = """examples:
./tcpconnect # trace all TCP connect()s
./tcpconnect -t # include timestamps
./tcpconnect -d # include DNS queries associated with connects
./tcpconnect -p 181 # only trace PID 181
./tcpconnect -P 80 # only trace port 80
./tcpconnect -P 80,81 # only trace port 80 and 81
./tcpconnect -U # include UID
./tcpconnect -u 1000 # only trace UID 1000
./tcpconnect -c # count connects per src ip and dest ip/port
./tcpconnect --cgroupmap mappath # only trace cgroups in this BPF map
./tcpconnect --mntnsmap mappath # only trace mount namespaces in the map
"""
parser = argparse.ArgumentParser(
description="Trace TCP connects",
formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=examples)
parser.add_argument("-t", "--timestamp", action="store_true",
help="include timestamp on output")
parser.add_argument("-p", "--pid",
help="trace this PID only")
parser.add_argument("-P", "--port",
help="comma-separated list of destination ports to trace.")
parser.add_argument("-U", "--print-uid", action="store_true",
help="include UID on output")
parser.add_argument("-u", "--uid",
help="trace this UID only")
parser.add_argument("-c", "--count", action="store_true",
help="count connects per src ip and dest ip/port")
parser.add_argument("--cgroupmap",
help="trace cgroups in this BPF map only")
parser.add_argument("--mntnsmap",
help="trace mount namespaces in this BPF map only")
parser.add_argument("-d", "--dns", action="store_true",
help="include likely DNS query associated with each connect")
parser.add_argument("--ebpf", action="store_true",
help=argparse.SUPPRESS)
args = parser.parse_args()
debug = 0
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <net/sock.h>
#include <bcc/proto.h>
BPF_HASH(currsock, u32, struct sock *);
// separate data structs for ipv4 and ipv6
struct ipv4_data_t {
u64 ts_us;
u32 pid;
u32 uid;
u32 saddr;
u32 daddr;
u64 ip;
u16 dport;
char task[TASK_COMM_LEN];
};
BPF_PERF_OUTPUT(ipv4_events);
struct ipv6_data_t {
u64 ts_us;
u32 pid;
u32 uid;
unsigned __int128 saddr;
unsigned __int128 daddr;
u64 ip;
u16 dport;
char task[TASK_COMM_LEN];
};
BPF_PERF_OUTPUT(ipv6_events);
// separate flow keys per address family
struct ipv4_flow_key_t {
u32 saddr;
u32 daddr;
u16 dport;
};
BPF_HASH(ipv4_count, struct ipv4_flow_key_t);
struct ipv6_flow_key_t {
unsigned __int128 saddr;
unsigned __int128 daddr;
u16 dport;
};
BPF_HASH(ipv6_count, struct ipv6_flow_key_t);
int trace_connect_entry(struct pt_regs *ctx, struct sock *sk)
{
if (container_should_be_filtered()) {
return 0;
}
u64 pid_tgid = bpf_get_current_pid_tgid();
u32 pid = pid_tgid >> 32;
u32 tid = pid_tgid;
FILTER_PID
u32 uid = bpf_get_current_uid_gid();
FILTER_UID
// stash the sock ptr for lookup on return
currsock.update(&tid, &sk);
return 0;
};
static int trace_connect_return(struct pt_regs *ctx, short ipver)
{
int ret = PT_REGS_RC(ctx);
u64 pid_tgid = bpf_get_current_pid_tgid();
u32 pid = pid_tgid >> 32;
u32 tid = pid_tgid;
struct sock **skpp;
skpp = currsock.lookup(&tid);
if (skpp == 0) {
return 0; // missed entry
}
if (ret != 0) {
// failed to send SYNC packet, may not have populated
// socket __sk_common.{skc_rcv_saddr, ...}
currsock.delete(&tid);
return 0;
}
// pull in details
struct sock *skp = *skpp;
u16 dport = skp->__sk_common.skc_dport;
FILTER_PORT
if (ipver == 4) {
IPV4_CODE
} else /* 6 */ {
IPV6_CODE
}
currsock.delete(&tid);
return 0;
}
int trace_connect_v4_return(struct pt_regs *ctx)
{
return trace_connect_return(ctx, 4);
}
int trace_connect_v6_return(struct pt_regs *ctx)
{
return trace_connect_return(ctx, 6);
}
"""
struct_init = {'ipv4':
{'count':
"""
struct ipv4_flow_key_t flow_key = {};
flow_key.saddr = skp->__sk_common.skc_rcv_saddr;
flow_key.daddr = skp->__sk_common.skc_daddr;
flow_key.dport = ntohs(dport);
ipv4_count.increment(flow_key);""",
'trace':
"""
struct ipv4_data_t data4 = {.pid = pid, .ip = ipver};
data4.uid = bpf_get_current_uid_gid();
data4.ts_us = bpf_ktime_get_ns() / 1000;
data4.saddr = skp->__sk_common.skc_rcv_saddr;
data4.daddr = skp->__sk_common.skc_daddr;
data4.dport = ntohs(dport);
bpf_get_current_comm(&data4.task, sizeof(data4.task));
ipv4_events.perf_submit(ctx, &data4, sizeof(data4));"""
},
'ipv6':
{'count':
"""
struct ipv6_flow_key_t flow_key = {};
bpf_probe_read_kernel(&flow_key.saddr, sizeof(flow_key.saddr),
skp->__sk_common.skc_v6_rcv_saddr.in6_u.u6_addr32);
bpf_probe_read_kernel(&flow_key.daddr, sizeof(flow_key.daddr),
skp->__sk_common.skc_v6_daddr.in6_u.u6_addr32);
flow_key.dport = ntohs(dport);
ipv6_count.increment(flow_key);""",
'trace':
"""
struct ipv6_data_t data6 = {.pid = pid, .ip = ipver};
data6.uid = bpf_get_current_uid_gid();
data6.ts_us = bpf_ktime_get_ns() / 1000;
bpf_probe_read_kernel(&data6.saddr, sizeof(data6.saddr),
skp->__sk_common.skc_v6_rcv_saddr.in6_u.u6_addr32);
bpf_probe_read_kernel(&data6.daddr, sizeof(data6.daddr),
skp->__sk_common.skc_v6_daddr.in6_u.u6_addr32);
data6.dport = ntohs(dport);
bpf_get_current_comm(&data6.task, sizeof(data6.task));
ipv6_events.perf_submit(ctx, &data6, sizeof(data6));"""
}
}
# This defines an additional BPF program that instruments udp_recvmsg system
# call to locate DNS response packets on UDP port 53. When these packets are
# located, the data is copied to user-space where python will parse them with
# dnslib.
#
# uses a percpu array of length 1 to store the dns_data_t off the stack to
# allow for a maximum DNS packet length of 512 bytes.
dns_bpf_text = """
#include <net/inet_sock.h>
#define MAX_PKT 512
struct dns_data_t {
u8 pkt[MAX_PKT];
};
BPF_PERF_OUTPUT(dns_events);
// store msghdr pointer captured on syscall entry to parse on syscall return
BPF_HASH(tbl_udp_msg_hdr, u64, struct msghdr *);
// single element per-cpu array to hold the current event off the stack
BPF_PERCPU_ARRAY(dns_data,struct dns_data_t,1);
int trace_udp_recvmsg(struct pt_regs *ctx)
{
__u64 pid_tgid = bpf_get_current_pid_tgid();
struct sock *sk = (struct sock *)PT_REGS_PARM1(ctx);
struct inet_sock *is = inet_sk(sk);
// only grab port 53 packets, 13568 is ntohs(53)
if (is->inet_dport == 13568) {
struct msghdr *msghdr = (struct msghdr *)PT_REGS_PARM2(ctx);
tbl_udp_msg_hdr.update(&pid_tgid, &msghdr);
}
return 0;
}
int trace_udp_ret_recvmsg(struct pt_regs *ctx)
{
__u64 pid_tgid = bpf_get_current_pid_tgid();
u32 zero = 0;
struct msghdr **msgpp = tbl_udp_msg_hdr.lookup(&pid_tgid);
if (msgpp == 0)
return 0;
struct msghdr *msghdr = (struct msghdr *)*msgpp;
if (msghdr->msg_iter.type != ITER_IOVEC)
goto delete_and_return;
int copied = (int)PT_REGS_RC(ctx);
if (copied < 0)
goto delete_and_return;
size_t buflen = (size_t)copied;
if (buflen > msghdr->msg_iter.iov->iov_len)
goto delete_and_return;
if (buflen > MAX_PKT)
buflen = MAX_PKT;
struct dns_data_t *data = dns_data.lookup(&zero);
if (!data) // this should never happen, just making the verifier happy
return 0;
void *iovbase = msghdr->msg_iter.iov->iov_base;
bpf_probe_read(data->pkt, buflen, iovbase);
dns_events.perf_submit(ctx, data, buflen);
delete_and_return:
tbl_udp_msg_hdr.delete(&pid_tgid);
return 0;
}
"""
# -c aggregates in kernel maps and never emits per-event records, so there
# is no connect event to annotate with a DNS query: the modes are exclusive.
if args.count and args.dns:
    print("Error: you may not specify -d/--dns with -c/--count.")
    exit()

# code substitutions
# Pick the per-address-family snippet: 'count' aggregates flows in BPF maps,
# 'trace' submits one perf event per connect to user space.
if args.count:
    bpf_text = bpf_text.replace("IPV4_CODE", struct_init['ipv4']['count'])
    bpf_text = bpf_text.replace("IPV6_CODE", struct_init['ipv6']['count'])
else:
    bpf_text = bpf_text.replace("IPV4_CODE", struct_init['ipv4']['trace'])
    bpf_text = bpf_text.replace("IPV6_CODE", struct_init['ipv6']['trace'])

# Inline the requested filters into the BPF C source.
if args.pid:
    bpf_text = bpf_text.replace('FILTER_PID',
        'if (pid != %s) { return 0; }' % args.pid)
if args.port:
    dports = [int(dport) for dport in args.port.split(',')]
    # skc_dport is compared in network byte order (see the ntohs() use in
    # the C snippets), so convert each requested port with ntohs().
    dports_if = ' && '.join(['dport != %d' % ntohs(dport) for dport in dports])
    bpf_text = bpf_text.replace('FILTER_PORT',
        'if (%s) { currsock.delete(&tid); return 0; }' % dports_if)
if args.uid:
    bpf_text = bpf_text.replace('FILTER_UID',
        'if (uid != %s) { return 0; }' % args.uid)
bpf_text = filter_by_containers(args) + bpf_text

# Clear any placeholders left over when the matching filter was not requested.
bpf_text = bpf_text.replace('FILTER_PID', '')
bpf_text = bpf_text.replace('FILTER_PORT', '')
bpf_text = bpf_text.replace('FILTER_UID', '')

# DNS correlation needs the extra udp_recvmsg instrumentation program.
if args.dns:
    bpf_text += dns_bpf_text

if debug or args.ebpf:
    print(bpf_text)
    if args.ebpf:
        exit()
# process event
def print_ipv4_event(cpu, data, size):
    """Perf-buffer callback: format and print a single IPv4 connect event."""
    global start_ts
    event = b["ipv4_events"].event(data)
    if args.timestamp:
        # Anchor relative timestamps at the first event seen.
        if start_ts == 0:
            start_ts = event.ts_us
        elapsed_s = (float(event.ts_us) - start_ts) / 1000000
        printb(b"%-9.3f" % elapsed_s, nl="")
    if args.print_uid:
        printb(b"%-6d" % event.uid, nl="")
    src_ip = inet_ntop(AF_INET, pack("I", event.saddr)).encode()
    dest_ip = inet_ntop(AF_INET, pack("I", event.daddr)).encode()
    columns = (event.pid, event.task, event.ip, src_ip, dest_ip,
               event.dport, print_dns(dest_ip))
    printb(b"%-6d %-12.12s %-2d %-16s %-16s %-6d %s" % columns)
def print_ipv6_event(cpu, data, size):
    """Perf-buffer callback: format and print a single IPv6 connect event."""
    global start_ts
    event = b["ipv6_events"].event(data)
    if args.timestamp:
        # Anchor relative timestamps at the first event seen.
        if start_ts == 0:
            start_ts = event.ts_us
        elapsed_s = (float(event.ts_us) - start_ts) / 1000000
        printb(b"%-9.3f" % elapsed_s, nl="")
    if args.print_uid:
        printb(b"%-6d" % event.uid, nl="")
    src_ip = inet_ntop(AF_INET6, event.saddr).encode()
    dest_ip = inet_ntop(AF_INET6, event.daddr).encode()
    columns = (event.pid, event.task, event.ip, src_ip, dest_ip,
               event.dport, print_dns(dest_ip))
    printb(b"%-6d %-12.12s %-2d %-16s %-16s %-6d %s" % columns)
def depict_cnt(counts_tab, l3prot='ipv4'):
    """Print one line per flow key, ordered by descending connect count.

    `counts_tab` maps flow-key records (saddr/daddr/dport) to counters
    exposing a `.value` attribute.
    """
    ranked = sorted(counts_tab.items(), key=lambda item: item[1].value,
                    reverse=True)
    for flow_key, counter in ranked:
        if l3prot == 'ipv4':
            # IPv4 addresses arrive as u32 host-struct values; repack to bytes.
            saddr = inet_ntop(AF_INET, pack('I', flow_key.saddr))
            daddr = inet_ntop(AF_INET, pack('I', flow_key.daddr))
        else:
            saddr = inet_ntop(AF_INET6, flow_key.saddr)
            daddr = inet_ntop(AF_INET6, flow_key.daddr)
        depict_key = "%-25s %-25s %-20s" % (saddr, daddr, flow_key.dport)
        print("%s %-10d" % (depict_key, counter.value))
def print_dns(dest_ip):
    """Return a bytes annotation for dest_ip: the cached DNS query name,
    plus the query-to-connect delay when it exceeds DELAY_DNS ms.

    Returns b"" when DNS correlation (-d) is disabled.
    """
    if not args.dns:
        return b""

    dnsname, timestamp = dns_cache.get(dest_ip, (None, None))
    delay_ms = 0
    if timestamp is not None:
        age = datetime.now() - timestamp
        delay_ms = float(age.seconds) * 1000 + float(age.microseconds) / 1000
    if dnsname is None:
        # Loopback connects never produce a DNS lookup; label them directly.
        if dest_ip == b"127.0.0.1" or dest_ip == b"::1":
            dnsname = b"localhost"
        else:
            dnsname = b"No DNS Query"
    annotation = b"%s" % dnsname
    if delay_ms > DELAY_DNS:
        annotation += b" (%.3fms)" % delay_ms
    return annotation
if args.dns:
    # dnslib parses raw DNS packets; cachetools supplies a TTL-bounded cache.
    try:
        import dnslib
        from cachetools import TTLCache
    except ImportError:
        print("Error: The python packages dnslib and cachetools are required "
              "to use the -d/--dns option.")
        print("Install this package with:")
        print("\t$ pip3 install dnslib cachetools")
        print(" or")
        print("\t$ sudo apt-get install python3-dnslib python3-cachetools "
              "(on Ubuntu 18.04+)")
        exit(1)

    # 24 hours
    DEFAULT_TTL = 86400

    # Cache Size in entries
    DNS_CACHE_SIZE = 10240

    # delay in ms in which to warn users of long delay between the query
    # and the connect that used the IP
    DELAY_DNS = 100

    # Maps resolved IP (bytes) -> (query name, time the response was seen).
    dns_cache = TTLCache(maxsize=DNS_CACHE_SIZE, ttl=DEFAULT_TTL)

    # process event
    def save_dns(cpu, data, size):
        """Perf-buffer callback: cache A/AAAA answers from DNS responses."""
        event = b["dns_events"].event(data)
        payload = event.pkt[:size]

        # pass the payload to dnslib for parsing
        dnspkt = dnslib.DNSRecord.parse(payload)
        # lets only look at responses (QR flag set)
        if dnspkt.header.qr != 1:
            return
        # must be some questions in there
        if dnspkt.header.q != 1:
            return
        # make sure there are answers
        if dnspkt.header.a == 0 and dnspkt.header.aa == 0:
            return
        # lop off the trailing .
        question = ("%s" % dnspkt.q.qname)[:-1].encode('utf-8')
        for answer in dnspkt.rr:
            # skip all but A and AAAA records (rtype 1 and 28)
            if answer.rtype == 1 or answer.rtype == 28:
                dns_cache[str(answer.rdata).encode('utf-8')] = (question,
                                                                datetime.now())
# initialize BPF: compile the assembled C program and attach the probes.
b = BPF(text=bpf_text)
b.attach_kprobe(event="tcp_v4_connect", fn_name="trace_connect_entry")
b.attach_kprobe(event="tcp_v6_connect", fn_name="trace_connect_entry")
b.attach_kretprobe(event="tcp_v4_connect", fn_name="trace_connect_v4_return")
b.attach_kretprobe(event="tcp_v6_connect", fn_name="trace_connect_v6_return")
if args.dns:
    # Also instrument udp_recvmsg so port-53 responses reach save_dns.
    b.attach_kprobe(event="udp_recvmsg", fn_name="trace_udp_recvmsg")
    b.attach_kretprobe(event="udp_recvmsg", fn_name="trace_udp_ret_recvmsg")

print("Tracing connect ... Hit Ctrl-C to end")
if args.count:
    # Counting mode: idle until interrupted, then dump the aggregated maps.
    try:
        while True:
            sleep(99999999)
    except KeyboardInterrupt:
        pass

    # header
    print("\n%-25s %-25s %-20s %-10s" % (
        "LADDR", "RADDR", "RPORT", "CONNECTS"))
    depict_cnt(b["ipv4_count"])
    depict_cnt(b["ipv6_count"], l3prot='ipv6')
# read events
else:
    # header (columns must mirror the printb format in the event callbacks)
    if args.timestamp:
        print("%-9s" % ("TIME(s)"), end="")
    if args.print_uid:
        print("%-6s" % ("UID"), end="")
    print("%-6s %-12s %-2s %-16s %-16s %-6s" % ("PID", "COMM", "IP", "SADDR",
        "DADDR", "DPORT"), end="")
    if args.dns:
        print(" QUERY")
    else:
        print()

    start_ts = 0

    # read events: poll the perf buffers and dispatch to the callbacks above.
    b["ipv4_events"].open_perf_buffer(print_ipv4_event)
    b["ipv6_events"].open_perf_buffer(print_ipv6_event)
    if args.dns:
        b["dns_events"].open_perf_buffer(save_dns)
    while True:
        try:
            b.perf_buffer_poll()
        except KeyboardInterrupt:
            exit()
| 31.748047 | 79 | 0.622086 |
acdf17cc2fbedc6d54af742c96d887d3d2f03f3c | 1,525 | py | Python | python/scrutiny/server/device/request_dispatcher.py | scrutinydebugger/scrutiny | 7530790ae40f20c3008fe196e39cab07c4b73445 | [
"MIT"
] | 1 | 2021-04-18T02:58:59.000Z | 2021-04-18T02:58:59.000Z | python/scrutiny/server/device/request_dispatcher.py | scrutinydebugger/scrutiny | 7530790ae40f20c3008fe196e39cab07c4b73445 | [
"MIT"
] | null | null | null | python/scrutiny/server/device/request_dispatcher.py | scrutinydebugger/scrutiny | 7530790ae40f20c3008fe196e39cab07c4b73445 | [
"MIT"
] | null | null | null | import queue
class RequestDispatcher:
class RequestRecord:
__slots__ = ('request', 'success_callback', 'failure_callback', 'success_params', 'failure_params', 'completed')
def __init__(self):
self.completed = False
def complete(self, success=False, response=None, response_data = None):
self.completed = True # Set to true at beginning so that it is still true if an exception raise in the callback
if success:
if response is None or response_data is None:
raise ValueError('Missing response')
self.success_callback(self.request, response.code, response_data, self.success_params)
else:
self.failure_callback(self.request, self.failure_params)
def is_completed(self):
return self.completed
def __init__(self):
self.request_queue = queue.PriorityQueue()
def register_request(self, request, success_callback, failure_callback, priority=0, success_params = None, failure_params=None):
record = self.RequestRecord()
record.request = request
record.success_callback = success_callback
record.success_params = success_params
record.failure_callback = failure_callback
record.failure_params = failure_params
self.request_queue.put((priority, record))
def next(self):
if not self.request_queue.empty():
prio, req = self.request_queue.get()
return req
| 37.195122 | 132 | 0.659016 |
acdf18bd42a618a3e88bf8f2b8ecc495f989d38e | 6,518 | py | Python | test/python/squarepants_test/test_target_template.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
] | 8 | 2015-04-14T22:37:56.000Z | 2021-01-20T19:46:40.000Z | test/python/squarepants_test/test_target_template.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
] | 1 | 2016-01-13T23:19:14.000Z | 2016-01-22T22:47:48.000Z | test/python/squarepants_test/test_target_template.py | ericzundel/mvn2pants | 59776864939515bc0cae28e1b89944ce55b98b21 | [
"Apache-2.0"
] | 3 | 2015-12-13T08:35:34.000Z | 2018-08-01T17:44:59.000Z | # Tests for code in squarepants/src/main/python/squarepants/target_template.py
#
# Run with:
# ./pants test squarepants/src/test/python/squarepants_test:target_template
import pytest
import unittest2 as unittest
from squarepants.target_template import Target
class TargetTemplateTest(unittest.TestCase):
def setUp(self):
self.maxDiff = None
super(TargetTemplateTest, self).setUp()
def tearDown(self):
Target.reset()
super(TargetTemplateTest, self).tearDown()
def test_simple(self):
with self.assertRaises(Target.NoSuchTargetError):
Target.get_template('foo')
template = Target.create_template(
'foo', ['var1:string', 'var2:raw', 'var3:list'],
'var1 is {var1} var2 is {var2} var3 is {var3} var1 again is {var1}')
self.assertEquals(template, Target.get_template('foo'))
triple_quote_string = """
var1 is 'foo' var2 is bar var3 is [
'1',
'2'
] var1 again is 'foo'
"""
self.assertEquals(triple_quote_string,
template.format(var1='foo', var2='bar', var3=['1', '2']))
def test_default_types(self):
template = Target.create_template(
'foo', ['name', 'sources', 'resources', 'dependencies', 'imports', 'var1:string'],
'name={name} sources={sources} var1={var1} resources={resources} '
'depencencies={dependencies} imports={imports}')
result = template.format(name='n', sources=['s1', 's2'], resources=['r1', 'r2'],
dependencies=['d1', 'd2'], imports=['i1', 'i2'],
var1='v')
triple_quote_string = """
name='n' sources=[
's1',
's2'
] var1='v' resources=[
'r1',
'r2'
] depencencies=[
'd1',
'd2'
] imports=[
'i1',
'i2'
]
"""
self.assertEquals(triple_quote_string, result)
def test_no_such_value(self):
template = Target.create_template('foo', [], 'var1 is {var1}')
with self.assertRaises(KeyError):
template.format(var1='foo')
def test_optional_flag(self):
template = Target.create_template('target',
['name:string', 'sources:list', 'dependencies:list:optional',
'foobar:raw:optional'],
'target(name={name}, sources={sources}, '
'dependencies={dependencies}, foobar={foobar})')
triple_quote_string = """
target(name='my name', sources=[
'one.txt',
'two.txt'
], foobar=True)
"""
self.assertEquals(triple_quote_string, template.format(name='my name',
sources=['one.txt', 'two.txt'],
foobar=True))
with self.assertRaises(Target.MissingTemplateArgumentError):
template.format(name='my name', foobar=True)
def test_collapsible_flag(self):
template = Target.create_template('target',
['name:string',
'collapsible_list:list:collapsible',
'normal_list:list',],
'target(name={name}, collapsible_list={collapsible_list}, '
'normal_list={normal_list})')
triple_quote_string = """
target(name='my name', collapsible_list=['one.txt'], normal_list=[
':foobar'
])
"""
self.assertEquals(triple_quote_string, template.format(name='my name',
collapsible_list=['one.txt'],
normal_list=[':foobar']))
def test_sorted_flag(self):
template = Target.create_template('target', ['name:string', 'sources:list:sorted'],
'target(name={name}, sources={sources})')
triple_quote_string = """
target(name='my name', sources=[
'a.txt',
'b.txt',
'one.txt',
'two.txt',
'zebra.txt'
])
"""
self.assertEquals(triple_quote_string, template.format(name='my name',
sources=['one.txt', 'two.txt',
'a.txt', 'b.txt', 'zebra.txt']))
def test_symbol_substitution(self):
template = Target.create_template('target', ['name:string', 'sources:list'],
'target(name={name}, sources={sources},\n)')
triple_quote_string = """
target(name='my foobar', sources=[
'${symbol-not-present}',
'foobar.txt',
'hello.txt',
'potato.txt'
],
)
"""
formatted_target = template.format(name='my ${name}',
sources=[
'${symbol-not-present}',
'${name}.txt',
'${greeting.file}',
'${vegetable.file}',
],
symbols={
'name': 'foobar',
'greeting.file': 'hello.txt',
'vegetable.file': 'potato.txt',
},
)
self.assertEquals(triple_quote_string, formatted_target)
def test_format_list(self):
result = Target.jar_library._format_list(
"foo",
["jar(org='com.example',name='a',rev='1',excludes=[exclude(org='bar', name='b'),exclude(org='bar', name='c'),],)"])
self.assertEquals("""
[
jar(org='com.example',name='a',rev='1',excludes=[exclude(org='bar', name='b'),exclude(org='bar', name='c'),],)
]
""".strip(), result)
def test_format_item(self):
result = Target.jar_library._format_item(
"jar(org='com.example', name='a', rev='1', excludes=[ exclude(org='bar', name='b'), exclude(org='bar', name='c'),],)")
self.assertEquals("""
jar(org='com.example', name='a', rev='1', excludes=[ exclude(org='bar', name='b'), exclude(org='bar', name='c'),],)
""".strip(), result)
# This test demonstrates a problem when using the format() method with some types of values.
# This is why we don't use Target.jar_library.format() in generate_third_party.py
@pytest.mark.xfail
def test_jar_library(self):
jar="""sjar(org='com.example', name='a', rev='0.8.0',
excludes=[
exclude(org='bar', name='b'),
exclude(org='bar', name='c'),
],
)"""
jar_library = Target.jar_library.format(name="foo", jars=[jar,])
triple_quote_string="""
jar_library(name='foo',
jars = [
sjar(org='com.example', name='a', rev='0.8.0',
excludes=[
exclude(org='bar', name='b'),
exclude(org='bar', name='c'),
],
)
],
)
"""
self.assertEquals(triple_quote_string, jar_library)
| 33.947917 | 124 | 0.555078 |
acdf1913fd55801105849ab092b489d828367687 | 201 | py | Python | external/fv3fit/fv3fit/emulation/data/__init__.py | VulcanClimateModeling/fv3net | d5908bd7ac3534362eac33f0b907d3af699bdf39 | [
"MIT"
] | 5 | 2021-03-20T22:42:40.000Z | 2021-06-30T18:39:36.000Z | external/fv3fit/fv3fit/emulation/data/__init__.py | VulcanClimateModeling/fv3net | d5908bd7ac3534362eac33f0b907d3af699bdf39 | [
"MIT"
] | 154 | 2021-03-17T19:44:29.000Z | 2021-09-15T23:02:12.000Z | external/fv3fit/fv3fit/emulation/data/__init__.py | VulcanClimateModeling/fv3net | d5908bd7ac3534362eac33f0b907d3af699bdf39 | [
"MIT"
] | 1 | 2021-06-16T22:04:24.000Z | 2021-06-16T22:04:24.000Z | from .config import TransformConfig
from . import transforms
from .load import nc_files_to_tf_dataset, nc_dir_to_tf_dataset
from .io import get_nc_files
from .dict_dataset import netcdf_url_to_dataset
| 33.5 | 62 | 0.865672 |
acdf1a8f87704d7d64d5d868ecbe69e98bc5f479 | 6,794 | py | Python | bin/sa_haveibeenpwned/aob_py3/solnlib/packages/splunklib/modularinput/script.py | hRun/SA-haveibeenpwned | 2a8ae3dedc405dc3c8dac1cb6a705a70f574afdb | [
"Apache-2.0"
] | 2 | 2020-08-17T07:52:48.000Z | 2020-12-18T16:39:32.000Z | bin/sa_haveibeenpwned/aob_py3/solnlib/packages/splunklib/modularinput/script.py | hRun/SA-haveibeenpwned | 2a8ae3dedc405dc3c8dac1cb6a705a70f574afdb | [
"Apache-2.0"
] | 5 | 2020-12-15T23:40:14.000Z | 2022-02-23T15:43:18.000Z | bin/sa_haveibeenpwned/aob_py2/solnlib/packages/splunklib/modularinput/script.py | hRun/SA-haveibeenpwned | 2a8ae3dedc405dc3c8dac1cb6a705a70f574afdb | [
"Apache-2.0"
] | 4 | 2019-05-16T09:57:33.000Z | 2021-07-14T12:31:21.000Z | # Copyright 2011-2015 Splunk, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from ..six.moves.urllib.parse import urlsplit
import sys
from ..client import Service
from .event_writer import EventWriter
from .input_definition import InputDefinition
from .validation_definition import ValidationDefinition
from .. import six
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
class Script(six.with_metaclass(ABCMeta, object)):
    """An abstract base class for implementing modular inputs.
    Subclasses should override ``get_scheme``, ``stream_events``,
    and optionally ``validate_input`` if the modular input uses
    external validation.
    The ``run`` function is used to run modular inputs; it typically should
    not be overridden.
    """
    def __init__(self):
        # Both fields are populated lazily: the input definition arrives on
        # stdin when the script runs in streaming mode, and the Service
        # object is built on first access to the ``service`` property.
        self._input_definition = None
        self._service = None
    def run(self, args):
        """Runs this modular input
        :param args: List of command line arguments passed to this script.
        :returns: An integer to be used as the exit value of this program.
        """
        # call the run_script function, which handles the specifics of running
        # a modular input
        return self.run_script(args, EventWriter(), sys.stdin)
    def run_script(self, args, event_writer, input_stream):
        """Handles all the specifics of running a modular input
        :param args: List of command line arguments passed to this script.
        :param event_writer: An ``EventWriter`` object for writing events.
        :param input_stream: An input stream for reading inputs.
        :returns: An integer to be used as the exit value of this program.
        """
        # Splunk drives modular inputs through three invocation modes,
        # distinguished solely by the command line: no extra argument
        # (streaming mode), "--scheme", or "--validate-arguments".
        try:
            if len(args) == 1:
                # This script is running as an input. Input definitions will be
                # passed on stdin as XML, and the script will write events on
                # stdout and log entries on stderr.
                self._input_definition = InputDefinition.parse(input_stream)
                self.stream_events(self._input_definition, event_writer)
                event_writer.close()
                return 0
            elif str(args[1]).lower() == "--scheme":
                # Splunk has requested XML specifying the scheme for this
                # modular input Return it and exit.
                # NOTE(review): this branch wraps args[1] in str() while the
                # --validate-arguments branch below does not; presumably both
                # always receive str values -- confirm before unifying.
                scheme = self.get_scheme()
                if scheme is None:
                    event_writer.log(
                        EventWriter.FATAL,
                        "Modular input script returned a null scheme.")
                    return 1
                else:
                    event_writer.write_xml_document(scheme.to_xml())
                    return 0
            elif args[1].lower() == "--validate-arguments":
                # External validation mode: parse the proposed configuration
                # from stdin and let the subclass accept or reject it.
                validation_definition = ValidationDefinition.parse(input_stream)
                try:
                    self.validate_input(validation_definition)
                    return 0
                except Exception as e:
                    # Report the rejection back to splunkd as an <error>
                    # XML document containing the exception message.
                    root = ET.Element("error")
                    ET.SubElement(root, "message").text = str(e)
                    event_writer.write_xml_document(root)
                    return 1
            else:
                # Unknown argument: echo the full command line to the error
                # stream so it shows up in splunkd's logs.
                err_string = "ERROR Invalid arguments to modular input script:" + ' '.join(
                    args)
                event_writer._err.write(err_string)
                return 1
        except Exception as e:
            # Any unexpected error is written to the error stream and turned
            # into a non-zero exit status instead of an unhandled traceback.
            err_string = EventWriter.ERROR + str(e)
            event_writer._err.write(err_string)
            return 1
    @property
    def service(self):
        """ Returns a Splunk service object for this script invocation.
        The service object is created from the Splunkd URI and session key
        passed to the command invocation on the modular input stream. It is
        available as soon as the :code:`Script.stream_events` method is
        called.
        :return: :class:splunklib.client.Service. A value of None is returned,
        if you call this method before the :code:`Script.stream_events` method
        is called.
        """
        # Return the cached instance when one was already built.
        if self._service is not None:
            return self._service
        # No input definition yet means stream_events has not run, so the
        # connection details (URI, session key) are not available.
        if self._input_definition is None:
            return None
        splunkd_uri = self._input_definition.metadata["server_uri"]
        session_key = self._input_definition.metadata["session_key"]
        # Split the URI into scheme/host/port for the Service constructor.
        splunkd = urlsplit(splunkd_uri, allow_fragments=False)
        self._service = Service(
            scheme=splunkd.scheme,
            host=splunkd.hostname,
            port=splunkd.port,
            token=session_key,
        )
        return self._service
    @abstractmethod
    def get_scheme(self):
        """The scheme defines the parameters understood by this modular input.
        :return: a ``Scheme`` object representing the parameters for this modular input.
        """
    def validate_input(self, definition):
        """Handles external validation for modular input kinds.
        When Splunk calls a modular input script in validation mode, it will
        pass in an XML document giving information about the Splunk instance (so
        you can call back into it if needed) and the name and parameters of the
        proposed input.
        If this function does not throw an exception, the validation is assumed
        to succeed. Otherwise any errors thrown will be turned into a string and
        logged back to Splunk.
        The default implementation always passes.
        :param definition: The parameters for the proposed input passed by splunkd.
        """
        pass
    @abstractmethod
    def stream_events(self, inputs, ew):
        """The method called to stream events into Splunk. It should do all of its output via
        EventWriter rather than assuming that there is a console attached.
        :param inputs: An ``InputDefinition`` object.
        :param ew: An object with methods to write events and log messages to Splunk.
        """
| 37.955307 | 94 | 0.621725 |
acdf1af2255b81466614f6cf31e02f262649e23c | 3,281 | py | Python | avatar2/archs/arm.py | rawsample/avatar2 | 4d772f72a06a16a7d80e2edcf8544727d0755704 | [
"Apache-2.0"
] | null | null | null | avatar2/archs/arm.py | rawsample/avatar2 | 4d772f72a06a16a7d80e2edcf8544727d0755704 | [
"Apache-2.0"
] | null | null | null | avatar2/archs/arm.py | rawsample/avatar2 | 4d772f72a06a16a7d80e2edcf8544727d0755704 | [
"Apache-2.0"
] | null | null | null | # from capstone import CS_ARCH_ARM, CS_MODE_LITTLE_ENDIAN, CS_MODE_BIG_ENDIAN
from capstone import *
from keystone.keystone_const import *
from unicorn import *
from unicorn.arm_const import *
from .architecture import Architecture
import avatar2
from avatar2.installer.config import QEMU, PANDA, OPENOCD, GDB_MULTI
class ARM(Architecture):
    """Little-endian ARM architecture description used by avatar2 targets."""

    # Resolvers locating the executables of the supported backends.
    get_qemu_executable = Architecture.resolve(QEMU)
    get_panda_executable = Architecture.resolve(PANDA)
    get_gdb_executable = Architecture.resolve(GDB_MULTI)
    get_oocd_executable = Architecture.resolve(OPENOCD)

    qemu_name = 'arm'
    gdb_name = 'arm'

    # GDB register numbering: r0-r12 map to 0-12, then the stack pointer,
    # link register, program counter and status register.
    registers = {'r%d' % num: num for num in range(13)}
    registers.update({'sp': 13, 'lr': 14, 'pc': 15, 'cpsr': 25})

    # Register-name to unicorn-constant mapping for the emulator backend.
    unicorn_registers = {
        'r0': UC_ARM_REG_R0,
        'r1': UC_ARM_REG_R1,
        'r2': UC_ARM_REG_R2,
        'r3': UC_ARM_REG_R3,
        'r4': UC_ARM_REG_R4,
        'r5': UC_ARM_REG_R5,
        'r6': UC_ARM_REG_R6,
        'r7': UC_ARM_REG_R7,
        'r8': UC_ARM_REG_R8,
        'r9': UC_ARM_REG_R9,
        'r10': UC_ARM_REG_R10,
        'r11': UC_ARM_REG_R11,
        'r12': UC_ARM_REG_R12,
        'sp': UC_ARM_REG_SP,
        'lr': UC_ARM_REG_LR,
        'pc': UC_ARM_REG_PC,
        'cpsr': UC_ARM_REG_CPSR,
    }

    pc_name = 'pc'
    sr_name = 'cpsr'

    # Coprocessor accesses cannot be emulated and must be forwarded.
    unemulated_instructions = ['mcr', 'mrc']

    # Disassembler / assembler / emulator configuration.
    capstone_arch = CS_ARCH_ARM
    capstone_mode = CS_MODE_LITTLE_ENDIAN
    keystone_arch = KS_ARCH_ARM
    keystone_mode = KS_MODE_ARM
    unicorn_arch = UC_ARCH_ARM
    unicorn_mode = UC_MODE_ARM
class ARM_CORTEX_M3(ARM):
    """ARM Cortex-M3 (thumb-only microcontroller profile).

    Overrides the generic ARM description with thumb/M-class disassembly
    modes and installs a watchman that keeps the thumb bit set whenever the
    program counter or status register is written on emulated targets.
    """

    cpu_model = 'cortex-m3'
    qemu_name = 'arm'
    gdb_name = 'arm'

    capstone_arch = CS_ARCH_ARM
    capstone_mode = CS_MODE_LITTLE_ENDIAN | CS_MODE_THUMB | CS_MODE_MCLASS
    # Fixed: keystone_arch used to be assigned twice; one assignment suffices.
    keystone_arch = KS_ARCH_ARM
    keystone_mode = KS_MODE_LITTLE_ENDIAN | KS_MODE_THUMB
    unicorn_arch = UC_ARCH_ARM
    unicorn_mode = UC_MODE_LITTLE_ENDIAN | UC_MODE_THUMB
    sr_name = 'xpsr'

    @staticmethod
    def register_write_cb(avatar, *args, **kwargs):
        """Watchman callback forcing the thumb bit on after pc/cpsr writes.

        :param avatar: the Avatar instance that fired the watchman (unused).
        :param args: watched-call arguments; args[0] is the register name.
        :param kwargs: must contain 'watched_target', the target on which
            the register write happened.
        """
        target = kwargs['watched_target']
        # Only QEMU-based targets (QemuTarget and its PandaTarget subclass)
        # need this correction.
        if not isinstance(target, avatar2.targets.qemu_target.QemuTarget):
            return
        # xpsr/cpsr encodes the thumbbit differently across different ISA
        # versions. Panda_target does not cleanly support cortex-m yet, and
        # hence uses the thumbbit as stored on other ARM versions.
        if isinstance(target, avatar2.targets.panda_target.PandaTarget):
            shiftval = 5
        else:
            shiftval = 24
        if args[0] not in ('pc', 'cpsr'):
            return
        cpsr = target.read_register('cpsr')
        # Re-set the thumb bit only when it was cleared by the write.
        if not cpsr & (1 << shiftval):
            target.write_register('cpsr', cpsr | (1 << shiftval))

    @staticmethod
    def init(avatar):
        """Registers the thumb-bit watchman on the given Avatar instance."""
        avatar.watchmen.add('TargetRegisterWrite', 'after',
                            ARM_CORTEX_M3.register_write_cb)
# ARMv7-M is the architecture profile implemented by the Cortex-M3, so both
# names refer to the same architecture description class.
ARMV7M = ARM_CORTEX_M3
class ARMBE(ARM):
    """Big-endian ARM: only the QEMU binary name and capstone mode differ."""
    qemu_name = 'armeb'
    capstone_mode = CS_MODE_BIG_ENDIAN
| 35.27957 | 91 | 0.614447 |
acdf1c251b53fee75699e1d692af29bf8c6ea27d | 18,882 | py | Python | cirq/ops/clifford_gate_test.py | jlmayfield/Cirq | dc1294f54118a9a4f92546ca13780b91615dd675 | [
"Apache-2.0"
] | 1 | 2019-05-10T10:26:42.000Z | 2019-05-10T10:26:42.000Z | cirq/ops/clifford_gate_test.py | jlmayfield/Cirq | dc1294f54118a9a4f92546ca13780b91615dd675 | [
"Apache-2.0"
] | null | null | null | cirq/ops/clifford_gate_test.py | jlmayfield/Cirq | dc1294f54118a9a4f92546ca13780b91615dd675 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools, functools
import pytest
from cirq.testing import (
EqualsTester,
assert_allclose_up_to_global_phase,
)
import cirq
# Parameter domains used throughout this module to enumerate every
# single-qubit Clifford transform (each Pauli axis, flipped or not).
_bools = (False, True)
_paulis = (cirq.X, cirq.Y, cirq.Z)
def _assert_not_mirror(gate) -> None:
    """Asserts the gate's Pauli mapping is a proper (right-handed) rotation,
    not a reflection of the Bloch sphere."""
    tx, ty, tz = (gate.transform(p) for p in (cirq.X, cirq.Y, cirq.Z))
    flip_parity = tx.flip ^ ty.flip ^ tz.flip
    axes_reordered = tx.to.relative_index(ty.to) != 1
    assert flip_parity ^ axes_reordered, 'Mirrors'
def _assert_no_collision(gate) -> None:
    """Asserts that X, Y and Z are mapped onto three distinct Pauli axes."""
    targets = [gate.transform(p).to for p in (cirq.X, cirq.Y, cirq.Z)]
    for first, second in itertools.combinations(targets, 2):
        assert first != second, 'Collision'
def _all_rotations():
    """Yields every PauliTransform: each Pauli axis with and without flip."""
    for pauli in _paulis:
        for flip in _bools:
            yield cirq.PauliTransform(pauli, flip)
def _all_rotation_pairs():
    """Yields every valid (X-transform, Z-transform) pair, i.e. all pairs
    whose target Pauli axes differ."""
    for trans_x, trans_z in itertools.product(_all_rotations(), repeat=2):
        if trans_x.to == trans_z.to:
            continue
        yield trans_x, trans_z
def _all_clifford_gates():
    """Returns an iterator over all 24 single-qubit Clifford gates, one per
    valid X/Z transform pair."""
    return (cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
            for trans_x, trans_z in _all_rotation_pairs())
@pytest.mark.parametrize('pauli,flip_x,flip_z',
                         itertools.product(_paulis, _bools, _bools))
def test_init_value_error(pauli, flip_x, flip_z):
    # Mapping X and Z onto the same Pauli axis must be rejected.
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_xz_map((pauli, flip_x),
                                                 (pauli, flip_z))
@pytest.mark.parametrize('trans_x,trans_z', _all_rotation_pairs())
def test_init_from_xz(trans_x, trans_z):
    # Every valid X/Z mapping must round-trip through the constructor and
    # yield a proper, collision-free rotation.
    gate = cirq.SingleQubitCliffordGate.from_xz_map(trans_x, trans_z)
    assert gate.transform(cirq.X) == trans_x
    assert gate.transform(cirq.Z) == trans_z
    _assert_not_mirror(gate)
    _assert_no_collision(gate)
@pytest.mark.parametrize('trans1,trans2,from1',
                         ((trans1, trans2, from1)
                          for trans1, trans2, from1 in itertools.product(_all_rotations(),
                                                                         _all_rotations(),
                                                                         _paulis)
                          if trans1.to != trans2.to))
def test_init_from_double_map_vs_kwargs(trans1, trans2, from1):
    # The keyword-argument form (x_to=..., etc.) and the dict form of
    # from_double_map must construct identical gates.
    from2 = cirq.Pauli.by_relative_index(from1, 1)
    from1_str, from2_str = (str(frm).lower()+'_to' for frm in (from1, from2))
    gate_kw = cirq.SingleQubitCliffordGate.from_double_map(**{from1_str: trans1,
                                                              from2_str: trans2})
    gate_map = cirq.SingleQubitCliffordGate.from_double_map({from1: trans1,
                                                             from2: trans2})
    # Test initializes the same gate
    assert gate_kw == gate_map
@pytest.mark.parametrize('trans1,trans2,from1',
                         ((trans1, trans2, from1)
                          for trans1, trans2, from1 in itertools.product(_all_rotations(),
                                                                         _all_rotations(),
                                                                         _paulis)
                          if trans1.to == trans2.to))
def test_init_from_double_invalid(trans1, trans2, from1):
    # Two source Paulis mapped onto the same target axis is not a rotation.
    from2 = cirq.Pauli.by_relative_index(from1, 1)
    # Test throws on invalid arguments
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_double_map({from1: trans1,
                                                      from2: trans2})
@pytest.mark.parametrize('trans1,trans2,from1',
                         ((trans1, trans2, from1)
                          for trans1, trans2, from1 in itertools.product(_all_rotations(),
                                                                         _all_rotations(),
                                                                         _paulis)
                          if trans1.to != trans2.to))
def test_init_from_double(trans1, trans2, from1):
    # A gate built from two transforms must reproduce both of them.
    from2 = cirq.Pauli.by_relative_index(from1, 1)
    gate = cirq.SingleQubitCliffordGate.from_double_map({from1: trans1,
                                                         from2: trans2})
    # Test initializes what was expected
    assert gate.transform(from1) == trans1
    assert gate.transform(from2) == trans2
    _assert_not_mirror(gate)
    _assert_no_collision(gate)
@pytest.mark.parametrize('trans,frm',
                         itertools.product(_all_rotations(), _paulis))
def test_init_from_single_map_vs_kwargs(trans, frm):
    # Keyword form (x_to=..., etc.) and dict form must agree.
    from_str = str(frm).lower()+'_to'
    # pylint: disable=unexpected-keyword-arg
    gate_kw = cirq.SingleQubitCliffordGate.from_single_map(**{from_str: trans})
    gate_map = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
    assert gate_kw == gate_map
@pytest.mark.parametrize('trans,frm',
                         ((trans, frm)
                          for trans, frm in itertools.product(_all_rotations(), _paulis)
                          if trans.to != frm))
def test_init_90rot_from_single(trans, frm):
    # Mapping one Pauli onto a *different* axis determines a quarter turn.
    gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
    assert gate.transform(frm) == trans
    _assert_not_mirror(gate)
    _assert_no_collision(gate)
    # Check that it decomposes to one gate
    assert len(gate.decompose_rotation()) == 1
    # Check that this is a 90 degree rotation gate
    assert (gate.merged_with(gate).merged_with(gate).merged_with(gate)
            == cirq.SingleQubitCliffordGate.I)
    # Check that flipping the transform produces the inverse rotation
    trans_rev = cirq.PauliTransform(trans.to, not trans.flip)
    gate_rev = cirq.SingleQubitCliffordGate.from_single_map({frm: trans_rev})
    assert gate**-1 == gate_rev
@pytest.mark.parametrize('trans,frm',
                         ((trans, frm)
                          for trans, frm in itertools.product(_all_rotations(), _paulis)
                          if trans.to == frm and trans.flip))
def test_init_180rot_from_single(trans, frm):
    # Mapping a Pauli onto its own negation determines a half turn.
    gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
    assert gate.transform(frm) == trans
    _assert_not_mirror(gate)
    _assert_no_collision(gate)
    # Check that it decomposes to one gate
    assert len(gate.decompose_rotation()) == 1
    # Check that this is a 180 degree rotation gate
    assert gate.merged_with(gate) == cirq.SingleQubitCliffordGate.I
@pytest.mark.parametrize('trans,frm',
                         ((trans, frm)
                          for trans, frm in itertools.product(_all_rotations(), _paulis)
                          if trans.to == frm and not trans.flip))
def test_init_ident_from_single(trans, frm):
    # Mapping a Pauli onto itself (unflipped) determines the identity.
    gate = cirq.SingleQubitCliffordGate.from_single_map({frm: trans})
    assert gate.transform(frm) == trans
    _assert_not_mirror(gate)
    _assert_no_collision(gate)
    # Check that it decomposes to zero gates
    assert len(gate.decompose_rotation()) == 0
    # Check that this is an identity gate
    assert gate == cirq.SingleQubitCliffordGate.I
@pytest.mark.parametrize('pauli,sqrt,expected', (
    (cirq.X, False, cirq.SingleQubitCliffordGate.X),
    (cirq.Y, False, cirq.SingleQubitCliffordGate.Y),
    (cirq.Z, False, cirq.SingleQubitCliffordGate.Z),
    (cirq.X, True, cirq.SingleQubitCliffordGate.X_sqrt),
    (cirq.Y, True, cirq.SingleQubitCliffordGate.Y_sqrt),
    (cirq.Z, True, cirq.SingleQubitCliffordGate.Z_sqrt)))
def test_init_from_pauli(pauli, sqrt, expected):
    # from_pauli must return the named singleton for each Pauli / sqrt flag.
    gate = cirq.SingleQubitCliffordGate.from_pauli(pauli, sqrt=sqrt)
    assert gate == expected
def test_pow():
    # Self-inverse gates, sqrt/nsqrt inversion, and rejection of
    # non-integer exponents.
    assert cirq.SingleQubitCliffordGate.X**-1 == cirq.SingleQubitCliffordGate.X
    assert cirq.SingleQubitCliffordGate.H**-1 == cirq.SingleQubitCliffordGate.H
    assert (cirq.SingleQubitCliffordGate.X_sqrt**-1 ==
            cirq.SingleQubitCliffordGate.X_nsqrt)
    assert cirq.inverse(cirq.SingleQubitCliffordGate.X_nsqrt) == (
        cirq.SingleQubitCliffordGate.X_sqrt
    )
    with pytest.raises(TypeError):
        _ = cirq.SingleQubitCliffordGate.Z**0.25
def test_init_from_quarter_turns():
    # Quarter-turn counts are taken modulo 4; each equality group below
    # collects constructions that must produce the same gate.
    eq = cirq.testing.EqualsTester()
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 0),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 0),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 0),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 4),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 4),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 4),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 8),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 8),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 8),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, -4),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, -4),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, -4)
    )
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 1),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 5),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 9),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, -3),
    )
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 1),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 5),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, 9),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Y, -3),
    )
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 1),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 5),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, 9),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.Z, -3),
    )
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 2),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 6),
    )
    eq.add_equality_group(
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 3),
        cirq.SingleQubitCliffordGate.from_quarter_turns(cirq.X, 7),
    )
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_init_from_quarter_turns_reconstruct(gate):
    # Recomposing a gate from its decompose_rotation() output must
    # reproduce the original gate.
    new_gate = functools.reduce(
        cirq.SingleQubitCliffordGate.merged_with,
        (cirq.SingleQubitCliffordGate.from_quarter_turns(pauli, qt)
         for pauli, qt in gate.decompose_rotation()),
        cirq.SingleQubitCliffordGate.I)
    assert gate == new_gate
def test_init_invalid():
    # Constructors must reject empty, underspecified, overspecified,
    # and geometrically impossible transform maps.
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map()
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map({})
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map(
            {cirq.X: (cirq.X, False)}, y_to=(cirq.Y, False))
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map(
            {cirq.X: (cirq.X, False), cirq.Y: (cirq.Y, False)})
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_double_map()
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_double_map({})
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_double_map(
            {cirq.X: (cirq.X, False)})
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_double_map(x_to=(cirq.X, False))
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map(
            {cirq.X: (cirq.Y, False),
             cirq.Y: (cirq.Z, False),
             cirq.Z: (cirq.X, False)})
    with pytest.raises(ValueError):
        cirq.SingleQubitCliffordGate.from_single_map(
            {cirq.X: (cirq.X, False),
             cirq.Y: (cirq.X, False)})
def test_eq_ne_and_hash():
    # Each distinct X/Z transform pair forms its own equality group.
    eq = EqualsTester()
    for trans_x, trans_z in _all_rotation_pairs():
        gate_gen = lambda: cirq.SingleQubitCliffordGate.from_xz_map(trans_x,
                                                                    trans_z)
        eq.make_equality_group(gate_gen)
@pytest.mark.parametrize('gate,rep', (
    (cirq.SingleQubitCliffordGate.I,
     'cirq.SingleQubitCliffordGate(X:+X, Y:+Y, Z:+Z)'),
    (cirq.SingleQubitCliffordGate.H,
     'cirq.SingleQubitCliffordGate(X:+Z, Y:-Y, Z:+X)'),
    (cirq.SingleQubitCliffordGate.X,
     'cirq.SingleQubitCliffordGate(X:+X, Y:-Y, Z:-Z)'),
    (cirq.SingleQubitCliffordGate.X_sqrt,
     'cirq.SingleQubitCliffordGate(X:+X, Y:+Z, Z:-Y)')))
def test_repr(gate, rep):
    # repr() must spell out the full X/Y/Z transform of each named gate.
    assert repr(gate) == rep
@pytest.mark.parametrize('gate,trans_y', (
    (cirq.SingleQubitCliffordGate.I, (cirq.Y, False)),
    (cirq.SingleQubitCliffordGate.H, (cirq.Y, True)),
    (cirq.SingleQubitCliffordGate.X, (cirq.Y, True)),
    (cirq.SingleQubitCliffordGate.Y, (cirq.Y, False)),
    (cirq.SingleQubitCliffordGate.Z, (cirq.Y, True)),
    (cirq.SingleQubitCliffordGate.X_sqrt, (cirq.Z, False)),
    (cirq.SingleQubitCliffordGate.X_nsqrt, (cirq.Z, True)),
    (cirq.SingleQubitCliffordGate.Y_sqrt, (cirq.Y, False)),
    (cirq.SingleQubitCliffordGate.Y_nsqrt, (cirq.Y, False)),
    (cirq.SingleQubitCliffordGate.Z_sqrt, (cirq.X, True)),
    (cirq.SingleQubitCliffordGate.Z_nsqrt, (cirq.X, False))))
def test_y_rotation(gate, trans_y):
    # The Y transform is fully determined by the X and Z transforms; pin it.
    assert gate.transform(cirq.Y) == trans_y
@pytest.mark.parametrize('gate,gate_equiv', (
    (cirq.SingleQubitCliffordGate.I, cirq.X ** 0),
    (cirq.SingleQubitCliffordGate.H, cirq.H),
    (cirq.SingleQubitCliffordGate.X, cirq.X),
    (cirq.SingleQubitCliffordGate.Y, cirq.Y),
    (cirq.SingleQubitCliffordGate.Z, cirq.Z),
    (cirq.SingleQubitCliffordGate.X_sqrt, cirq.X ** 0.5),
    (cirq.SingleQubitCliffordGate.X_nsqrt, cirq.X ** -0.5),
    (cirq.SingleQubitCliffordGate.Y_sqrt, cirq.Y ** 0.5),
    (cirq.SingleQubitCliffordGate.Y_nsqrt, cirq.Y ** -0.5),
    (cirq.SingleQubitCliffordGate.Z_sqrt, cirq.Z ** 0.5),
    (cirq.SingleQubitCliffordGate.Z_nsqrt, cirq.Z ** -0.5)))
def test_decompose(gate, gate_equiv):
    # The circuit decomposition of each named gate must match the unitary
    # of the equivalent standard gate (up to global phase).
    q0 = cirq.NamedQubit('q0')
    mat = cirq.Circuit.from_ops(
        gate(q0),
    ).to_unitary_matrix()
    mat_check = cirq.Circuit.from_ops(
        gate_equiv(q0),
    ).to_unitary_matrix()
    assert_allclose_up_to_global_phase(mat, mat_check, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate,gate_equiv', (
    (cirq.SingleQubitCliffordGate.I, cirq.X ** 0),
    (cirq.SingleQubitCliffordGate.H, cirq.H),
    (cirq.SingleQubitCliffordGate.X, cirq.X),
    (cirq.SingleQubitCliffordGate.Y, cirq.Y),
    (cirq.SingleQubitCliffordGate.Z, cirq.Z),
    (cirq.SingleQubitCliffordGate.X_sqrt, cirq.X ** 0.5),
    (cirq.SingleQubitCliffordGate.X_nsqrt, cirq.X ** -0.5),
    (cirq.SingleQubitCliffordGate.Y_sqrt, cirq.Y ** 0.5),
    (cirq.SingleQubitCliffordGate.Y_nsqrt, cirq.Y ** -0.5),
    (cirq.SingleQubitCliffordGate.Z_sqrt, cirq.Z ** 0.5),
    (cirq.SingleQubitCliffordGate.Z_nsqrt, cirq.Z ** -0.5)))
def test_known_matrix(gate, gate_equiv):
    # The directly reported unitary must also match (up to global phase).
    assert cirq.has_unitary(gate)
    mat = cirq.unitary(gate)
    mat_check = cirq.unitary(gate_equiv)
    assert_allclose_up_to_global_phase(mat, mat_check, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_inverse(gate):
    # Inversion is an involution on the Clifford group.
    assert gate == cirq.inverse(cirq.inverse(gate))
@pytest.mark.parametrize('gate', _all_clifford_gates())
def test_inverse_matrix(gate):
    # The inverse gate's unitary must be the conjugate transpose.
    q0 = cirq.NamedQubit('q0')
    mat = cirq.Circuit.from_ops(gate(q0)).to_unitary_matrix()
    mat_inv = cirq.Circuit.from_ops(cirq.inverse(gate)(q0)).to_unitary_matrix()
    assert_allclose_up_to_global_phase(mat, mat_inv.T.conj(),
                                       rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate,other',
                         itertools.product(_all_clifford_gates(),
                                           _all_clifford_gates()))
def test_commutes_with_single_qubit_gate(gate, other):
    # commutes_with must agree with an explicit unitary comparison of the
    # two operation orders.
    q0 = cirq.NamedQubit('q0')
    mat = cirq.Circuit.from_ops(
        gate(q0),
        other(q0),
    ).to_unitary_matrix()
    mat_swap = cirq.Circuit.from_ops(
        other(q0),
        gate(q0),
    ).to_unitary_matrix()
    commutes = gate.commutes_with(other)
    commutes_check = cirq.allclose_up_to_global_phase(mat, mat_swap)
    assert commutes == commutes_check
@pytest.mark.parametrize('gate,pauli,half_turns',
                         itertools.product(_all_clifford_gates(),
                                           _paulis,
                                           (0.1, 0.25, 0.5, -0.5)))
def test_commutes_with_pauli(gate, pauli, half_turns):
    # Same check against fractional Pauli rotations.
    pauli_gate = pauli ** half_turns
    q0 = cirq.NamedQubit('q0')
    mat = cirq.Circuit.from_ops(
        gate(q0),
        pauli_gate(q0),
    ).to_unitary_matrix()
    mat_swap = cirq.Circuit.from_ops(
        pauli_gate(q0),
        gate(q0),
    ).to_unitary_matrix()
    commutes = gate.commutes_with(pauli)
    commutes_check = cirq.allclose_up_to_global_phase(mat, mat_swap)
    assert commutes == commutes_check
@pytest.mark.parametrize('gate,other',
                         itertools.product(_all_clifford_gates(),
                                           _all_clifford_gates()))
def test_single_qubit_gate_after_switching_order(gate, other):
    # equivalent_gate_before(other) must give the gate that, applied first,
    # reproduces gate followed by other.
    q0 = cirq.NamedQubit('q0')
    mat = cirq.Circuit.from_ops(
        gate(q0),
        other(q0),
    ).to_unitary_matrix()
    mat_swap = cirq.Circuit.from_ops(
        gate.equivalent_gate_before(other)(q0),
        gate(q0),
    ).to_unitary_matrix()
    assert_allclose_up_to_global_phase(mat, mat_swap, rtol=1e-7, atol=1e-7)
@pytest.mark.parametrize('gate,sym,exp', (
    (cirq.SingleQubitCliffordGate.I, 'I', 1),
    (cirq.SingleQubitCliffordGate.H, 'H', 1),
    (cirq.SingleQubitCliffordGate.X, 'X', 1),
    (cirq.SingleQubitCliffordGate.X_sqrt, 'X', 0.5),
    (cirq.SingleQubitCliffordGate.X_nsqrt, 'X', -0.5),
    (
        cirq.SingleQubitCliffordGate.from_xz_map(
            (cirq.Y, False), (cirq.X, True)),
        '(X^-0.5-Z^0.5)',
        1
    )))
def test_text_diagram_info(gate, sym, exp):
    # Pin the wire symbol and exponent shown in circuit diagrams.
    assert cirq.circuit_diagram_info(gate) == cirq.CircuitDiagramInfo(
        wire_symbols=(sym,),
        exponent=exp)
| 40.87013 | 80 | 0.662589 |
acdf1c50f7757f82134fe022c747519b3f1e2628 | 10,883 | py | Python | sdktest/python/test/test_sdk.py | etecs-ru/gnomock | e8b9cb90061bcc26036206e97a2bd799603106dd | [
"MIT"
] | 1 | 2021-03-30T16:19:53.000Z | 2021-03-30T16:19:53.000Z | sdktest/python/test/test_sdk.py | etecs-ru/gnomock | e8b9cb90061bcc26036206e97a2bd799603106dd | [
"MIT"
] | 2 | 2021-04-03T14:06:39.000Z | 2021-04-04T10:25:33.000Z | sdktest/python/test/test_sdk.py | etecs-ru/gnomock | e8b9cb90061bcc26036206e97a2bd799603106dd | [
"MIT"
] | null | null | null | import gnomock
from gnomock.api.presets_api import PresetsApi
from gnomock.model.options import Options
from gnomock.model.localstack import Localstack
from gnomock.model.localstack_request import LocalstackRequest
from gnomock.model.splunk import Splunk
from gnomock.model.splunk_request import SplunkRequest
from gnomock.model.mongo import Mongo
from gnomock.model.mongo_request import MongoRequest
from gnomock.model.mysql import Mysql
from gnomock.model.mysql_request import MysqlRequest
from gnomock.model.mariadb import Mariadb
from gnomock.model.mariadb_request import MariadbRequest
from gnomock.model.mssql import Mssql
from gnomock.model.mssql_request import MssqlRequest
from gnomock.model.postgres import Postgres
from gnomock.model.postgres_request import PostgresRequest
from gnomock.model.cockroachdb import Cockroachdb
from gnomock.model.cockroachdb_request import CockroachdbRequest
from gnomock.model.elastic import Elastic
from gnomock.model.elastic_request import ElasticRequest
from gnomock.model.redis import Redis
from gnomock.model.redis_request import RedisRequest
from gnomock.model.memcached import Memcached
from gnomock.model.memcached_request import MemcachedRequest
from gnomock.model.rabbitmq import Rabbitmq
from gnomock.model.rabbitmq_request import RabbitmqRequest
from gnomock.model.rabbitmq_message import RabbitmqMessage
from gnomock.model.kubernetes import Kubernetes
from gnomock.model.kubernetes_request import KubernetesRequest
from gnomock.model.kafka import Kafka
from gnomock.model.kafka_request import KafkaRequest
from gnomock.model.kafka_messages import KafkaMessages
from gnomock.model.stop_request import StopRequest
import unittest
import os
class TestSDK(unittest.TestCase):
def setUp(self):
with gnomock.ApiClient() as client:
self.api = PresetsApi(client)
def tearDown(self):
return super().tearDown()
def test_mongo(self):
options = Options()
file_name = os.path.abspath("./test/testdata/mongo")
preset = Mongo(data_path=file_name, version="3")
mongo_request = MongoRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_mongo(mongo_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_mysql(self):
options = Options()
file_name = os.path.abspath("./test/testdata/mysql/schema.sql")
preset = Mysql(queries_files=[file_name], version="8")
mysql_request = MysqlRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_mysql(mysql_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_mariadb(self):
options = Options()
file_name = os.path.abspath("./test/testdata/mysql/schema.sql")
preset = Mariadb(queries_files=[file_name], version="10")
mariadb_request = MariadbRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_mariadb(mariadb_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_mssql(self):
options = Options()
file_name = os.path.abspath("./test/testdata/mssql/schema.sql")
preset = Mssql(queries_files=[file_name], license=True, version="2019-latest")
mssql_request = MssqlRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_mssql(mssql_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_postgres(self):
options = Options()
file_name = os.path.abspath("./test/testdata/postgres/schema.sql")
preset = Postgres(queries_files=[file_name], version="12")
postgres_request = PostgresRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_postgres(postgres_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_redis(self):
options = Options(debug=True)
values = {
"foo": "bar",
"number": 42,
"float": 3.14
}
preset = Redis(version="5", values=values)
redis_request = RedisRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_redis(redis_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_memcached(self):
options = Options()
values = {
"foo": "bar",
}
byte_values = {
"key": "Z25vbW9jawo="
}
preset = Memcached(version="1.6.9-alpine", values=values,
byte_values=byte_values)
memcached_request = MemcachedRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_memcached(memcached_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_splunk(self):
options = Options()
file_name = os.path.abspath("./test/testdata/splunk/events.jsonl")
preset = Splunk(values_file=file_name, accept_license=True,
admin_password="12345678", version="8.0.2.1")
splunk_request = SplunkRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_splunk(splunk_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_localstack(self):
options = Options()
preset = Localstack(services=['s3'], version="0.12.9")
localstack_request = LocalstackRequest(options=options, preset=preset)
id_candidate = ""
try:
response = self.api.start_localstack(localstack_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_rabbitmq(self):
file_name = os.path.abspath("./test/testdata/rabbitmq/messages.jsonl")
options = Options()
message = RabbitmqMessage(queue="alerts",
content_type="text/plain", string_body="python")
preset = Rabbitmq(version="3.8.14-alpine",
messages_files=[file_name], messages=[message])
rabbitmq_request = RabbitmqRequest(options=options,
preset=preset)
id_candidate = ""
try:
response = self.api.start_rabbit_mq(rabbitmq_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_kafka(self):
options = Options()
preset = Kafka(version="2.5.1-L1")
kafka_request = KafkaRequest(options=options,
preset=preset)
id_candidate = ""
try:
response = self.api.start_kafka(kafka_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_elastic(self):
options = Options()
preset = Elastic(version="7.12.0")
elastic_request = ElasticRequest(options=options,
preset=preset)
id_candidate = ""
try:
response = self.api.start_elastic(elastic_request)
id_candidate = response.id
self.assertEqual("127.0.0.1", response.host)
finally:
if id_candidate != "":
stop_request = StopRequest(id=id_candidate)
self.api.stop(stop_request)
def test_kubernetes(self):
    """Start a Kubernetes container preset and check the reported host."""
    kubernetes_request = KubernetesRequest(
        options=Options(),
        preset=Kubernetes(version="latest"),
    )
    container_id = ""
    try:
        started = self.api.start_kubernetes(kubernetes_request)
        container_id = started.id
        self.assertEqual("127.0.0.1", started.host)
    finally:
        # Only stop the container if it actually came up.
        if container_id != "":
            self.api.stop(StopRequest(id=container_id))
def test_cockroachdb(self):
    """Start a CockroachDB container preset and check the reported host."""
    cockroachdb_request = CockroachdbRequest(
        options=Options(),
        preset=Cockroachdb(version="v20.2.7"),
    )
    container_id = ""
    try:
        started = self.api.start_cockroach_db(cockroachdb_request)
        container_id = started.id
        self.assertEqual("127.0.0.1", started.host)
    finally:
        # Only stop the container if it actually came up.
        if container_id != "":
            self.api.stop(StopRequest(id=container_id))
# gnomock-generator
# Standard entry point so the test module can be executed directly.
if __name__ == "__main__":
    unittest.main()
| 34.009375 | 86 | 0.615455 |
acdf1cc8c2c40784aaaf94220c54e527c83997dd | 1,811 | py | Python | src/collector/software/installed_software.py | blue-hexagon/WinScraper | d044f549fa846c5ca98f77e449cc7bc23b5fc4c3 | [
"MIT"
] | 1 | 2022-02-01T11:02:58.000Z | 2022-02-01T11:02:58.000Z | src/collector/software/installed_software.py | blue-hexagon/WinScraper | d044f549fa846c5ca98f77e449cc7bc23b5fc4c3 | [
"MIT"
] | null | null | null | src/collector/software/installed_software.py | blue-hexagon/WinScraper | d044f549fa846c5ca98f77e449cc7bc23b5fc4c3 | [
"MIT"
] | null | null | null | import winreg
from typing import Any, Dict
from src.collector.base_collector import BaseCollector
class InstalledSoftwareCollector(BaseCollector):
    """Collect the names of installed software from the Windows registry."""

    def __init__(self) -> None:
        super().__init__()

    @staticmethod
    def _subkey_names(key):
        """Yield every subkey name of *key* until winreg raises OSError."""
        index = 0
        while True:
            try:
                yield winreg.EnumKey(key, index)
            except OSError:
                return
            index += 1

    def collect(self) -> Dict[Any, Any]:
        """Return a sorted, de-duplicated list of registry SOFTWARE subkeys."""
        # HKCU and HKLM SOFTWARE plus the 32-bit-on-64-bit WOW6432Node view.
        registry_roots = [
            (winreg.HKEY_CURRENT_USER, r"SOFTWARE"),
            (winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE"),
            (winreg.HKEY_LOCAL_MACHINE, r"SOFTWARE\\WOW6432Node"),
        ]
        names = set()
        for root, path in registry_roots:
            handle = winreg.OpenKey(winreg.ConnectRegistry(None, root), path)
            names.update(self._subkey_names(handle))
        return {"Software List from Windows Registry": list(sorted(names))}
# def firefox_version():
# try:
# version = get_registry_value(
# "HKEY_LOCAL_MACHINE",
# "SOFTWARE\\Mozilla\\Mozilla Firefox",
# "CurrentVersion")
# version = (u"Mozilla Firefox", version)
# except WindowsError:
# version = None
# return version
#
#
# def iexplore_version():
# try:
# version = get_registry_value(
# "HKEY_LOCAL_MACHINE",
# "SOFTWARE\\Microsoft\\Internet Explorer",
# "Version")
# version = (u"Internet Explorer", version)
# except WindowsError:
# version = None
# return version
#
#
# def browsers():
# browsers = []
# firefox = firefox_version()
# if firefox:
# browsers.append(firefox)
# iexplore = iexplore_version()
# if iexplore:
# browsers.append(iexplore)
#
# return browsers
#
| 28.746032 | 110 | 0.589729 |
acdf1ce0ff69a7c91df8342dc8a6883a4270814e | 1,031 | py | Python | fullstack/cashstatus/accounts/serializers.py | whywhyy/django-toy-project | ee5c91306605993ef70efa461cb41270ae689d6c | [
"MIT"
] | null | null | null | fullstack/cashstatus/accounts/serializers.py | whywhyy/django-toy-project | ee5c91306605993ef70efa461cb41270ae689d6c | [
"MIT"
] | 8 | 2021-03-10T08:03:03.000Z | 2022-02-26T23:50:01.000Z | fullstack/cashstatus/accounts/serializers.py | whywhyy/django-toy-project | ee5c91306605993ef70efa461cb41270ae689d6c | [
"MIT"
] | null | null | null | from rest_framework import serializers
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
# User Serializer
class UserSerializer(serializers.ModelSerializer):
    """Read-only representation of a Django auth User (no credential fields)."""

    class Meta:
        model = User
        fields = ('id', 'username', 'email')
# Register Serializer
class RegisterSerializer(serializers.ModelSerializer):
    """Sign-up serializer; the password is accepted but never echoed back."""

    class Meta:
        model = User
        fields = ('id', 'username', 'email', 'password')
        extra_kwargs = {'password': {'write_only': True}}

    def create(self, validated_data):
        """Create the account via ``create_user`` so the password is hashed."""
        return User.objects.create_user(
            validated_data['username'],
            validated_data['email'],
            validated_data['password'],
        )
# Login Serializer
class LoginSerializer(serializers.Serializer):
    """Validate a username/password pair and resolve it to an active user."""

    username = serializers.CharField()
    password = serializers.CharField()

    def validate(self, data):
        """Return the authenticated active user or raise a validation error."""
        account = authenticate(**data)
        if account is None or not account.is_active:
            raise serializers.ValidationError("Incorrect Credentials")
        return account
acdf1ce1983f17eb0fdff98f7d870994140b0d45 | 2,046 | py | Python | pywikibot/families/anarchopedia_family.py | Darkdadaah/pywikibot-core | 6cad0915f3e058fe4cf2bce4f37d395d21636df9 | [
"MIT"
] | 1 | 2016-03-23T14:07:51.000Z | 2016-03-23T14:07:51.000Z | pywikibot/families/anarchopedia_family.py | jayvdb/pywikibot-core | 70be2ea5cb34c4a054f89200f18b4e4be05c3ba4 | [
"MIT"
] | null | null | null | pywikibot/families/anarchopedia_family.py | jayvdb/pywikibot-core | 70be2ea5cb34c4a054f89200f18b4e4be05c3ba4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Family module for Anarchopedia wiki."""
#
# (C) Pywikibot team, 2006-2015
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id: acdf1ce1983f17eb0fdff98f7d870994140b0d45 $'
from pywikibot import family
# The Anarchopedia family
class Family(family.SubdomainFamily):

    """Family class for Anarchopedia wiki."""

    name = 'anarchopedia'
    domain = 'anarchopedia.org'

    # Interwiki prefix normalization: maps the non-standard codes the wiki
    # uses to the canonical codes pywikibot expects.
    interwiki_replacements = {
        # ISO 639-2 -> ISO 639-1 mappings
        'ara': 'ar',
        'chi': 'zh',
        'dan': 'da',
        'deu': 'de',
        'dut': 'nl',
        'ell': 'el',
        'eng': 'en',
        'epo': 'eo',
        'fas': 'fa',
        'fin': 'fi',
        'fra': 'fr',
        'ger': 'de',
        'gre': 'el',
        'heb': 'he',
        'hye': 'hy',
        'ind': 'id',
        'ita': 'it',
        'jpn': 'ja',
        'kor': 'ko',
        'lav': 'lv',
        'lit': 'lt',
        'nno': 'no',
        'nob': 'no',
        'nor': 'no',
        'pol': 'pl',
        'por': 'pt',
        'rum': 'ro',
        'rus': 'ru',
        'spa': 'es',
        'srp': 'sr',
        'sqi': 'sq',
        'swe': 'sv',
        'tur': 'tr',
        'zho': 'zh',
        # ISO 639-1 -> ISO 639-1 mappings
        'bs': 'hr',
        # Non-compliant mappings
        'bos': 'hr',
        'nsh': 'hr',
    }

    def __init__(self):
        """Constructor."""
        # Language codes roughly ordered by wiki size, largest first.
        self.languages_by_size = [
            'ar', 'en', 'de', 'nl', 'el', 'it', 'fa', 'fi', 'fr', 'he', 'es',
            'hy', 'id', 'meta', 'ja', 'ko', 'lv', 'lt', 'no', 'hr', 'pl', 'pt',
            'ro', 'ru', 'hrv', 'sq', 'sr', 'sv', 'tr', 'zh', 'eo', 'da',
        ]
        super(Family, self).__init__()

        # Anarchopedia wikis do not capitalize the first title letter.
        self.nocapitalize = list(self.langs.keys())

    def force_version(self, code):
        """Return the version for this family."""
        return '1.14'

    def scriptpath(self, code):
        """Return the script path for this family."""
        return ''
acdf1d42626931ce1dac0db7a212a105205d7cc9 | 12,455 | py | Python | sql/views.py | yetHandsome/Archery | 784b6459478a9adad3dab657ab6a005964975be9 | [
"Apache-2.0"
] | null | null | null | sql/views.py | yetHandsome/Archery | 784b6459478a9adad3dab657ab6a005964975be9 | [
"Apache-2.0"
] | null | null | null | sql/views.py | yetHandsome/Archery | 784b6459478a9adad3dab657ab6a005964975be9 | [
"Apache-2.0"
] | 1 | 2021-05-11T02:10:42.000Z | 2021-05-11T02:10:42.000Z | # -*- coding: UTF-8 -*-
import traceback
import simplejson as json
from django.contrib.auth.decorators import permission_required
from django.contrib.auth.models import Group
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect
from django.urls import reverse
from common.config import SysConfig
from sql.engines import get_engine
from common.utils.permission import superuser_required
from sql.engines.models import ReviewResult, ReviewSet
from sql.utils.tasks import task_info
from .models import Users, SqlWorkflow, QueryPrivileges, ResourceGroup, \
QueryPrivilegesApply, Config, SQL_WORKFLOW_CHOICES, InstanceTag
from sql.utils.workflow_audit import Audit
from sql.utils.sql_review import can_execute, can_timingtask, can_cancel
from common.utils.const import Const, WorkflowDict
from sql.utils.resource_group import user_groups, user_instances
import logging
logger = logging.getLogger('default')
def index(request):
    """Redirect '/' to the configured landing page (default: sqlworkflow)."""
    landing = SysConfig().get('index_path_url', 'sqlworkflow').strip('/')
    return HttpResponseRedirect('/{}/'.format(landing))
def login(request):
    """Show the login page; already-authenticated users go straight home."""
    user = request.user
    if user and user.is_authenticated:
        return HttpResponseRedirect('/')
    return render(request, 'login.html')
def sqlworkflow(request):
    """Render the SQL workflow list page with all selectable statuses."""
    context = {'status_list': SQL_WORKFLOW_CHOICES}
    return render(request, 'sqlworkflow.html', context)
# SQL submission page
@permission_required('sql.sql_submit', raise_exception=True)
def submit_sql(request):
    """Render the SQL submission form for the current user."""
    # Resource groups the user belongs to (scopes instance choices).
    group_list = user_groups(request.user)
    # Any active user can be chosen as a notification target.
    active_user = Users.objects.filter(is_active=1)
    context = {
        'active_user': active_user,
        'group_list': group_list,
        'enable_backup_switch': SysConfig().get('enable_backup_switch'),
    }
    return render(request, 'sqlsubmit.html', context)
# SQL workflow detail page
def detail(request, workflow_id):
    """Render the detail page of one SQL workflow: review/execute results,
    approval chain, the current user's permitted actions and schedule info."""
    workflow_detail = get_object_or_404(SqlWorkflow, pk=workflow_id)
    # Finished/failed workflows show the execution result; everything else
    # shows the pre-execution review content.
    if workflow_detail.status in ['workflow_finish', 'workflow_exception']:
        rows = workflow_detail.sqlworkflowcontent.execute_result
    else:
        rows = workflow_detail.sqlworkflowcontent.review_content
    # Workflows rejected by the automatic review don't need the audit info below.
    if workflow_detail.status != 'workflow_autoreviewwrong':
        # Current approver and the full approval chain.
        audit_auth_group, current_audit_auth_group = Audit.review_info(workflow_id, 2)
        # Whether the current user may review.
        is_can_review = Audit.can_review(request.user, workflow_id, 2)
        # Whether the current user may execute.
        is_can_execute = can_execute(request.user, workflow_id)
        # Whether the current user may schedule execution.
        is_can_timingtask = can_timingtask(request.user, workflow_id)
        # Whether the current user may cancel.
        is_can_cancel = can_cancel(request.user, workflow_id)
        # Latest audit-log entry, if any.
        try:
            audit_id = Audit.detail_by_workflow_id(workflow_id=workflow_id,
                                                   workflow_type=WorkflowDict.workflow_type['sqlreview']).audit_id
            last_operation_info = Audit.logs(audit_id=audit_id).latest('id').operation_info
        except Exception as e:
            logger.debug(f'无审核日志记录,错误信息{e}')
            last_operation_info = ''
    else:
        audit_auth_group = '系统自动驳回'
        current_audit_auth_group = '系统自动驳回'
        is_can_review = False
        is_can_execute = False
        is_can_timingtask = False
        is_can_cancel = False
        last_operation_info = None
    # Scheduled-execution info for timed workflows.
    if workflow_detail.status == 'workflow_timingtask':
        job_id = Const.workflowJobprefix['sqlreview'] + '-' + str(workflow_id)
        job = task_info(job_id)
        if job:
            run_date = job.next_run
        else:
            run_date = ''
    else:
        run_date = ''
    # Whether manual execution confirmation is enabled system-wide.
    manual = SysConfig().get('manual')
    review_result = ReviewSet()
    if rows:
        try:
            # Verify that rows parses as JSON.
            loaded_rows = json.loads(rows)
            # Legacy '[[]]' format: convert to the new [{}] format.
            if isinstance(loaded_rows[-1], list):
                for r in loaded_rows:
                    review_result.rows += [ReviewResult(inception_result=r)]
                rows = review_result.json()
        except json.decoder.JSONDecodeError:
            review_result.rows += [ReviewResult(
                # English prefix kept because this path has no unit test coverage.
                errormessage="Json decode failed."
                             "执行结果Json解析失败, 请联系管理员"
            )]
            rows = review_result.json()
    else:
        rows = workflow_detail.sqlworkflowcontent.review_content
    context = {'workflow_detail': workflow_detail, 'rows': rows, 'last_operation_info': last_operation_info,
               'is_can_review': is_can_review, 'is_can_execute': is_can_execute, 'is_can_timingtask': is_can_timingtask,
               'is_can_cancel': is_can_cancel, 'audit_auth_group': audit_auth_group, 'manual': manual,
               'current_audit_auth_group': current_audit_auth_group, 'run_date': run_date}
    return render(request, 'detail.html', context)
# Rollback SQL page
def rollback(request):
    """Render the rollback page for a workflow: shows the generated
    compensating SQL so a rollback workflow can be submitted."""
    # .get() instead of [] so a missing parameter yields the error page,
    # not an uncaught KeyError / HTTP 500.
    workflow_id = request.GET.get('workflow_id')
    if workflow_id == '' or workflow_id is None:
        context = {'errMsg': 'workflow_id参数为空.'}
        return render(request, 'error.html', context)
    workflow_id = int(workflow_id)
    workflow = SqlWorkflow.objects.get(id=workflow_id)
    try:
        query_engine = get_engine(instance=workflow.instance)
        list_backup_sql = query_engine.get_rollback(workflow=workflow)
    except Exception as msg:
        logger.error(traceback.format_exc())
        context = {'errMsg': msg}
        return render(request, 'error.html', context)
    # Reuse the workflow already fetched above instead of issuing a second,
    # identical database query.
    rollback_workflow_name = "【回滚工单】原工单Id:%s ,%s" % (workflow_id, workflow.workflow_name)
    context = {'list_backup_sql': list_backup_sql, 'workflow_detail': workflow,
               'rollback_workflow_name': rollback_workflow_name}
    return render(request, 'rollback.html', context)
@permission_required('sql.menu_sqlanalyze', raise_exception=True)
def sqlanalyze(request):
    """Render the SQL analyze page for the user's MySQL instances."""
    instance_names = [
        ins.instance_name
        for ins in user_instances(request.user, type='all', db_type='mysql')
    ]
    return render(request, 'sqlanalyze.html', {'instances': instance_names})
# SQL documentation page
@permission_required('sql.menu_document', raise_exception=True)
def dbaprinciples(request):
    """Render the static SQL/DBA best-practices document."""
    return render(request, 'dbaprinciples.html')
# Dashboard page
@permission_required('sql.menu_dashboard', raise_exception=True)
def dashboard(request):
    """Render the statistics dashboard."""
    return render(request, 'dashboard.html')
# Online SQL query page
@permission_required('sql.menu_query', raise_exception=True)
def sqlquery(request):
    """Render the online query page, limited to readable instances."""
    # Only instances tagged 'can_read' may be queried online.
    readable_tag = InstanceTag.objects.get(tag_code='can_read').id
    readable = list(user_instances(request.user, type='all', db_type='all', tags=[readable_tag]))
    return render(request, 'sqlquery.html', {'instances': readable})
# Slow query log page
@permission_required('sql.menu_slowquery', raise_exception=True)
def slowquery(request):
    """Render the slow query log page for the user's MySQL instances."""
    names = [ins.instance_name for ins in user_instances(request.user, type='all', db_type='mysql')]
    return render(request, 'slowquery.html', {'tab': 'slowquery', 'instances': names})
# SQL advisor (optimization) page
@permission_required('sql.menu_sqladvisor', raise_exception=True)
def sqladvisor(request):
    """Render the SQL optimization tool for the user's MySQL instances."""
    names = [ins.instance_name for ins in user_instances(request.user, type='all', db_type='mysql')]
    return render(request, 'sqladvisor.html', {'instances': names})
# Query-permission application list page
@permission_required('sql.menu_queryapplylist', raise_exception=True)
def queryapplylist(request):
    """Render the list of query-permission applications for the user's groups."""
    context = {'group_list': user_groups(request.user)}
    return render(request, 'queryapplylist.html', context)
# Query-permission application detail page
def queryapplydetail(request, apply_id):
    """Render one query-permission application: approval chain, reviewability,
    and the latest audit-log entry for finished applications."""
    workflow_detail = QueryPrivilegesApply.objects.get(apply_id=apply_id)
    # Current approver and the full approval chain.
    audit_auth_group, current_audit_auth_group = Audit.review_info(apply_id, 1)
    # Whether the current user may review.
    is_can_review = Audit.can_review(request.user, apply_id, 1)
    # Latest audit-log entry (status 2 == finished).
    if workflow_detail.status == 2:
        try:
            audit_id = Audit.detail_by_workflow_id(workflow_id=apply_id, workflow_type=1).audit_id
            last_operation_info = Audit.logs(audit_id=audit_id).latest('id').operation_info
        except Exception as e:
            logger.debug(f'无审核日志记录,错误信息{e}')
            last_operation_info = ''
    else:
        last_operation_info = ''
    context = {'workflow_detail': workflow_detail, 'audit_auth_group': audit_auth_group,
               'last_operation_info': last_operation_info, 'current_audit_auth_group': current_audit_auth_group,
               'is_can_review': is_can_review}
    return render(request, 'queryapplydetail.html', context)
# Per-user query-permission management page
def queryuserprivileges(request):
    """Render query-permission management, listing distinct grantees."""
    grantees = QueryPrivileges.objects.filter(is_deleted=0).values('user_display').distinct()
    return render(request, 'queryuserprivileges.html', {'user_list': grantees})
# Session / process management page
@permission_required('sql.menu_dbdiagnostic', raise_exception=True)
def dbdiagnostic(request):
    """Render session diagnostics for the user's MySQL instances."""
    names = [ins.instance_name for ins in user_instances(request.user, type='all', db_type='mysql')]
    return render(request, 'dbdiagnostic.html', {'tab': 'process', 'instances': names})
# Workflow audit list page
def workflows(request):
    """Render the pending-audit workflow list."""
    return render(request, "workflow.html")
# Workflow audit detail page
def workflowsdetail(request, audit_id):
    """Redirect an audit record to the detail page matching its workflow type."""
    audit_detail = Audit.detail(audit_id)
    workflow_type = audit_detail.workflow_type
    if workflow_type == WorkflowDict.workflow_type['query']:
        return HttpResponseRedirect(
            reverse('sql:queryapplydetail', args=(audit_detail.workflow_id,)))
    if workflow_type == WorkflowDict.workflow_type['sqlreview']:
        return HttpResponseRedirect(
            reverse('sql:detail', args=(audit_detail.workflow_id,)))
# System configuration page
@superuser_required
def config(request):
    """Render system configuration management (superusers only)."""
    # Flatten the Config table into an item -> value mapping.
    sys_config = {row['item']: row['value']
                  for row in Config.objects.all().values('item', 'value')}
    context = {
        'group_list': ResourceGroup.objects.all(),
        'auth_group_list': Group.objects.all(),
        'config': sys_config,
        'WorkflowDict': WorkflowDict,
    }
    return render(request, 'config.html', context)
# Resource group management page
@superuser_required
def group(request):
    """Render resource-group management (superusers only)."""
    return render(request, 'group.html')
# Resource group membership management page
@superuser_required
def groupmgmt(request, group_id):
    """Render membership management for a single resource group."""
    # Renamed local so it no longer shadows the sibling ``group`` view.
    resource_group = ResourceGroup.objects.get(group_id=group_id)
    return render(request, 'groupmgmt.html', {'group': resource_group})
# Instance management page
@permission_required('sql.menu_instance', raise_exception=True)
def instance(request):
    """Render instance management with the currently active instance tags."""
    active_tags = InstanceTag.objects.filter(active=True)
    return render(request, 'instance.html', {'tags': active_tags})
# Instance account management page
@permission_required('sql.menu_instance', raise_exception=True)
def instanceuser(request, instance_id):
    """Render account management for one database instance."""
    return render(request, 'instanceuser.html', {'instance_id': instance_id})
# Instance parameter management page
@permission_required('sql.menu_param', raise_exception=True)
def instance_param(request):
    """Render parameter management for the user's MySQL instances."""
    context = {
        'tab': 'param_tab',
        # Note: the template receives the instance objects, not just names.
        'instances': user_instances(request.user, type='all', db_type='mysql'),
    }
    return render(request, 'param.html', context)
# binlog2sql page
@permission_required('sql.menu_binlog2sql', raise_exception=True)
def binlog2sql(request):
    """Render the binlog2sql tool for the user's MySQL instances."""
    names = [ins.instance_name for ins in user_instances(request.user, type='all', db_type='mysql')]
    return render(request, 'binlog2sql.html', {'instances': names})
# Schema diff (SchemaSync) page
@permission_required('sql.menu_schemasync', raise_exception=True)
def schemasync(request):
    """Render the schema comparison tool for the user's MySQL instances."""
    names = [ins.instance_name for ins in user_instances(request.user, type='all', db_type='mysql')]
    return render(request, 'schemasync.html', {'instances': names})
| 34.406077 | 120 | 0.706544 |
acdf1da3f2c5c259653c00f31c07329befa77c82 | 8,491 | py | Python | hubspot/cms/hubdb/models/column_request.py | Ronfer/hubspot-api-python | 1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4 | [
"Apache-2.0"
] | 117 | 2020-04-06T08:22:53.000Z | 2022-03-18T03:41:29.000Z | hubspot/cms/hubdb/models/column_request.py | Ronfer/hubspot-api-python | 1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4 | [
"Apache-2.0"
] | 62 | 2020-04-06T16:21:06.000Z | 2022-03-17T16:50:44.000Z | hubspot/cms/hubdb/models/column_request.py | Ronfer/hubspot-api-python | 1c87274ecbba4aa3c7728f890ccc6e77b2b6d2e4 | [
"Apache-2.0"
] | 45 | 2020-04-06T16:13:52.000Z | 2022-03-30T21:33:17.000Z | # coding: utf-8
"""
HubDB endpoints
HubDB is a relational data store that presents data as rows, columns, and cells in a table, much like a spreadsheet. HubDB tables can be added or modified [in the HubSpot CMS](https://knowledge.hubspot.com/cos-general/how-to-edit-hubdb-tables), but you can also use the API endpoints documented here. For more information on HubDB tables and using their data on a HubSpot site, see the [CMS developers site](https://designers.hubspot.com/docs/tools/hubdb). You can also see the [documentation for dynamic pages](https://designers.hubspot.com/docs/tutorials/how-to-build-dynamic-pages-with-hubdb) for more details about the `useForPages` field. HubDB tables support `draft` and `published` versions. This allows you to update data in the table, either for testing or to allow for a manual approval process, without affecting any live pages using the existing data. Draft data can be reviewed, and published by a user working in HubSpot or published via the API. Draft data can also be discarded, allowing users to go back to the published version of the data without disrupting it. If a table is set to be `allowed for public access`, you can access the published version of the table and rows without any authentication by specifying the portal id via the query parameter `portalId`. # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.cms.hubdb.configuration import Configuration
class ColumnRequest(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared OpenAPI type.
    openapi_types = {"id": "int", "name": "str", "label": "str", "type": "str", "options": "list[Option]"}

    # Attribute name -> JSON key in the wire format.
    attribute_map = {"id": "id", "name": "name", "label": "label", "type": "type", "options": "options"}

    def __init__(self, id=None, name=None, label=None, type=None, options=None, local_vars_configuration=None):  # noqa: E501
        """ColumnRequest - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._id = None
        self._name = None
        self._label = None
        self._type = None
        self._options = None
        self.discriminator = None

        # Assignment goes through the property setters below, which validate
        # each value when client-side validation is enabled.
        self.id = id
        self.name = name
        self.label = label
        self.type = type
        self.options = options

    @property
    def id(self):
        """Gets the id of this ColumnRequest.  # noqa: E501

        Column Id  # noqa: E501

        :return: The id of this ColumnRequest.  # noqa: E501
        :rtype: int
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this ColumnRequest.

        Column Id  # noqa: E501

        :param id: The id of this ColumnRequest.  # noqa: E501
        :type: int
        """
        if self.local_vars_configuration.client_side_validation and id is None:  # noqa: E501
            raise ValueError("Invalid value for `id`, must not be `None`")  # noqa: E501

        self._id = id

    @property
    def name(self):
        """Gets the name of this ColumnRequest.  # noqa: E501

        Name of the column  # noqa: E501

        :return: The name of this ColumnRequest.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this ColumnRequest.

        Name of the column  # noqa: E501

        :param name: The name of this ColumnRequest.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and name is None:  # noqa: E501
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name

    @property
    def label(self):
        """Gets the label of this ColumnRequest.  # noqa: E501

        Label of the column  # noqa: E501

        :return: The label of this ColumnRequest.  # noqa: E501
        :rtype: str
        """
        return self._label

    @label.setter
    def label(self, label):
        """Sets the label of this ColumnRequest.

        Label of the column  # noqa: E501

        :param label: The label of this ColumnRequest.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and label is None:  # noqa: E501
            raise ValueError("Invalid value for `label`, must not be `None`")  # noqa: E501

        self._label = label

    @property
    def type(self):
        """Gets the type of this ColumnRequest.  # noqa: E501

        Type of the column  # noqa: E501

        :return: The type of this ColumnRequest.  # noqa: E501
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this ColumnRequest.

        Type of the column  # noqa: E501

        :param type: The type of this ColumnRequest.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and type is None:  # noqa: E501
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501
        # Closed set of column types accepted by the HubDB API.
        allowed_values = [
            "NULL",
            "TEXT",
            "NUMBER",
            "URL",
            "IMAGE",
            "SELECT",
            "MULTISELECT",
            "BOOLEAN",
            "LOCATION",
            "DATE",
            "DATETIME",
            "CURRENCY",
            "RICHTEXT",
            "FOREIGN_ID",
            "VIDEO",
            "CTA",
        ]  # noqa: E501
        if self.local_vars_configuration.client_side_validation and type not in allowed_values:  # noqa: E501
            raise ValueError("Invalid value for `type` ({0}), must be one of {1}".format(type, allowed_values))  # noqa: E501

        self._type = type

    @property
    def options(self):
        """Gets the options of this ColumnRequest.  # noqa: E501

        Options to choose for select and multi-select columns  # noqa: E501

        :return: The options of this ColumnRequest.  # noqa: E501
        :rtype: list[Option]
        """
        return self._options

    @options.setter
    def options(self, options):
        """Sets the options of this ColumnRequest.

        Options to choose for select and multi-select columns  # noqa: E501

        :param options: The options of this ColumnRequest.  # noqa: E501
        :type: list[Option]
        """
        if self.local_vars_configuration.client_side_validation and options is None:  # noqa: E501
            raise ValueError("Invalid value for `options`, must not be `None`")  # noqa: E501

        self._options = options

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models (anything with a to_dict).
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items()))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ColumnRequest):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, ColumnRequest):
            return True

        return self.to_dict() != other.to_dict()
| 35.232365 | 1,302 | 0.611118 |
acdf1e7d57853880e23336bbb8c2ba62d84ad928 | 1,599 | py | Python | utils/logger.py | lxc86739795/vehicle_reid_by_parsing | a96496e11124d47d08a478696e0d3deb1e9b0c1a | [
"Apache-2.0"
] | 36 | 2020-11-20T05:40:14.000Z | 2022-02-18T10:15:23.000Z | utils/logger.py | lxc86739795/vehicle_reid_by_parsing | a96496e11124d47d08a478696e0d3deb1e9b0c1a | [
"Apache-2.0"
] | 3 | 2021-06-25T07:51:12.000Z | 2021-12-05T09:44:26.000Z | utils/logger.py | lxc86739795/vehicle_reid_by_parsing | a96496e11124d47d08a478696e0d3deb1e9b0c1a | [
"Apache-2.0"
] | 5 | 2020-12-14T02:19:11.000Z | 2022-02-18T10:15:47.000Z | # encoding: utf-8
"""
@author: sherlock
@contact: sherlockliao01@gmail.com
"""
import logging
import os
import sys
from .iotools import mkdir_if_missing
def setup_logger(name, save_dir, distributed_rank):
    """Create a DEBUG-level logger writing to stdout and optionally a file.

    Non-master processes (distributed_rank > 0) get a logger with no
    handlers attached so that only rank 0 produces output.
    """
    log = logging.getLogger(name)
    log.setLevel(logging.DEBUG)
    if distributed_rank > 0:
        return log

    fmt = logging.Formatter("%(asctime)s %(name)s %(levelname)s: %(message)s")

    stream_handler = logging.StreamHandler(stream=sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(fmt)
    log.addHandler(stream_handler)

    if save_dir:
        file_handler = logging.FileHandler(os.path.join(save_dir, "log.txt"), mode='w')
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(fmt)
        log.addHandler(file_handler)

    return log
class Logger(object):
def __init__(self, fpath=None):
self.console = sys.stdout
self.file = None
if fpath is not None:
mkdir_if_missing(os.path.dirname(fpath))
self.file = open(fpath, 'w')
def __del__(self):
self.close()
def __enter__(self):
pass
def __exit__(self, *args):
self.close()
def write(self, msg):
self.console.write(msg)
if self.file is not None:
self.file.write(msg)
def flush(self):
self.console.flush()
if self.file is not None:
self.file.flush()
os.fsync(self.file.fileno())
def close(self):
self.console.close()
if self.file is not None:
self.file.close()
| 23.865672 | 84 | 0.620388 |
acdf1f477178156661e599c152825b3d4d0a5194 | 21,464 | py | Python | tests/postgres_tests/test_indexes.py | Saduk0/django | 98f23a8af0be7e87535426c5c83058e2682bfdf8 | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | 9 | 2020-09-30T16:32:05.000Z | 2020-10-12T13:52:07.000Z | tests/postgres_tests/test_indexes.py | Saduk0/django | 98f23a8af0be7e87535426c5c83058e2682bfdf8 | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | 3 | 2016-05-15T22:05:14.000Z | 2019-11-02T15:58:14.000Z | tests/postgres_tests/test_indexes.py | Saduk0/django | 98f23a8af0be7e87535426c5c83058e2682bfdf8 | [
"CNRI-Python-GPL-Compatible",
"BSD-3-Clause"
] | 2 | 2016-01-19T18:29:21.000Z | 2021-12-23T18:17:39.000Z | from unittest import mock
from django.contrib.postgres.indexes import (
BloomIndex, BrinIndex, BTreeIndex, GinIndex, GistIndex, HashIndex,
SpGistIndex,
)
from django.db import NotSupportedError, connection
from django.db.models import CharField, Q
from django.db.models.functions import Length
from django.test import skipUnlessDBFeature
from django.test.utils import register_lookup
from . import PostgreSQLSimpleTestCase, PostgreSQLTestCase
from .models import CharFieldModel, IntegerArrayModel
class IndexTestMixin:
    """Assertions shared by every PostgreSQL index test class below."""

    def test_name_auto_generation(self):
        index = self.index_class(fields=['field'])
        index.set_name_with_model(CharFieldModel)
        pattern = r'postgres_te_field_[0-9a-f]{6}_%s' % self.index_class.suffix
        self.assertRegex(index.name, pattern)

    def test_deconstruction_no_customization(self):
        index_name = 'test_title_%s' % self.index_class.suffix
        index = self.index_class(fields=['title'], name=index_name)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.%s' % self.index_class.__name__)
        self.assertEqual(args, ())
        self.assertEqual(kwargs, {'fields': ['title'], 'name': index_name})
class BloomIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BloomIndex

    def test_suffix(self):
        self.assertEqual(BloomIndex.suffix, 'bloom')

    def test_deconstruction(self):
        index = BloomIndex(fields=['title'], name='test_bloom', length=80, columns=[4])
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BloomIndex')
        self.assertEqual(args, ())
        expected_kwargs = {
            'fields': ['title'],
            'name': 'test_bloom',
            'length': 80,
            'columns': [4],
        }
        self.assertEqual(kwargs, expected_kwargs)

    def test_invalid_fields(self):
        msg = 'Bloom indexes support a maximum of 32 fields.'
        with self.assertRaisesMessage(ValueError, msg):
            BloomIndex(fields=['title'] * 33, name='test_bloom')

    def test_invalid_columns(self):
        with self.assertRaisesMessage(ValueError, 'BloomIndex.columns must be a list or tuple.'):
            BloomIndex(fields=['title'], name='test_bloom', columns='x')
        with self.assertRaisesMessage(ValueError, 'BloomIndex.columns cannot have more values than fields.'):
            BloomIndex(fields=['title'], name='test_bloom', columns=[4, 3])

    def test_invalid_columns_value(self):
        msg = 'BloomIndex.columns must contain integers from 1 to 4095.'
        for bad in (0, 4096):
            with self.subTest(bad), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=['title'], name='test_bloom', columns=[bad])

    def test_invalid_length(self):
        msg = 'BloomIndex.length must be None or an integer from 1 to 4096.'
        for bad in (0, 4097):
            with self.subTest(bad), self.assertRaisesMessage(ValueError, msg):
                BloomIndex(fields=['title'], name='test_bloom', length=bad)
class BrinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BrinIndex

    def test_suffix(self):
        self.assertEqual(BrinIndex.suffix, 'brin')

    def test_deconstruction(self):
        index = BrinIndex(fields=['title'], name='test_title_brin', autosummarize=True, pages_per_range=16)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BrinIndex')
        self.assertEqual(args, ())
        expected_kwargs = {
            'fields': ['title'],
            'name': 'test_title_brin',
            'autosummarize': True,
            'pages_per_range': 16,
        }
        self.assertEqual(kwargs, expected_kwargs)

    def test_invalid_pages_per_range(self):
        with self.assertRaisesMessage(ValueError, 'pages_per_range must be None or a positive integer'):
            BrinIndex(fields=['title'], name='test_title_brin', pages_per_range=0)
class BTreeIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    index_class = BTreeIndex

    def test_suffix(self):
        self.assertEqual(BTreeIndex.suffix, 'btree')

    def test_deconstruction(self):
        index = BTreeIndex(fields=['title'], name='test_title_btree', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.BTreeIndex')
        self.assertEqual(args, ())
        expected_kwargs = {'fields': ['title'], 'name': 'test_title_btree', 'fillfactor': 80}
        self.assertEqual(kwargs, expected_kwargs)
class GinIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Deconstruction tests for GinIndex."""
    index_class = GinIndex

    def test_suffix(self):
        self.assertEqual(GinIndex.suffix, 'gin')

    def test_deconstruction(self):
        index = GinIndex(
            fields=['title'],
            name='test_title_gin',
            fastupdate=True,
            gin_pending_list_limit=128,
        )
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GinIndex')
        self.assertEqual(args, ())
        expected_kwargs = {
            'fields': ['title'],
            'name': 'test_title_gin',
            'fastupdate': True,
            'gin_pending_list_limit': 128,
        }
        self.assertEqual(kwargs, expected_kwargs)
class GistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Deconstruction tests for GistIndex."""
    index_class = GistIndex

    def test_suffix(self):
        self.assertEqual(GistIndex.suffix, 'gist')

    def test_deconstruction(self):
        index = GistIndex(fields=['title'], name='test_title_gist', buffering=False, fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.GistIndex')
        self.assertEqual(args, ())
        expected_kwargs = {
            'fields': ['title'],
            'name': 'test_title_gist',
            'buffering': False,
            'fillfactor': 80,
        }
        self.assertEqual(kwargs, expected_kwargs)
class HashIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Deconstruction tests for HashIndex."""
    index_class = HashIndex

    def test_suffix(self):
        self.assertEqual(HashIndex.suffix, 'hash')

    def test_deconstruction(self):
        index = HashIndex(fields=['title'], name='test_title_hash', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.HashIndex')
        self.assertEqual(args, ())
        expected_kwargs = {'fields': ['title'], 'name': 'test_title_hash', 'fillfactor': 80}
        self.assertEqual(kwargs, expected_kwargs)
class SpGistIndexTests(IndexTestMixin, PostgreSQLSimpleTestCase):
    """Deconstruction tests for SpGistIndex."""
    index_class = SpGistIndex

    def test_suffix(self):
        self.assertEqual(SpGistIndex.suffix, 'spgist')

    def test_deconstruction(self):
        index = SpGistIndex(fields=['title'], name='test_title_spgist', fillfactor=80)
        path, args, kwargs = index.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.indexes.SpGistIndex')
        self.assertEqual(args, ())
        expected_kwargs = {'fields': ['title'], 'name': 'test_title_spgist', 'fillfactor': 80}
        self.assertEqual(kwargs, expected_kwargs)
class SchemaTests(PostgreSQLTestCase):
    # Each test adds one PostgreSQL index type through the schema editor,
    # verifies it via introspection, then removes it again.

    def get_constraints(self, table):
        """
        Get the indexes on the table using a new cursor.
        """
        with connection.cursor() as cursor:
            return connection.introspection.get_constraints(cursor, table)

    def test_gin_index(self):
        """A GinIndex can be added to and removed from a table."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(IntegerArrayModel._meta.db_table))
        # Add the index
        index_name = 'integer_array_model_field_gin'
        index = GinIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        # Check gin index was added
        self.assertEqual(constraints[index_name]['type'], GinIndex.suffix)
        # Drop the index
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_gin_fastupdate(self):
        """fastupdate=False is reflected as the fastupdate=off storage option."""
        index_name = 'integer_array_gin_fastupdate'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=False)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['fastupdate=off'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    def test_partial_gin_index(self):
        """A GinIndex with a condition (partial index) can be created."""
        with register_lookup(CharField, Length):
            index_name = 'char_field_gin_partial_idx'
            index = GinIndex(fields=['field'], name=index_name, condition=Q(field__length=40))
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]['type'], 'gin')
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_partial_gin_index_with_tablespace(self):
        """A partial GinIndex honors db_tablespace in its CREATE INDEX SQL."""
        with register_lookup(CharField, Length):
            index_name = 'char_field_gin_partial_idx'
            index = GinIndex(
                fields=['field'],
                name=index_name,
                condition=Q(field__length=40),
                db_tablespace='pg_default',
            )
            with connection.schema_editor() as editor:
                editor.add_index(CharFieldModel, index)
                self.assertIn('TABLESPACE "pg_default" ', str(index.create_sql(CharFieldModel, editor)))
            constraints = self.get_constraints(CharFieldModel._meta.db_table)
            self.assertEqual(constraints[index_name]['type'], 'gin')
            with connection.schema_editor() as editor:
                editor.remove_index(CharFieldModel, index)
            self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gin_parameters(self):
        """Both gin_pending_list_limit and fastupdate appear as options."""
        index_name = 'integer_array_gin_params'
        index = GinIndex(fields=['field'], name=index_name, fastupdate=True, gin_pending_list_limit=64)
        with connection.schema_editor() as editor:
            editor.add_index(IntegerArrayModel, index)
        constraints = self.get_constraints(IntegerArrayModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], 'gin')
        self.assertEqual(constraints[index_name]['options'], ['gin_pending_list_limit=64', 'fastupdate=on'])
        with connection.schema_editor() as editor:
            editor.remove_index(IntegerArrayModel, index)
        self.assertNotIn(index_name, self.get_constraints(IntegerArrayModel._meta.db_table))

    @skipUnlessDBFeature('has_bloom_index')
    def test_bloom_index(self):
        """A BloomIndex can be added to and removed from a table."""
        index_name = 'char_field_model_field_bloom'
        index = BloomIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix)
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    @skipUnlessDBFeature('has_bloom_index')
    def test_bloom_parameters(self):
        """length and per-column bit counts appear as storage options."""
        index_name = 'char_field_model_field_bloom_params'
        index = BloomIndex(fields=['field'], name=index_name, length=512, columns=[3])
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BloomIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['length=512', 'col1=3'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_bloom_index_not_supported(self):
        """Creating a BloomIndex raises when the backend lacks support."""
        index_name = 'bloom_index_exception'
        index = BloomIndex(fields=['field'], name=index_name)
        msg = 'Bloom indexes require PostgreSQL 9.6+.'
        with self.assertRaisesMessage(NotSupportedError, msg):
            # Simulate an older server by forcing the feature flag off.
            with mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_bloom_index', False):
                with connection.schema_editor() as editor:
                    editor.add_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_brin_index(self):
        """A BrinIndex with pages_per_range can be added and removed."""
        index_name = 'char_field_model_field_brin'
        index = BrinIndex(fields=['field'], name=index_name, pages_per_range=4)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['pages_per_range=4'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    @skipUnlessDBFeature('has_brin_autosummarize')
    def test_brin_parameters(self):
        """autosummarize=True is reflected as the autosummarize=on option."""
        index_name = 'char_field_brin_params'
        index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BrinIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['autosummarize=on'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_brin_autosummarize_not_supported(self):
        """autosummarize raises when the backend lacks support for it."""
        index_name = 'brin_options_exception'
        index = BrinIndex(fields=['field'], name=index_name, autosummarize=True)
        with self.assertRaisesMessage(NotSupportedError, 'BRIN option autosummarize requires PostgreSQL 10+.'):
            # Simulate an older server by forcing the feature flag off.
            with mock.patch('django.db.backends.postgresql.features.DatabaseFeatures.has_brin_autosummarize', False):
                with connection.schema_editor() as editor:
                    editor.add_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_btree_index(self):
        """A BTreeIndex can be added to and removed from a table."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_btree'
        index = BTreeIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_btree_parameters(self):
        """fillfactor is reflected as a btree storage option."""
        index_name = 'integer_array_btree_fillfactor'
        index = BTreeIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], BTreeIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_index(self):
        """A GistIndex can be added to and removed from a table."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_gist'
        index = GistIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_gist_parameters(self):
        """buffering and fillfactor are reflected as gist storage options."""
        index_name = 'integer_array_gist_buffering'
        index = GistIndex(fields=['field'], name=index_name, buffering=True, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], GistIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['buffering=on', 'fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_hash_index(self):
        """A HashIndex can be added to and removed from a table."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_hash'
        index = HashIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_hash_parameters(self):
        """fillfactor is reflected as a hash storage option."""
        index_name = 'integer_array_hash_fillfactor'
        index = HashIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], HashIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_spgist_index(self):
        """A SpGistIndex can be added to and removed from a table."""
        # Ensure the table is there and doesn't have an index.
        self.assertNotIn('field', self.get_constraints(CharFieldModel._meta.db_table))
        # Add the index.
        index_name = 'char_field_model_field_spgist'
        index = SpGistIndex(fields=['field'], name=index_name)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        # The index was added.
        self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
        # Drop the index.
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))

    def test_spgist_parameters(self):
        """fillfactor is reflected as a spgist storage option."""
        index_name = 'integer_array_spgist_fillfactor'
        index = SpGistIndex(fields=['field'], name=index_name, fillfactor=80)
        with connection.schema_editor() as editor:
            editor.add_index(CharFieldModel, index)
        constraints = self.get_constraints(CharFieldModel._meta.db_table)
        self.assertEqual(constraints[index_name]['type'], SpGistIndex.suffix)
        self.assertEqual(constraints[index_name]['options'], ['fillfactor=80'])
        with connection.schema_editor() as editor:
            editor.remove_index(CharFieldModel, index)
        self.assertNotIn(index_name, self.get_constraints(CharFieldModel._meta.db_table))
| 48.451467 | 117 | 0.688362 |
acdf20c3594111946c5a952452576af3ba5a7855 | 6,654 | py | Python | kitsune/users/tests/test_views.py | wescthatsme33/kitsune | 6a29049de625c0c93997dc55b36c9c39262d0057 | [
"BSD-3-Clause"
] | null | null | null | kitsune/users/tests/test_views.py | wescthatsme33/kitsune | 6a29049de625c0c93997dc55b36c9c39262d0057 | [
"BSD-3-Clause"
] | null | null | null | kitsune/users/tests/test_views.py | wescthatsme33/kitsune | 6a29049de625c0c93997dc55b36c9c39262d0057 | [
"BSD-3-Clause"
] | null | null | null |
from django.contrib import messages
from django.contrib.messages.middleware import MessageMiddleware
from django.contrib.sessions.middleware import SessionMiddleware
from django.test.client import RequestFactory
from nose.tools import eq_
from pyquery import PyQuery as pq
from kitsune.questions.models import Answer, Question
from kitsune.questions.tests import AnswerFactory, QuestionFactory
from kitsune.sumo.tests import LocalizingClient, TestCase
from kitsune.sumo.urlresolvers import reverse
from kitsune.users.models import (CONTRIBUTOR_GROUP, Deactivation, Profile,
Setting)
from kitsune.users.tests import GroupFactory, UserFactory, add_permission
from kitsune.users.views import edit_profile
class MakeContributorTests(TestCase):
    """Tests for the users.make_contributor view."""

    def setUp(self):
        self.user = UserFactory()
        self.client.login(username=self.user.username, password='testpass')
        GroupFactory(name=CONTRIBUTOR_GROUP)
        super(MakeContributorTests, self).setUp()

    def test_make_contributor(self):
        """Posting to the view adds the user to the contributor group."""
        membership = self.user.groups.filter(name=CONTRIBUTOR_GROUP)
        eq_(0, membership.count())
        url = reverse('users.make_contributor', force_locale=True)
        response = self.client.post(url)
        eq_(302, response.status_code)
        eq_(1, membership.count())
class UserSettingsTests(TestCase):
    """Tests for the user settings editing view."""

    def setUp(self):
        self.user = UserFactory()
        self.profile = self.user.profile
        self.client.login(username=self.user.username, password='testpass')
        super(UserSettingsTests, self).setUp()

    def test_create_setting(self):
        """Posting the settings form creates the setting for the user."""
        url = reverse('users.edit_settings', locale='en-US')
        # The user starts with no settings at all.
        eq_(Setting.objects.filter(user=self.user).count(), 0)
        response = self.client.get(url, follow=True)
        eq_(200, response.status_code)
        response = self.client.post(
            url, {'forums_watch_new_thread': True}, follow=True)
        eq_(200, response.status_code)
        assert Setting.get_for_user(self.user, 'forums_watch_new_thread')
class UserProfileTests(TestCase):
    """Tests for the public user profile view."""

    def setUp(self):
        self.user = UserFactory()
        self.profile = self.user.profile
        self.userrl = reverse('users.profile', args=[self.user.username], locale='en-US')
        super(UserProfileTests, self).setUp()

    def test_ProfileFactory(self):
        """The profile page shows the username."""
        response = self.client.get(self.userrl)
        self.assertContains(response, self.user.username)

    def test_profile_redirect(self):
        """Ensure that old profile URL's get redirected."""
        old_url = reverse('users.profile', args=[self.user.pk], locale='en-US')
        response = self.client.get(old_url)
        eq_(302, response.status_code)

    def test_profile_inactive(self):
        """Inactive users don't have a public profile."""
        self.user.is_active = False
        self.user.save()
        response = self.client.get(self.userrl)
        eq_(404, response.status_code)

    def test_profile_post(self):
        """POST is not an allowed method on the profile page."""
        response = self.client.post(self.userrl)
        eq_(405, response.status_code)

    def test_profile_deactivate(self):
        """Deactivation requires the permission and logs the moderator."""
        target = UserFactory().profile
        self.client.login(username=self.user.username, password='testpass')
        deactivate_url = reverse('users.deactivate', locale='en-US')
        # Without the permission the request is forbidden.
        response = self.client.post(deactivate_url, {'user_id': target.user.id})
        eq_(403, response.status_code)
        add_permission(self.user, Profile, 'deactivate_users')
        response = self.client.post(deactivate_url, {'user_id': target.user.id})
        eq_(302, response.status_code)
        log = Deactivation.objects.get(user_id=target.user_id)
        eq_(log.moderator_id, self.user.id)
        target = Profile.objects.get(user_id=target.user_id)
        assert not target.user.is_active

    def test_deactivate_and_flag_spam(self):
        """Deactivate-as-spam marks all of the user's content as spam."""
        self.client.login(username=self.user.username, password='testpass')
        add_permission(self.user, Profile, 'deactivate_users')
        # Verify content is flagged as spam when requested.
        spammer = UserFactory()
        AnswerFactory(creator=spammer)
        QuestionFactory(creator=spammer)
        url = reverse('users.deactivate-spam', locale='en-US')
        response = self.client.post(url, {'user_id': spammer.id})
        eq_(302, response.status_code)
        eq_(1, Question.objects.filter(creator=spammer, is_spam=True).count())
        eq_(0, Question.objects.filter(creator=spammer, is_spam=False).count())
        eq_(1, Answer.objects.filter(creator=spammer, is_spam=True).count())
        eq_(0, Answer.objects.filter(creator=spammer, is_spam=False).count())
class ProfileNotificationTests(TestCase):
    """
    These tests confirm that FXA and non-FXA messages render properly.
    We use RequestFactory because the request object from self.client.request
    cannot be passed into messages.info()
    """

    def _get_request(self):
        """Build a GET request for edit_profile with working messages."""
        user = UserFactory()
        request = RequestFactory().get(
            reverse('users.edit_profile', args=[user.username]))
        request.user = user
        request.LANGUAGE_CODE = 'en'
        # messages.info() needs session and message middleware applied.
        for middleware_class in (SessionMiddleware, MessageMiddleware):
            middleware_class().process_request(request)
            request.session.save()
        return request

    def test_fxa_notification_updated(self):
        """The FXA 'updated' marker renders its dedicated notification."""
        request = self._get_request()
        messages.info(request, 'fxa_notification_updated')
        doc = pq(edit_profile(request).content)
        eq_(1, len(doc('#fxa-notification-updated')))
        eq_(0, len(doc('#fxa-notification-created')))

    def test_non_fxa_notification_created(self):
        """Plain messages render as ordinary user messages, not FXA ones."""
        request = self._get_request()
        text = 'This is a helpful piece of information'
        messages.info(request, text)
        doc = pq(edit_profile(request).content)
        eq_(0, len(doc('#fxa-notification-updated')))
        eq_(0, len(doc('#fxa-notification-created')))
        eq_(1, len(doc('.user-messages li')))
        eq_(doc('.user-messages li').text(), text)
class FXAAuthenticationTests(TestCase):
    """Tests for the FXA authentication-init session handling."""
    client_class = LocalizingClient

    def test_authenticate_does_not_update_session(self):
        self.client.get(reverse('users.fxa_authentication_init'))
        assert not self.client.session.get('is_contributor')

    def test_authenticate_does_update_session(self):
        url = '{}?is_contributor=True'.format(
            reverse('users.fxa_authentication_init'))
        self.client.get(url)
        assert self.client.session.get('is_contributor')
| 38.462428 | 99 | 0.669823 |
acdf21bb52a3d34a17f01d6e3bfe6c4aebd41b15 | 3,930 | py | Python | tests/test_gui/test_layouting_boxgroup.py | yegarti/arcade | 1862e61aab9a7dc646265005b0e808d953a9dfe3 | [
"MIT"
] | null | null | null | tests/test_gui/test_layouting_boxgroup.py | yegarti/arcade | 1862e61aab9a7dc646265005b0e808d953a9dfe3 | [
"MIT"
] | null | null | null | tests/test_gui/test_layouting_boxgroup.py | yegarti/arcade | 1862e61aab9a7dc646265005b0e808d953a9dfe3 | [
"MIT"
] | null | null | null | from arcade.gui.widgets import UIBoxLayout, UIDummy
# Vertical
def test_do_layout_vertical_with_initial_children():
    """Children passed at construction are stacked downwards from the top."""
    upper = UIDummy()
    lower = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=True, children=[upper, lower])
    box.do_layout()
    # Each UIDummy is 100x100: first occupies y 400..300, second 300..200.
    assert (upper.left, upper.top, upper.bottom) == (100, 400, 300)
    assert (lower.left, lower.top, lower.bottom) == (100, 300, 200)
def test_do_layout_vertical_add_children():
    """Children appended via add() are stacked like initial children."""
    box = UIBoxLayout(x=100, y=400, vertical=True)
    upper = UIDummy()
    lower = UIDummy()
    box.add(upper)
    box.add(lower)
    box.do_layout()
    assert (upper.left, upper.top, upper.bottom) == (100, 400, 300)
    assert (lower.left, lower.top, lower.bottom) == (100, 300, 200)
def test_do_layout_vertical_add_child_with_initial_children():
    """add() appends below the children given at construction time."""
    first = UIDummy()
    second = UIDummy()
    third = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=True, children=[first, second])
    box.add(third)
    box.do_layout()
    assert (first.left, first.top, first.bottom) == (100, 400, 300)
    assert (second.left, second.top, second.bottom) == (100, 300, 200)
    assert (third.left, third.top, third.bottom) == (100, 200, 100)
def test_vertical_group_keep_top_alignment_while_adding_children():
    """The vertical layout grows downwards, keeping its top edge fixed."""
    first = UIDummy()
    second = UIDummy()
    third = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=True, children=[first, second])
    assert box.top == 400
    box.add(third)
    box.do_layout()
    # The group starts with 0 height (top == bottom); adding children must
    # keep the top alignment.
    assert box.top == 400
    assert box.left == 100
    assert box.height == 300
    assert box.width == 100
# Horizontal
def test_do_layout_horizontal_with_initial_children():
    """Children passed at construction are placed left to right."""
    left_widget = UIDummy()
    right_widget = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=False,
                      children=[left_widget, right_widget])
    box.do_layout()
    # Each UIDummy is 100x100: first spans x 100..200, second 200..300.
    assert (left_widget.left, left_widget.right, left_widget.top) == (100, 200, 400)
    assert (right_widget.left, right_widget.right, right_widget.top) == (200, 300, 400)
def test_do_layout_horizontal_add_children():
    """Children appended via add() are placed like initial children."""
    box = UIBoxLayout(x=100, y=400, vertical=False)
    left_widget = UIDummy()
    right_widget = UIDummy()
    box.add(left_widget)
    box.add(right_widget)
    box.do_layout()
    assert (left_widget.left, left_widget.right, left_widget.top) == (100, 200, 400)
    assert (right_widget.left, right_widget.right, right_widget.top) == (200, 300, 400)
def test_do_layout_horizontal_add_child_with_initial_children():
    """add() appends to the right of the construction-time children."""
    first = UIDummy()
    second = UIDummy()
    third = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=False, children=[first, second])
    box.add(third)
    box.do_layout()
    assert (first.left, first.right, first.top) == (100, 200, 400)
    assert (second.left, second.right, second.top) == (200, 300, 400)
    assert (third.left, third.right, third.top) == (300, 400, 400)
def test_horizontal_group_keep_left_alignment_while_adding_children():
    """The horizontal layout grows rightwards, keeping its left edge fixed."""
    first = UIDummy()
    second = UIDummy()
    third = UIDummy()
    box = UIBoxLayout(x=100, y=400, vertical=False, children=[first, second])
    box.add(third)
    box.do_layout()
    assert box.left == 100
    assert box.top == 400
    assert box.height == 100
    assert box.width == 300
acdf222e1432829f36f4ae74658624bcb3c260d7 | 7,208 | py | Python | versio9/base_asset.py | doubleO8/versionone-sdk-spoon | 7d6bded063a07e728e67d82e33ccd10d4f5d12eb | [
"BSD-3-Clause"
] | null | null | null | versio9/base_asset.py | doubleO8/versionone-sdk-spoon | 7d6bded063a07e728e67d82e33ccd10d4f5d12eb | [
"BSD-3-Clause"
] | null | null | null | versio9/base_asset.py | doubleO8/versionone-sdk-spoon | 7d6bded063a07e728e67d82e33ccd10d4f5d12eb | [
"BSD-3-Clause"
] | null | null | null | from pprint import pformat as pf
from query import V1Query
class BaseAsset(object):
    """
    Provides common methods for the dynamically derived asset type classes
    built by V1Meta.asset_class.

    Instances proxy server-side assets: attribute reads are resolved lazily
    from the server and attribute writes are queued on the instance until the
    owning V1Meta commits them.
    """
    @classmethod
    def query(cls, where=None, sel=None):
        """
        Takes a V1 Data query string and returns an iterable of all matching
        items
        """
        return V1Query(cls, sel, where)

    @classmethod
    def select(cls, *selectlist):
        """Start a query that fetches only the named attributes."""
        return V1Query(cls).select(*selectlist)

    @classmethod
    def where(cls, **wherekw):
        """Start a query filtered by attribute-equality keyword terms."""
        return V1Query(cls).where(**wherekw)

    @classmethod
    def filter(cls, filterexpr):
        """Start a query filtered by a raw filter expression string."""
        return V1Query(cls).filter(filterexpr)

    @classmethod
    def asof(cls, *asofs):
        """Start a historical query at the given moments."""
        return V1Query(cls).asof(*asofs)

    @classmethod
    def from_query_select(cls, xml, asof=None):
        """
        Find or instantiate an object and fill it with data that just came back
        from query
        """
        idref = xml.get('id')
        data = cls._v1_v1meta.unpack_asset(xml)
        instance = cls._v1_v1meta.asset_from_oid(idref)
        instance.AsOf = asof
        data['AsOf'] = asof
        return instance.with_data(data)

    @classmethod
    def create(cls, **newdata):
        """
        create new asset on server and return created asset proxy instance
        """
        return cls._v1_v1meta.create_asset(cls._v1_asset_type_name, newdata)

    class IterableType(type):
        # Metaclass that makes the asset *class* iterable, so list(v1.Story)
        # yields every Story on the server.
        def __iter__(cls):
            for instance in cls.query():
                # NOTE(review): this sets a plain 'needs_refresh' attribute;
                # it looks like '_v1_needs_refresh' was intended -- confirm
                # before changing.
                instance.needs_refresh = True
                yield instance

    # The type that's instantiated to make THIS class must have an __iter__,
    # so we provide a metaclass (a thing that provides a class when
    # instantiated) that knows how to be iterated over, so we can say
    # list(v1.Story). (Python 2 style metaclass hook.)
    __metaclass__ = IterableType

    def __new__(cls, oid, moment=None):
        """
        Tries to get an instance out of the cache first, otherwise creates one
        """
        cache_key = (cls._v1_asset_type_name, oid, moment)
        cache = cls._v1_v1meta.global_cache
        # Use 'in' instead of dict.has_key(): has_key() was removed in
        # Python 3 and 'in' is the idiomatic spelling on Python 2 as well.
        if cache_key in cache:
            self = cache[cache_key]
        else:
            self = object.__new__(cls)
            self._v1_moment = moment
            self._v1_oid = oid
            self._v1_new_data = {}
            self._v1_current_data = {}
            self._v1_needs_refresh = True
            cache[cache_key] = self
        return self

    @property
    def intid(self):
        # Raw object id as handed to __new__.
        return self._v1_oid

    @property
    def data(self):
        # Snapshot of the data already fetched from the server.
        return self._v1_current_data

    def __getitem__(self, key):
        return self._v1_current_data[key]

    @property
    def idref(self):
        # Server-style object reference, e.g. "Story:1234".
        return self._v1_asset_type_name + ':' + str(self._v1_oid)

    @property
    def reprref(self):
        # Short human-readable reference used by __repr__.
        if self._v1_moment:
            return "{0}({1}:{2})".format(self._v1_asset_type_name, self._v1_oid,
                                         self._v1_moment)
        else:
            return "{0}({1})".format(self._v1_asset_type_name, self._v1_oid)

    @property
    def url(self):
        # Link to the asset detail page on the server.
        return self._v1_v1meta.server.build_url('/assetdetail.v1',
                                                query={'oid': self.idref})

    class ReprDummy:
        # Stand-in that renders a referenced asset as its short reference,
        # avoiding infinite repr recursion between linked assets.
        def __init__(self, value):
            self.value = value

        def __repr__(self):
            return self.value.reprref

    def repr_dummy(self, v):
        """Replace contained BaseAssets with ReprDummy stand-ins."""
        if isinstance(v, list):
            return [
                self.ReprDummy(item) if isinstance(item, BaseAsset) else item
                for item in v]
        elif isinstance(v, BaseAsset):
            return self.ReprDummy(v)
        else:
            return v

    def repr_shallow(self, d):
        """Pretty-print a data dict, dropping falsy values and avoiding
        repr loops through referenced assets."""
        # patch up the dict that pformat sees to avoid repr loops
        return pf(dict(
            (k, self.repr_dummy(v))
            for (k, v)
            in d.items()
            if v
        )
        )

    def __repr__(self):
        out = self.reprref
        shallowdata = self.repr_shallow(self._v1_current_data)
        if shallowdata != '{}':
            out += '.with_data({0})'.format(shallowdata)
        shallownew = self.repr_shallow(self._v1_new_data)
        if shallownew != '{}':
            out += '.pending({0})'.format(shallownew)
        return out

    def _v1_getattr(self, attr):
        """
        Intercept access to missing attribute names.
        first return uncommitted data, then refresh if needed,
        then get single attr, else fail
        """
        # 'in' instead of has_key() for Python 3 compatibility.
        if attr in self._v1_new_data:
            value = self._v1_new_data[attr]
        else:
            if self._v1_needs_refresh:
                self._v1_refresh()
            # Membership test directly on the dict; building .keys() first
            # is redundant.
            if attr not in self._v1_current_data:
                self._v1_current_data[attr] = self._v1_get_single_attr(attr)
            value = self._v1_current_data[attr]
        return value

    def _v1_setattr(self, attr, value):
        """
        Stores a new value for later commit
        """
        if attr.startswith('_v1_'):
            object.__setattr__(self, attr, value)
        else:
            self._v1_new_data[attr] = value
            self._v1_v1meta.add_to_dirty_list(self)
            self._v1_needs_commit = True

    def set(self, **kw):
        """Queue keyword data for commit; returns self for chaining."""
        self.pending(kw)
        return self

    def with_data(self, newdata):
        """
        bulk-set instance data
        """
        self._v1_current_data.update(dict(newdata))
        self._v1_needs_refresh = False
        return self

    def pending(self, newdata):
        """
        bulk-set data to commit
        """
        self._v1_new_data.update(dict(newdata))
        self._v1_v1meta.add_to_dirty_list(self)
        self._v1_needs_commit = True

    def _v1_commit(self):
        """
        Commits the object to the server and invalidates its sync state
        """
        if self._v1_needs_commit:
            self._v1_v1meta.update_asset(self._v1_asset_type_name, self._v1_oid,
                                         self._v1_new_data)
            self._v1_needs_commit = False
        self._v1_new_data = {}
        self._v1_current_data = {}
        self._v1_needs_refresh = True

    def _v1_refresh(self):
        """
        Syncs the objects from current server data
        """
        self._v1_current_data = self._v1_v1meta.read_asset(
            self._v1_asset_type_name, self._v1_oid, self._v1_moment)
        self._v1_needs_refresh = False

    def _v1_get_single_attr(self, attr):
        """Fetch one attribute value from the server."""
        return self._v1_v1meta.get_attr(self._v1_asset_type_name, self._v1_oid,
                                        attr, self._v1_moment)

    def _v1_execute_operation(self, opname):
        """Run a server-side operation on this asset and mark it stale."""
        result = self._v1_v1meta.execute_operation(self._v1_asset_type_name,
                                                   self._v1_oid, opname)
        self._v1_needs_refresh = True
        return result
| 31.614035 | 81 | 0.560766 |
acdf249f04c3771613ea0c157d57ee6599a21954 | 1,796 | py | Python | hendrix/utils/__init__.py | mpasternak/hendrix | 1cc96b9edb3a6760306b80b50001e3dfbf57fceb | [
"MIT"
] | null | null | null | hendrix/utils/__init__.py | mpasternak/hendrix | 1cc96b9edb3a6760306b80b50001e3dfbf57fceb | [
"MIT"
] | null | null | null | hendrix/utils/__init__.py | mpasternak/hendrix | 1cc96b9edb3a6760306b80b50001e3dfbf57fceb | [
"MIT"
] | null | null | null | import os
import sys
from importlib import import_module
import chalk
import six
# Absolute path of the directory containing this module; used by get_pid()
# as the default home for pid files.
HENDRIX_DIR = os.path.dirname(os.path.abspath(__file__))
# Location of packaged share data, resolved relative to the running
# interpreter's directory (e.g. <venv>/bin/../share/hendrix -- TODO confirm
# the intended layout; this joins against dirname(sys.executable) directly).
SHARE_PATH = os.path.join(
    os.path.dirname(sys.executable),
    'share/hendrix'
)
def get_pid(options):
    """Return the default location of the pid file for process management."""
    if options['settings']:
        namespace = options['settings']
    else:
        namespace = options['wsgi']
    return '%s/%s_%s.pid' % (
        HENDRIX_DIR, options['http_port'], namespace.replace('.', '_')
    )
def responseInColor(request, status, headers, prefix='Response', opts=None):
    """Print a one-line summary of an HTTP response, colored by status class.

    2xx prints green, 3xx blue, everything else red.

    :param request: the request being answered (host/method/path are logged)
    :param status: status line, e.g. "200 OK"
    :param headers: response headers (unused; kept for caller compatibility)
    :param prefix: label prepended to the log line
    :param opts: extra chalk formatting options
    """
    code, message = status.split(None, 1)
    message = '%s [%s] => Request %s %s %s on pid %d' % (
        prefix,
        code,
        str(request.host),
        request.method,
        request.path,
        os.getpid()
    )
    # Floor division keeps the status class an int on both Python 2 and 3.
    # With plain '/', Python 3 yields a float (e.g. 201 / 100 == 2.01), so
    # any status that isn't an exact multiple of 100 would be mis-colored.
    signal = int(code) // 100
    if signal == 2:
        chalk.green(message, opts=opts)
    elif signal == 3:
        chalk.blue(message, opts=opts)
    else:
        chalk.red(message, opts=opts)
def import_string(dotted_path):
    """Resolve a dotted path like ``"pkg.mod.ClassName"`` to the object it
    names.  Raises ImportError (with the original traceback attached) when
    the path is malformed or the module lacks the attribute."""
    try:
        pkg_path, attr_name = dotted_path.rsplit('.', 1)
    except ValueError:
        reason = "%s doesn't look like a module path" % dotted_path
        six.reraise(ImportError, ImportError(reason), sys.exc_info()[2])
    target_module = import_module(pkg_path)
    try:
        return getattr(target_module, attr_name)
    except AttributeError:
        reason = 'Module "%s" does not define a "%s" attribute/class' % (
            dotted_path, attr_name
        )
        six.reraise(ImportError, ImportError(reason), sys.exc_info()[2])
| 28.0625 | 79 | 0.635857 |
acdf24cdda361ce8623f5e8e6dddbcc8f81cccc1 | 2,616 | py | Python | python/bark_client/main.py | pushyzheng/bark-client | 7c396e82ad60814bc55c83881ed0f4f4dc337a93 | [
"MIT"
] | null | null | null | python/bark_client/main.py | pushyzheng/bark-client | 7c396e82ad60814bc55c83881ed0f4f4dc337a93 | [
"MIT"
] | null | null | null | python/bark_client/main.py | pushyzheng/bark-client | 7c396e82ad60814bc55c83881ed0f4f4dc337a93 | [
"MIT"
] | null | null | null | import requests
import json
from bark_client.utils import logger, is_blank
class SoundType(object):
    """String constants naming push-notification sounds.

    These values are apparently intended for the ``sound`` argument of
    ``BarkClient.push`` / ``BarkClient.get_request_url`` in this module
    (TODO confirm against the Bark server's accepted sound list).
    """
    ALARM = 'alarm'
    ANTICIPATE = 'anticipate'
    BELL = 'bell'
    BIRDSONG = 'birdsong'
    BLOOM = 'bloom'
    CALYPSO = 'calypso'
    CHIME = 'chime'
    CHOO = 'choo'
    DESCENT = 'descent'
    ELECTRONIC = 'electronic'
    FANFARE = 'fanfare'
    GLASS = 'glass'
    GOTOSLEEP = 'gotosleep'
    HEALTHNOTIFICATION = 'healthnotification'
    HORN = 'horn'
    LADDER = 'ladder'
    MAILSEND = 'mailsend'
    MINUET = 'minuet'
    MULTIWAYINVITATION = 'multiwayinvitation'
    NEWMAIL = 'newmail'
    NEWSFLASH = 'newsflash'
    NOIR = 'noir'
    PAYMENTSUCCESS = 'paymentsuccess'
    SHAKE = 'shake'
    SHERWOODFOREST = 'sherwoodforest'
    SPELL = 'spell'
    SUSPENSE = 'suspense'
    TELEGRAPH = 'telegraph'
    TIPTOES = 'tiptoes'
    TYPEWRITERS = 'typewriters'
    UPDATE = 'update'
class BarkClient(object):
    """Client for a Bark push server: builds request URLs and sends pushes."""

    def __init__(self, domain, key_list):
        # Server domain and the default set of device keys to push to.
        self.domain = domain
        self.key_list = key_list

    def get_request_url(self, content, key, title=None, group=None,
                        url=None, sound=None, automatically_copy=False):
        """Return ``(request_url, query_params)`` for a single device *key*."""
        pieces = ['https://{domain}/{key}'.format(domain=self.domain, key=key)]
        if title:
            pieces.append('/{title}'.format(title=title))
        pieces.append('/{}'.format(content))
        base_url = ''.join(pieces)

        def keep(value):
            # Blank values become None so they carry no query parameter.
            return None if is_blank(value) else value

        params = {
            'group': keep(group),
            'automatically_copy': keep(automatically_copy),
            'url': keep(url),
            'sound': keep(sound)
        }
        return base_url, params

    def push(self, content, title=None, url=None, group=None,
             receivers=None, sound=None, automatically_copy=False):
        """Push *content* to every receiver key; return the keys that failed."""
        failing_receiver = []
        targets = receivers or self.key_list
        for key in targets:
            base_url, params = self.get_request_url(
                content=content, key=key, title=title,
                group=group, url=url, sound=sound,
                automatically_copy=automatically_copy
            )
            logger.info("Push to {}".format(base_url))
            resp = requests.get(base_url, params=params)
            data = json.loads(resp.text)
            ok = resp.status_code == 200 and data['code'] == 200
            if not ok:
                logger.error("Fail to push to [{}], error message = {}".format(key, data['message']))
                failing_receiver.append(key)
        logger.info("Number of failed pushes: {}".format(len(failing_receiver)))
        return failing_receiver
| 32.7 | 101 | 0.606651 |
acdf25b03d551be844712d70e87e6316c406c177 | 4,717 | py | Python | tools/utils.py | lzmisscc/Tilt-License-Plate-Recognition | 5e69b02a4ef514968dc625ef687056fa8f1afcf9 | [
"MIT"
] | null | null | null | tools/utils.py | lzmisscc/Tilt-License-Plate-Recognition | 5e69b02a4ef514968dc625ef687056fa8f1afcf9 | [
"MIT"
] | null | null | null | tools/utils.py | lzmisscc/Tilt-License-Plate-Recognition | 5e69b02a4ef514968dc625ef687056fa8f1afcf9 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
from torch.autograd import Variable
import collections
class strLabelConverterForAttention(object):
    """Convert between strings and integer label sequences (for attention).

    NOTE:
        Insert `EOS` to the alphabet for attention.

    Args:
        alphabet (str): `sep`-separated list of the possible tokens; each
            token's position in the split list becomes its integer label.
        sep (str): separator used inside `alphabet`.
    """

    def __init__(self, alphabet, sep):
        self._scanned_list = False   # True once scan() has filtered the text
        self._out_of_list = ''       # unknown characters already reported
        self._ignore_case = True
        self.sep = sep
        self.alphabet = alphabet.split(sep)
        # token -> integer label (also gives O(1) membership tests in scan)
        self.dict = {item: i for i, item in enumerate(self.alphabet)}

    def scan(self, text):
        """Remove characters that are not in the alphabet.

        Each unknown character is reported once: printed and appended to
        ``out_of_list.txt``.  Returns the cleaned strings as a tuple.
        """
        cleaned = []
        for sample in text:
            kept = ''
            for chara in sample:
                if chara in self.dict:
                    kept += chara
                elif chara not in self._out_of_list:
                    # First sighting of an out-of-alphabet character.
                    self._out_of_list += chara
                    with open("out_of_list.txt", "a+") as file_out_of_list:
                        file_out_of_list.write(chara + "\n")
                    print('" %s " is not in alphabet...' % chara)
            cleaned.append(kept)
        self._scanned_list = True
        return tuple(cleaned)

    def encode(self, text, scanned=True):
        """Encode a str (or iterable of str) as integer labels.

        Args:
            text (str or iterable of str): texts to convert.
            scanned (bool): when False, run scan() on *text* first.

        Returns:
            torch.LongTensor [length_0 + ... + length_{n-1}]: encoded texts.
            torch.LongTensor [n]: length of each text.
        """
        # collections.Iterable was removed in Python 3.10; use the ABC from
        # its current home (imported locally to keep the module header as-is).
        from collections.abc import Iterable
        self._scanned_list = scanned
        if not self._scanned_list:
            text = self.scan(text)
        if isinstance(text, str):
            text = [self.dict[char] for char in text]
            length = [len(text)]
        elif isinstance(text, Iterable):
            length = [len(s) for s in text]
            text = ''.join(text)
            text, _ = self.encode(text)
        return (torch.LongTensor(text), torch.LongTensor(length))

    def decode(self, t, length):
        """Decode encoded labels back into str(s).

        Args:
            t (torch.LongTensor): flat encoded texts.
            length (torch.LongTensor): length of each text.

        Raises:
            AssertionError: when the texts and the lengths do not match.

        Returns:
            str when *length* holds a single entry, else a list of str.
        """
        if length.numel() == 1:
            length = length[0]
            assert t.numel() == length, "text with length: {} does not match declared length: {}".format(t.numel(), length)
            return ''.join([self.alphabet[i] for i in t])
        else:
            # Batch mode: slice t per declared length and decode recursively.
            assert t.numel() == length.sum(), "texts with length: {} does not match declared length: {}".format(t.numel(), length.sum())
            texts = []
            index = 0
            for i in range(length.numel()):
                l = length[i]
                texts.append(
                    self.decode(
                        t[index:index + l], torch.LongTensor([l])))
                index += l
            return texts
class averager(object):
    """Running average over the elements of `torch.Variable` / `torch.Tensor` values."""

    def __init__(self):
        # Start from a clean accumulator.
        self.reset()

    def add(self, v):
        """Fold every element of *v* into the running average."""
        if isinstance(v, Variable):
            count, v = v.data.numel(), v.data.sum()
        elif isinstance(v, torch.Tensor):
            count, v = v.numel(), v.sum()
        self.n_count += count
        self.sum += v

    def reset(self):
        """Discard everything accumulated so far."""
        self.n_count = 0
        self.sum = 0

    def val(self):
        """Return the mean of all added elements (0 when nothing was added)."""
        if self.n_count == 0:
            return 0
        return self.sum / float(self.n_count)
def loadData(v, data):
    """Copy `data` into tensor `v` in place, resizing `v` to match.

    PyTorch >= 1.0 resizes the tensor directly; older versions must go
    through the `.data` attribute.
    """
    major, _ = get_torch_version()
    target = v if major >= 1 else v.data
    target.resize_(data.size()).copy_(data)
def get_torch_version():
    """Return the installed PyTorch version as an ``(major, minor)`` int pair."""
    major, minor = (int(part) for part in str(torch.__version__).split(".")[:2])
    return major, minor
| 31.238411 | 136 | 0.527454 |
acdf262941b8d5f38ea37521c4f2e210c713788b | 253,206 | py | Python | Admin.py | MisaelGuilherme/Projeto_Multimoldes_Monitoramento_ADMIN | 9c5ed0c83d11a36e9667a4f01a0a98829ff91948 | [
"MIT"
] | 1 | 2021-02-22T16:08:04.000Z | 2021-02-22T16:08:04.000Z | Admin.py | MisaelGuilherme/Projeto_Multimoldes_Monitoramento_ADMIN | 9c5ed0c83d11a36e9667a4f01a0a98829ff91948 | [
"MIT"
] | null | null | null | Admin.py | MisaelGuilherme/Projeto_Multimoldes_Monitoramento_ADMIN | 9c5ed0c83d11a36e9667a4f01a0a98829ff91948 | [
"MIT"
] | null | null | null | from tkinter import *
from tkinter import ttk
from tkinter import tix
from tkinter import filedialog
from tkscrolledframe import ScrolledFrame
from ttkwidgets import CheckboxTreeview
from math import *
from platform import *
from tkinter import messagebox
from datetime import *
import mysql.connector
import threading
import pandas as pd
from random import randint
class Database_Server():
    """Holds the shared MySQL connection/cursor used by the admin screens."""

    def connection_database(self):
        """Open the server connection and cursor; record success in ``bancoConnect``."""
        try:
            credenciais = dict(
                host='10.0.0.65',
                database='empresa_funcionarios',
                user='MultimoldesAdmin',
                password='',
            )
            self.bancoServer = mysql.connector.connect(**credenciais)
            self.bancoServer.autocommit = True
            self.cursor = self.bancoServer.cursor()
            self.bancoConnect = True
        except Exception as erro:
            print(f'{erro}, {(erro.__class__)}')
            self.bancoConnect = False

    def encerrando_conexao_database(self):
        """Close the server connection and its cursor."""
        self.bancoServer.close()
        self.cursor.close()
class Funcs():
def verificar_campos_cadastro(self, parametro):
#Atribuição dos campos cadastrais nas variáveis
a = self.campoNome.get()
b = self.campoCPF.get()
c = self.campoSenha.get()
d = self.campoConfirmaSenha.get()
e = self.campoFuncao.get()
f = self.campoFone.get()
g = self.campoNasc.get()
#Verificando se algum campo não foi preenchido
if a == '' or b == '' or c == '' or d == '' or e == 'Selecione' or f == '' or g == '' or self.box1.get() == '' or self.box2.get() == '' or self.box3.get() == '' or self.box4.get() == '' or self.box5.get() == '' or self.box6.get() == '' or self.box7.get() == '' or self.box8.get() == '' or self.box9.get() == '' or self.box10.get() == '' or self.box11.get() == '' or self.box12.get() == '' or self.box13.get() == '' or self.box14.get() == '' or self.box15.get() == '' or self.box16.get() == '' or self.box17.get() == '' or self.box18.get() == '' or self.box19.get() == '' or self.box20.get() == '' or self.box21.get() == '' or self.box22.get() == '' or self.box23.get() == '' or self.box24.get() == '' or self.box25.get() == '' or self.box26.get() == '' or self.box27.get() == '' or self.box28.get() == '' or self.box29.get() == '' or self.box30.get() == '' or self.box31.get() == '' or self.box32.get() == '' or self.box33.get() == '' or self.box34.get() == '' or self.box35.get() == '' or self.box36.get() == '' or self.box37.get() == '' or self.box38.get() == '' or self.box39.get() == '' or self.box40.get() == '' or self.box41.get() == '':
#Mudando cor para preto caso o usuário tenha errado em algum campo e tenha corrigdo
self.lbNome['fg'] = '#4c78ff'
self.lbCPF['fg'] = '#4c78ff'
self.lbFuncao['fg'] = '#4c78ff'
self.lbFone['fg'] = '#4c78ff'
self.lbNasc['fg'] = '#4c78ff'
self.lbSenha['fg'] = '#4c78ff'
self.lbConfirmaSenha['fg'] = '#4c78ff'
self.campoNome['bg'] = 'white'
self.campoCPF['bg'] = 'white'
self.campoFone['bg'] = 'white'
self.campoNasc['bg'] = 'white'
self.campoSenha['bg'] = 'white'
self.campoConfirmaSenha['bg'] = 'white'
self.lbAtribuicao1['fg'] = '#4c78ff'
self.lbAtribuicao2['fg'] = '#4c78ff'
self.lbAtribuicao3['fg'] = '#4c78ff'
self.lbAtribuicao4['fg'] = '#4c78ff'
self.lbAtribuicao5['fg'] = '#4c78ff'
self.lbAtribuicao6['fg'] = '#4c78ff'
self.lbAtribuicao7['fg'] = '#4c78ff'
self.lbAtribuicao8['fg'] = '#4c78ff'
self.lbAtribuicao9['fg'] = '#4c78ff'
self.lbAtribuicao10['fg'] = '#4c78ff'
self.lbAtribuicao11['fg'] = '#4c78ff'
self.lbAtribuicao12['fg'] = '#4c78ff'
self.lbAtribuicao13['fg'] = '#4c78ff'
self.lbAtribuicao14['fg'] = '#4c78ff'
self.lbAtribuicao15['fg'] = '#4c78ff'
self.lbAtribuicao16['fg'] = '#4c78ff'
self.lbAtribuicao17['fg'] = '#4c78ff'
self.lbAtribuicao18['fg'] = '#4c78ff'
self.lbAtribuicao19['fg'] = '#4c78ff'
self.lbAtribuicao20['fg'] = '#4c78ff'
self.lbAtribuicao21['fg'] = '#4c78ff'
self.lbAtribuicao22['fg'] = '#4c78ff'
self.lbAtribuicao23['fg'] = '#4c78ff'
self.lbAtribuicao24['fg'] = '#4c78ff'
self.lbAtribuicao25['fg'] = '#4c78ff'
self.lbAtribuicao26['fg'] = '#4c78ff'
self.lbAtribuicao27['fg'] = '#4c78ff'
self.lbAtribuicao28['fg'] = '#4c78ff'
self.lbAtribuicao29['fg'] = '#4c78ff'
self.lbAtribuicao30['fg'] = '#4c78ff'
self.lbAtribuicao31['fg'] = '#4c78ff'
self.lbAtribuicao32['fg'] = '#4c78ff'
self.lbAtribuicao33['fg'] = '#4c78ff'
self.lbAtribuicao34['fg'] = '#4c78ff'
self.lbAtribuicao35['fg'] = '#4c78ff'
self.lbAtribuicao36['fg'] = '#4c78ff'
self.lbAtribuicao37['fg'] = '#4c78ff'
self.lbAtribuicao38['fg'] = '#4c78ff'
self.lbAtribuicao39['fg'] = '#4c78ff'
self.lbAtribuicao40['fg'] = '#4c78ff'
if a == '':
self.campoNome['bg'] = 'pink'
self.lbNome['fg'] = 'red'
if b == '':
self.campoCPF['bg'] = 'pink'
self.lbCPF['fg'] = 'red'
if c == '':
self.campoSenha['bg'] = 'pink'
self.lbSenha['fg'] = 'red'
if d == '':
self.campoConfirmaSenha['bg'] = 'pink'
self.lbConfirmaSenha['fg'] = 'red'
if e == 'Selecione':
self.lbFuncao['fg'] = 'red'
if f == '':
self.campoFone['bg'] = 'pink'
self.lbFone['fg'] = 'red'
if g == '':
self.campoNasc['bg'] = 'pink'
self.lbNasc['fg'] = 'red'
if self.box1.get() == '' or int(self.box1.get()) > 4:
self.lbAtribuicao1['fg'] = 'red'
if self.box2.get() == '' or int(self.box2.get()) > 4:
self.lbAtribuicao2['fg'] = 'red'
if self.box3.get() == '' or int(self.box3.get()) > 4:
self.lbAtribuicao3['fg'] = 'red'
if self.box4.get() == '' or int(self.box4.get()) > 4:
self.lbAtribuicao4['fg'] = 'red'
if self.box5.get() == '' or int(self.box5.get()) > 4:
self.lbAtribuicao5['fg'] = 'red'
if self.box6.get() == '' or int(self.box6.get()) > 4:
self.lbAtribuicao6['fg'] = 'red'
if self.box7.get() == '' or int(self.box7.get()) > 4:
self.lbAtribuicao7['fg'] = 'red'
if self.box8.get() == '' or int(self.box8.get()) > 4:
self.lbAtribuicao8['fg'] = 'red'
if self.box9.get() == '' or int(self.box9.get()) > 4:
self.lbAtribuicao9['fg'] = 'red'
if self.box10.get() == '' or int(self.box10.get()) > 4:
self.lbAtribuicao10['fg'] = 'red'
if self.box11.get() == '' or int(self.box11.get()) > 4:
self.lbAtribuicao11['fg'] = 'red'
if self.box12.get() == '' or int(self.box12.get()) > 4:
self.lbAtribuicao12['fg'] = 'red'
if self.box13.get() == '' or int(self.box13.get()) > 4:
self.lbAtribuicao13['fg'] = 'red'
if self.box14.get() == '' or int(self.box14.get()) > 4:
self.lbAtribuicao14['fg'] = 'red'
if self.box15.get() == '' or int(self.box15.get()) > 4:
self.lbAtribuicao15['fg'] = 'red'
if self.box16.get() == '' or int(self.box16.get()) > 4:
self.lbAtribuicao16['fg'] = 'red'
if self.box17.get() == '' or int(self.box17.get()) > 4:
self.lbAtribuicao17['fg'] = 'red'
if self.box18.get() == '' or int(self.box18.get()) > 4:
self.lbAtribuicao18['fg'] = 'red'
if self.box19.get() == '' or int(self.box19.get()) > 4:
self.lbAtribuicao19['fg'] = 'red'
if self.box20.get() == '' or int(self.box20.get()) > 4:
self.lbAtribuicao20['fg'] = 'red'
if self.box21.get() == '' or int(self.box21.get()) > 4:
self.lbAtribuicao21['fg'] = 'red'
if self.box22.get() == '' or int(self.box22.get()) > 4:
self.lbAtribuicao22['fg'] = 'red'
if self.box23.get() == '' or int(self.box23.get()) > 4:
self.lbAtribuicao23['fg'] = 'red'
if self.box24.get() == '' or int(self.box24.get()) > 4:
self.lbAtribuicao24['fg'] = 'red'
if self.box25.get() == '' or int(self.box25.get()) > 4:
self.lbAtribuicao25['fg'] = 'red'
if self.box26.get() == '' or int(self.box26.get()) > 4:
self.lbAtribuicao26['fg'] = 'red'
if self.box27.get() == '' or int(self.box27.get()) > 4:
self.lbAtribuicao27['fg'] = 'red'
if self.box28.get() == '' or int(self.box28.get()) > 4:
self.lbAtribuicao28['fg'] = 'red'
if self.box29.get() == '' or int(self.box29.get()) > 4:
self.lbAtribuicao29['fg'] = 'red'
if self.box30.get() == '' or int(self.box30.get()) > 4:
self.lbAtribuicao30['fg'] = 'red'
if self.box31.get() == '' or int(self.box31.get()) > 4:
self.lbAtribuicao31['fg'] = 'red'
if self.box32.get() == '' or int(self.box32.get()) > 4:
self.lbAtribuicao32['fg'] = 'red'
if self.box33.get() == '' or int(self.box33.get()) > 4:
self.lbAtribuicao33['fg'] = 'red'
if self.box34.get() == '' or int(self.box34.get()) > 4:
self.lbAtribuicao34['fg'] = 'red'
if self.box35.get() == '' or int(self.box35.get()) > 4:
self.lbAtribuicao35['fg'] = 'red'
if self.box36.get() == '' or int(self.box36.get()) > 4:
self.lbAtribuicao36['fg'] = 'red'
if self.box37.get() == '' or int(self.box37.get()) > 4:
self.lbAtribuicao37['fg'] = 'red'
if self.box38.get() == '' or int(self.box38.get()) > 4:
self.lbAtribuicao38['fg'] = 'red'
if self.box39.get() == '' or int(self.box39.get()) > 4:
self.lbAtribuicao39['fg'] = 'red'
if self.box40.get() == '' or int(self.box40.get()) > 4:
self.lbAtribuicao40['fg'] = 'red'
if self.box41.get() == '' or int(self.box41.get()) > 4:
self.lbAtribuicao41['fg'] = 'red'
return messagebox.showerror('Alerta', 'Verifique os campos')
elif len(a) < 6 or len(b) < 11 or len(c) < 8 or len(d) < 8 or c != d:
#Mudando cor para preto caso o usuário tenha errado em algum campo e tenha corrigdo
self.lbNome['fg'] = '#4c78ff'
self.lbCPF['fg'] = '#4c78ff'
self.lbSenha['fg'] = '#4c78ff'
self.lbConfirmaSenha['fg'] = '#4c78ff'
self.campoNome['bg'] = 'white'
self.campoCPF['bg'] = 'white'
self.campoSenha['bg'] = 'white'
self.campoConfirmaSenha['bg'] = 'white'
if len(a) < 6:
self.campoNome['bg'] = 'pink'
self.lbNome['fg'] = 'red'
if len(b) < 11:
self.campoCPF['bg'] = 'pink'
self.lbCPF['fg'] = 'red'
if len(c) < 8:
self.campoSenha['bg'] = 'pink'
self.lbSenha['fg'] = 'red'
if len(d) < 8:
self.campoConfirmaSenha['bg'] = 'pink'
self.lbConfirmaSenha['fg'] = 'red'
return messagebox.showerror('Alerta', 'Verifique os campos')
self.lbNome['fg'] = '#4c78ff'
self.lbCPF['fg'] = '#4c78ff'
self.lbSenha['fg'] = '#4c78ff'
self.lbConfirmaSenha['fg'] = '#4c78ff'
self.campoNome['bg'] = 'white'
self.campoCPF['bg'] = 'white'
self.campoSenha['bg'] = 'white'
self.campoConfirmaSenha['bg'] = 'white'
self.lbAtribuicao1['fg'] = '#4c78ff'
self.lbAtribuicao2['fg'] = '#4c78ff'
self.lbAtribuicao3['fg'] = '#4c78ff'
self.lbAtribuicao4['fg'] = '#4c78ff'
self.lbAtribuicao5['fg'] = '#4c78ff'
self.lbAtribuicao6['fg'] = '#4c78ff'
self.lbAtribuicao7['fg'] = '#4c78ff'
self.lbAtribuicao8['fg'] = '#4c78ff'
self.lbAtribuicao9['fg'] = '#4c78ff'
self.lbAtribuicao10['fg'] = '#4c78ff'
self.lbAtribuicao11['fg'] = '#4c78ff'
self.lbAtribuicao12['fg'] = '#4c78ff'
self.lbAtribuicao13['fg'] = '#4c78ff'
self.lbAtribuicao14['fg'] = '#4c78ff'
self.lbAtribuicao15['fg'] = '#4c78ff'
self.lbAtribuicao16['fg'] = '#4c78ff'
self.lbAtribuicao17['fg'] = '#4c78ff'
self.lbAtribuicao18['fg'] = '#4c78ff'
self.lbAtribuicao19['fg'] = '#4c78ff'
self.lbAtribuicao20['fg'] = '#4c78ff'
self.lbAtribuicao21['fg'] = '#4c78ff'
self.lbAtribuicao22['fg'] = '#4c78ff'
self.lbAtribuicao23['fg'] = '#4c78ff'
self.lbAtribuicao24['fg'] = '#4c78ff'
self.lbAtribuicao25['fg'] = '#4c78ff'
self.lbAtribuicao26['fg'] = '#4c78ff'
self.lbAtribuicao27['fg'] = '#4c78ff'
self.lbAtribuicao28['fg'] = '#4c78ff'
self.lbAtribuicao29['fg'] = '#4c78ff'
self.lbAtribuicao30['fg'] = '#4c78ff'
self.lbAtribuicao31['fg'] = '#4c78ff'
self.lbAtribuicao32['fg'] = '#4c78ff'
self.lbAtribuicao33['fg'] = '#4c78ff'
self.lbAtribuicao34['fg'] = '#4c78ff'
self.lbAtribuicao35['fg'] = '#4c78ff'
self.lbAtribuicao36['fg'] = '#4c78ff'
self.lbAtribuicao37['fg'] = '#4c78ff'
self.lbAtribuicao38['fg'] = '#4c78ff'
self.lbAtribuicao39['fg'] = '#4c78ff'
self.lbAtribuicao40['fg'] = '#4c78ff'
cont1 = 0
cont2 = 0
#Verificando se o campo de telefone foi digitado corretamente
for caracter in f:
if caracter.isnumeric():
cont1 +=1
elif not caracter.isnumeric():
cont2 +=1
if cont1 != 11 or cont2 != 4:
self.lbFone['fg'] = 'red'
self.campoFone['bg'] = 'pink'
return messagebox.showerror('Alerta', 'Verifique os campos')
else:
self.lbFone['fg'] = '#4c78ff'
self.campoFone['bg'] = 'white'
cont1 = 0
cont2 = 0
#Verificando se o campo de data de nascimento foi digitado corretamente
for caracter in g:
if caracter.isnumeric():
cont1 +=1
elif not caracter.isnumeric():
cont2 +=1
if cont1 != 8 or cont2 != 2:
self.lbNasc['fg'] = 'red'
self.campoNasc['bg'] = 'pink'
return messagebox.showerror('Alerta', 'Verifique os campos')
else:
self.lbNasc['fg'] = '#4c78ff'
self.campoNasc['bg'] = 'white'
try:
if self.bancoServer.is_connected():
#Após a verificação invocará a função de cadastramento
if parametro == 'cadastrar':
self.inserindo_dados_cadastro()
#Após a verificação invocará a função para alteraçãod e registro
elif parametro == 'alterar':
self.alterar_dados_funcionarios()
except:
messagebox.showerror('Alerta', 'Erro conexão com Banco de Dados não estabelecida')
def inserindo_dados_cadastro(self):
#Abrindo imagem selecionada para perfil e codificando para binário
with open(self.arquivoIMG, 'rb') as arquivoBinary:
self.imageBinary = arquivoBinary.read()
try:
#Verificando se o CPF digitado no campo não já foi cadastrado no banco
self.cursor.execute("select count(CPF) from funcionarios where CPF = "+self.campoCPF.get()+" and Membro = 'inativo'")
verificaCPF = self.cursor.fetchall()[0][0]
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
return messagebox.showerror('Alerta', 'Erro ao realizar consulta com MySQL. Verifique a conexão com o banco')
if verificaCPF == 1:
if messagebox.askyesno('Alerta', 'O CPF: '+self.campoCPF.get()+' está inativo. Deseja reativá-lo novamente?'):
self.cursor.execute("update funcionarios set Membro = 'ativo' where CPF = "+self.campoCPF.get())
return messagebox.showinfo('Alerta', 'Usuário reativado com sucesso!')
else: return ''
try:
#Verificando se o CPF digitado no campo não está cadastrado no banco
self.cursor.execute("select count(CPF) from funcionarios where CPF = "+self.campoCPF.get())
verificaCPF = self.cursor.fetchall()[0][0]
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
return messagebox.showerror('Alerta', 'Erro ao realizar consulta com MySQL. Verifique a conexão com o banco')
if verificaCPF == 1:
return messagebox.showwarning('Alerta', 'O CPF - '+self.campoCPF.get()+', já possui cadastro')
#Atribuição dos campos cadastrais nas variáveis
a = self.campoNome.get().title()
b = self.campoCPF.get()
c = self.campoConfirmaSenha.get()
e = self.campoFuncao.get()
f = self.campoFone.get()
g = self.campoNasc.get()
#Formatando comando SQL com os dados a serem enviados
tp1 = "INSERT INTO funcionarios (ID, Nome, CPF, Senha, Funcao, Telefone, Data_Nasc, Foto) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"
tp2 = ('id', a, b, c, e, f, g, self.imageBinary)
try:
#Executando comandos
self.cursor.execute(tp1, tp2)
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
return messagebox.showerror('Alerta', 'Erro ao inserir dados. Verifique a conexão com o banco')
#Inserindo dados cadastrado don funcionário no Banco de Dados Servidor
v1 = self.box1.get()
v2 = self.box2.get()
v3 = self.box3.get()
v4 = self.box4.get()
v5 = self.box5.get()
v6 = self.box6.get()
v7 = self.box7.get()
v8 = self.box8.get()
v9 = self.box9.get()
v10 = self.box10.get()
v11 = self.box11.get()
v12 = self.box12.get()
v13 = self.box13.get()
v14 = self.box14.get()
v15 = self.box15.get()
v16 = self.box16.get()
v17 = self.box17.get()
v18 = self.box18.get()
v19 = self.box19.get()
v20 = self.box20.get()
v21 = self.box21.get()
v22 = self.box22.get()
v23 = self.box23.get()
v24 = self.box24.get()
v25 = self.box25.get()
v26 = self.box26.get()
v27 = self.box27.get()
v28 = self.box28.get()
v29 = self.box29.get()
v30 = self.box30.get()
v31 = self.box31.get()
v32 = self.box32.get()
v33 = self.box33.get()
v34 = self.box34.get()
v35 = self.box35.get()
v36 = self.box36.get()
v37 = self.box37.get()
v38 = self.box38.get()
v39 = self.box39.get()
v40 = self.box40.get()
v41 = self.box41.get()
listaNivel = [v1,v2,v3,v4,v5,v6,v7,v8,v9,v10,v11,v12,v13,v14,v15,v16,v17,v18,v19,v20,v21,v22,v23,v24,v25,v26,v27,v28,v29,v30,v31,v32,v33,v34,v35,v36,v37,v38,v39,v40,v41]
self.cursor.execute("select id from funcionarios where CPF ='"+b+"'")
idOperador = self.cursor.fetchall()[0][0]
self.cursor.execute("select count(id) from operacao")
operacoesTotal = self.cursor.fetchall()[0][0]
try:
for idOperacao in range(operacoesTotal):
nivel = listaNivel[idOperacao]
idOperacao += 1
self.cursor.execute("INSERT INTO competencia (id, idOperador, idOperacao, Nivel) VALUES (DEFAULT, '"+str(idOperador)+"', '"+str(idOperacao)+"', '"+nivel+"')")
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
self.atualiza_valores_funcionario()
#Chamando função para exibir os funcionários cadastrados
self.exibir_funcionarios('cadastro')
if messagebox.showinfo('Alerta', 'Usuário cadastrado com sucesso!'):
self.limpar_aba_cadastrar('fim_cadastro')
def exibir_funcionarios(self, insert):
self.listaFuncionario = []
try:
#Exibindo Funcionários na Treeview
if self.bancoServer.is_connected():
if insert == 'funcionarios':
self.cursor.execute("select id, Nome, Funcao, CPF from funcionarios where Membro = 'ativo'")
valido = self.cursor.fetchall()
for i in range (len(valido)):
#extraindo do banco de dados as informações e armazenando nas variáveis
idd = valido[i][0]
nome = valido[i][1].title()
funcao = valido[i][2]
CPF = valido[i][3]
self.viewFuncionarios.insert("", "end", values=(idd, nome, funcao, CPF))
elif insert == 'cadastro':
self.cursor.execute("select id, Nome, Funcao, CPF from funcionarios where Membro = 'ativo' order by id desc limit 1")
valido = self.cursor.fetchall()
for i in range (len(valido)):
#extraindo do banco de dados as informações e armazenando nas variáveis
idd = valido[i][0]
nome = valido[i][1].title()
funcao = valido[i][2]
CPF = valido[i][3]
self.viewFuncionarios.insert("", "end", values=(idd, nome, funcao, CPF))
except: pass
    def exibir_perfil_funcionarios(self, event):
        """Show the profile of the employee selected in the Treeview.

        Bound to a Treeview selection event.  Loads the record from the
        database, derives the age from the birth year, dumps the photo BLOB
        to "FotoPerfil.png" for display, counts the employee's concluded and
        currently-paused service orders (OS), and places the profile widgets.
        """
        # Values of the selected Treeview row (id is column 0).
        selecionada = self.viewFuncionarios.selection()[0]
        x = self.viewFuncionarios.item(selecionada, "values")
        i_d = x[0]
        try:
            self.cursor.execute("select id, Nome, CPF, Senha, Funcao, Telefone, Data_Nasc, Foto from funcionarios where id = "+i_d)
            valido = self.cursor.fetchall()
        except: return messagebox.showerror('Alerta', 'Verifique a conexão com o Servidor')
        # Hide the "nothing selected" placeholder label.
        self.labelAviso.place_forget()
        for valor in valido:
            idd, nome, CPF, senha, funcao, telefone, dataNasc, foto = valor
            # Age = current year minus birth year; assumes Data_Nasc is
            # dd/mm/yyyy so the year starts at index 6 — TODO confirm.
            v1 = int(dataNasc[6:])
            v2 = int(datetime.now().date().strftime('%Y'))
            idade = v2 - v1
            self.lNome['text'] = nome
            self.lIdade['text'] = str(idade)+' anos'
            self.lFone['text'] = telefone
            caminho = "FotoPerfil.png"
            # Write the photo BLOB to disk so tkinter's PhotoImage can load it.
            with open(caminho, 'wb') as fotoConvertida:
                fotoConvertida.write(foto)
            self.abrirFoto = PhotoImage(file=caminho)
            self.lbPerfil['image'] = self.abrirFoto
        # Count the employee's concluded and still-paused service orders.
        try:
            self.cursor.execute("select OS from concluidas where CPF ="+str(CPF))
            valido = self.cursor.fetchall()
            self.l_OS_Dados1['text'] = len(valido)
            self.cursor.execute("select OS from pausas where CPF = '"+str(CPF)+"' and DataRetomada = 'none'")
            valido = self.cursor.fetchall()
            self.l_OS_Dados2['text'] = len(valido)
        except: return messagebox.showerror('Alerta', 'Verifique a conexão com o Servidor')
        # Place the profile widgets now that their contents are set.
        self.botVer.place(relx=0.300, rely=0.800)
        self.lNome.place(relx=0, rely=0.350, relwidth=1)
        self.lIdade.place(relx=0, rely=0.400, relwidth=1)
        self.lFone.place(relx=0, rely=0.450, relwidth=1)
        self.l_OS_Con.place(relx=0.010, rely=0.540)
        self.l_OS_Pen.place(relx=0.010, rely=0.590)
        self.l_OS_Dados1.place(relx=0.410, rely=0.540)
        self.l_OS_Dados2.place(relx=0.410, rely=0.590)
def limpar_perfil_funcionario(self):
self.lbPerfil['image'] = self.imgPerfil
self.lNome.place_forget()
self.lIdade.place_forget()
self.lFone.place_forget()
self.l_OS_Con.place_forget()
self.l_OS_Pen.place_forget()
self.l_OS_Dados1.place_forget()
self.l_OS_Dados2.place_forget()
self.labelAviso.place(relx=0.150, rely=0.400)
def deletar_perfil_funcionario(self):
#Pegando informações da lista selecionada
try:
selecionada = self.viewFuncionarios.selection()[0]
x = self.viewFuncionarios.item(selecionada, "values")
except: return ''
if messagebox.askyesno('Remover Funcionário', 'Deseja realmente excluir este usuário?'):
cpf = x[3]
try:
#self.cursor.execute('delete from funcionarios where CPF ='+cpf)
#self.cursor.execute('delete from habilidade_funcionarios where CPF ='+cpf)
self.cursor.execute("update funcionarios set Membro = 'inativo' where CPF ="+cpf)
messagebox.showinfo('Ação Concluída', 'Ação concluída com sucesso')
self.viewFuncionarios.delete(selecionada)
self.limpar_perfil_funcionario()
self.atualiza_valores_funcionario()
except: messagebox.showerror('Erro', 'Erro ao tentar fazer operação')
def editar_perfil_funcionario(self):
#Pegando informações da lista selecionada
try:
selecionada = self.viewFuncionarios.selection()[0]
x = self.viewFuncionarios.item(selecionada, "values")
i_d = x[0]
CPF = x[3]
except: return ''
#Tentando adquirir dados do banco
try:
self.cursor.execute("select Nome, CPF, Senha, Funcao, Telefone, Data_Nasc, Foto from funcionarios where id = '"+i_d+"'")
valido = self.cursor.fetchall()
self.abas.select(self.aba3)
except: return messagebox.showerror('Sem Conexão', 'Sem Conexão com o Banco de Dados')
self.cpf_funcionario = str(valido[0][1])
self.id_funcionario = i_d
#Deletando dados dos campos
self.campoNome.delete(0, END)
self.campoCPF.delete(0, END)
self.campoConfirmaSenha.delete(0, END)
self.campoSenha.delete(0, END)
self.campoFone.delete(0, END)
self.campoNasc.delete(0, END)
#Inserindo dados nos campos
self.campoNome.insert(END, valido[0][0])
self.campoCPF.insert(END, valido[0][1])
self.campoSenha.insert(END, valido[0][2])
self.campoConfirmaSenha.insert(END, valido[0][2])
self.campoFone.insert(END, valido[0][4])
self.campoNasc.insert(END, valido[0][5])
#Lógica para descobrir qual é a função selecionada do funcionário
a = self.campoFuncao['values']
b = valido[0][3]
for c in range(len(a)):
if a[c] == b:
self.campoFuncao.current(c)
try:
self.cursor.execute("select Nivel from competencia where idOperador = '"+i_d+"'")
valido = self.cursor.fetchall()
except Exception as erro:
print(f'errou 1 {erro}, {(erro.__class__)}')
return messagebox.showerror('Alerta', 'Verifique a conexão com o Servidor')
try:
self.box1.delete(0, END)
self.box2.delete(0, END)
self.box3.delete(0, END)
self.box4.delete(0, END)
self.box5.delete(0, END)
self.box6.delete(0, END)
self.box7.delete(0, END)
self.box8.delete(0, END)
self.box9.delete(0, END)
self.box10.delete(0, END)
self.box11.delete(0, END)
self.box12.delete(0, END)
self.box13.delete(0, END)
self.box14.delete(0, END)
self.box15.delete(0, END)
self.box16.delete(0, END)
self.box17.delete(0, END)
self.box18.delete(0, END)
self.box19.delete(0, END)
self.box20.delete(0, END)
self.box21.delete(0, END)
self.box22.delete(0, END)
self.box23.delete(0, END)
self.box24.delete(0, END)
self.box25.delete(0, END)
self.box26.delete(0, END)
self.box27.delete(0, END)
self.box28.delete(0, END)
self.box29.delete(0, END)
self.box30.delete(0, END)
self.box31.delete(0, END)
self.box32.delete(0, END)
self.box33.delete(0, END)
self.box34.delete(0, END)
self.box35.delete(0, END)
self.box36.delete(0, END)
self.box37.delete(0, END)
self.box38.delete(0, END)
self.box39.delete(0, END)
self.box40.delete(0, END)
self.box41.delete(0, END)
self.box1.insert(END, valido[0][0])
self.box2.insert(END, valido[1][0])
self.box3.insert(END, valido[2][0])
self.box4.insert(END, valido[3][0])
self.box5.insert(END, valido[4][0])
self.box6.insert(END, valido[5][0])
self.box7.insert(END, valido[6][0])
self.box8.insert(END, valido[7][0])
self.box9.insert(END, valido[8][0])
self.box10.insert(END, valido[9][0])
self.box11.insert(END, valido[10][0])
self.box12.insert(END, valido[11][0])
self.box13.insert(END, valido[12][0])
self.box14.insert(END, valido[13][0])
self.box15.insert(END, valido[14][0])
self.box16.insert(END, valido[15][0])
self.box17.insert(END, valido[16][0])
self.box18.insert(END, valido[17][0])
self.box19.insert(END, valido[18][0])
self.box20.insert(END, valido[19][0])
self.box21.insert(END, valido[20][0])
self.box22.insert(END, valido[21][0])
self.box23.insert(END, valido[22][0])
self.box24.insert(END, valido[23][0])
self.box25.insert(END, valido[24][0])
self.box26.insert(END, valido[25][0])
self.box27.insert(END, valido[26][0])
self.box28.insert(END, valido[27][0])
self.box29.insert(END, valido[28][0])
self.box30.insert(END, valido[29][0])
self.box31.insert(END, valido[30][0])
self.box32.insert(END, valido[31][0])
self.box33.insert(END, valido[32][0])
self.box34.insert(END, valido[33][0])
self.box35.insert(END, valido[34][0])
self.box36.insert(END, valido[35][0])
self.box37.insert(END, valido[36][0])
self.box38.insert(END, valido[37][0])
self.box39.insert(END, valido[38][0])
self.box40.insert(END, valido[39][0])
self.box41.insert(END, valido[40][0])
except: print('errou aqui')
#Configurando imagem da foto de perfil do funcionário
self.arquivoIMG = "FotoPerfil.png"
self.cookie = PhotoImage(file="FotoPerfil.png")
self.foto['image'] = self.cookie
#Alterando ícone de abrir ou editar imagem
self.imgAdd = PhotoImage(file='image/lapis.png')
self.add['image'] = self.imgAdd
self.add.place(relx=0.955, rely=0.700)
#Inserindo botão cancelar
self.imgCancelar = PhotoImage(file='image/cancelar.png')
self.botCancelar = Button(self.aba3, image=self.imgCancelar, bg='white', activebackground='white', border=0, command=lambda:self.limpar_aba_cadastrar('fim_alteração'))
self.botCancelar.place(relx=0.72, rely=0.90)
#Escondendo botão confirmar
self.botaoConfirmar.place_forget()
#Inserindo botão Alterar
self.imgAlterar = PhotoImage(file='image/Alterar.png')
self.botAlterar = Button(self.aba3, image=self.imgAlterar, bg='white', activebackground='white', border=0, command=lambda:self.verificar_campos_cadastro('alterar'))
self.botAlterar.place(relx=0.82, rely=0.90)
def limpar_aba_cadastrar(self, parametro):
    """Clear every field of the employee-registration tab and restore defaults.

    parametro: when equal to 'fim_alteração', the edit-mode buttons
    (Cancelar/Alterar) are destroyed and the Confirmar button is shown again.
    """
    # Clear the personal-data entry widgets.
    self.campoNome.delete(0, END)
    self.campoNome.focus_force()
    self.campoCPF.delete(0, END)
    self.campoConfirmaSenha.delete(0, END)
    self.campoSenha.delete(0, END)
    self.campoFuncao.current(0)
    self.campoFone.delete(0, END)
    self.campoNasc.delete(0, END)
    # Reset all 41 skill-level boxes (self.box1 .. self.box41) to '0'.
    # Replaces 82 copy-pasted delete/insert lines with one loop.
    for n in range(1, 42):
        box = getattr(self, f'box{n}')
        box.delete(0, END)
        box.insert(END, '0')
    # Restore the "no profile" picture and the open-image icon/position.
    self.foto['image'] = self.imgSemPerfil
    self.arquivoIMG = 'image/sem_perfil.png'
    self.imgAdd = PhotoImage(file='image/abrir.png')
    self.add['image'] = self.imgAdd
    self.add.place(relx=0.890, rely=0.580)
    if parametro == 'fim_alteração':
        # Leaving edit mode: drop Cancelar/Alterar, show Confirmar again.
        self.botCancelar.destroy()
        self.botAlterar.destroy()
        self.botaoConfirmar.place(relx=0.82, rely=0.90)
def alterar_dados_funcionarios(self):
    """Persist the edited employee data (personal info, skill levels and
    profile photo) and rebuild the employees Treeview.

    All queries are parameterized; the original concatenated form fields
    straight into the SQL (injection risk). The file already uses the
    %s-parameter style for the photo UPDATE, so this is consistent.
    """
    nome = self.campoNome.get()
    cpf = self.campoCPF.get()
    senha = self.campoSenha.get()
    funcao = self.campoFuncao.get()
    fone = self.campoFone.get()
    nasc = self.campoNasc.get()
    # Reject the change if the CPF already belongs to a different employee.
    try:
        self.cursor.execute(
            "select CPF from funcionarios where CPF = %s and id <> %s",
            (cpf, self.id_funcionario))
        valido = self.cursor.fetchall()
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
        if messagebox.showerror('Alerta', 'Erro ao realizar busca no Banco de Dados'):
            return ''
    if len(valido) == 1:
        return messagebox.showinfo('Alerta', 'O CPF - '+self.campoCPF.get()+', pertence a outro funcionario')
    # Collect the 41 skill-level boxes in order (box1 .. box41);
    # replaces 41 copy-pasted vN = self.boxN.get() lines.
    listaNivel = [getattr(self, f'box{n}').get() for n in range(1, 42)]
    try:
        # Update the personal data of the employee being edited.
        self.cursor.execute(
            "UPDATE funcionarios SET Nome=%s, CPF=%s, Senha=%s, Funcao=%s, "
            "Telefone=%s, Data_Nasc=%s where CPF = %s",
            (nome, cpf, senha, funcao, fone, nasc, self.cpf_funcionario))
        # Update the employee's skill levels, row by row, in the same
        # order the ids come back from the database.
        self.cursor.execute("select id from competencia where idOperador = %s",
                            (self.id_funcionario,))
        listaId = self.cursor.fetchall()
        for n, nivel in enumerate(listaNivel):
            self.cursor.execute("UPDATE competencia SET Nivel = %s where id = %s",
                                (nivel, str(listaId[n][0])))
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
        if messagebox.showerror('Alerta', 'Erro ao atualizar dados'):
            return ''
    # Re-read the (possibly changed) profile picture as binary data.
    with open(self.arquivoIMG, 'rb') as arquivoBinary:
        self.imageBinary = arquivoBinary.read()
    valores = (self.imageBinary, self.cpf_funcionario)
    try:
        # Store the new profile picture.
        self.cursor.execute("UPDATE funcionarios SET Foto = (%s) WHERE CPF = (%s)", valores)
        # Rebuild the employees Treeview with fresh data.
        self.viewFuncionarios.delete(*self.viewFuncionarios.get_children())
        self.cursor.execute('select id, Nome, Funcao, CPF from funcionarios')
        valido = self.cursor.fetchall()
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
        if messagebox.showerror('Alerta', 'Erro ao realizar atualização e busca no Banco de Dados'):
            return ''
    # One Treeview row per employee record: (id, Nome, Funcao, CPF).
    for linha in valido:
        self.viewFuncionarios.insert("", "end", values=(linha[0], linha[1], linha[2], linha[3]))
    if messagebox.showinfo("Alteração Concluida", "Dados alterado com sucesso!"):
        self.limpar_aba_cadastrar('fim_alteração')
def atualiza_valores_funcionario(self):
    """Recount the active employees and refresh the counter label."""
    # Only rows flagged as active members are counted.
    self.cursor.execute("select * from funcionarios where Membro = 'ativo' ")
    ativos = self.cursor.fetchall()
    self.nFunc = len(ativos)
    self.lbFunc['text'] = str(self.nFunc)
def buscar(self, args):
    """Filter the finished-orders Treeview by type and a search term.

    args: Tk event object (unused); the method is bound as a callback.
    The user-typed term and the OS number are bound as SQL parameters
    (the original concatenated them into the query — injection risk).
    """
    tipo = self.boxTipo.get()
    pesquisa = self.boxPesquisar.get()
    campo = self.campoBusca.get().capitalize()
    # Drop the current rows before repopulating.
    self.viewOrdemServico.delete(*self.viewOrdemServico.get_children())
    # Map the combobox label to the value stored in the database.
    if tipo == 'Retrabalho OS':
        tipo = 'Retrabalhar OS'
    # Build the type filter fragment; the values come from a fixed
    # combobox choice set, never from free text.
    if tipo != 'Tudo':
        tipo = "Tipo = '"+tipo+"'"
    else:
        tipo = "Tipo IN ('Nova OS', 'Retrabalhar OS')"
    # Map the search-field label to its column name (fixed choices only,
    # so interpolating the column name is safe).
    colunas = {'Nome': 'Operador', 'CPF': 'CPF',
               'Nº Peça': 'CodigoPeca', 'Nº Operação': 'CodigoOperacao'}
    pesquisa = colunas.get(pesquisa, pesquisa)
    try:
        if campo == '':
            self.cursor.execute(
                "select ID, Operador, CodigoPeca, CodigoOperacao, Quant, Tipo, Maquina "
                "from concluidas where "+tipo+" and OS = %s",
                (self.pegarOS,))
        else:
            self.cursor.execute(
                "select ID, Operador, CodigoPeca, CodigoOperacao, Quant, Tipo, Maquina "
                "from concluidas where "+tipo+" and OS = %s and "+pesquisa+" like %s",
                (self.pegarOS, '%'+campo+'%'))
        valido = self.cursor.fetchall()
        if len(valido) == 0:
            self.labelerro['text'] = 'Nenhum Registro Encontrado'
            self.labelerro.place(relx=0.430, rely=0.500)
            return 0
        else:
            self.labelerro.place_forget()
            # Each row is already (ID, Operador, CodigoPeca,
            # CodigoOperacao, Quant, Tipo, Maquina).
            for linha in valido:
                self.viewOrdemServico.insert("", "end", values=linha)
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
def buscar2(self, args):
    """Filter the pauses Treeview by a search term.

    args: Tk event object (unused); the method is bound as a callback.
    The user-typed term and the OS number are bound as SQL parameters
    (the original concatenated them into the query — injection risk).
    """
    # Hide the "0 pauses" label; it is re-shown when nothing is found.
    self.lbl.place_forget()
    pesquisa = self.boxPesquisar2.get()
    campo = self.campoBusca2.get().capitalize()
    # Drop the current rows before repopulating.
    self.viewPausas.delete(*self.viewPausas.get_children())
    # Map the combobox label to the database column name (fixed choices
    # only, so interpolating the column name is safe).
    if pesquisa == 'Nome': pesquisa = 'Operador'
    elif pesquisa == 'CPF': pesquisa = 'CPF'
    try:
        if campo == '':
            self.cursor.execute(
                "select ID, Operador, MotivoPause, Tipo, Maquina from pausas where OS = %s",
                (self.pegarOS,))
            valido = self.cursor.fetchall()
            if len(valido) == 0:
                # No pauses at all: show the placeholder label instead.
                self.labelerro2.place_forget()
                self.lbl.place(relx=0.500, rely=0.500, anchor='center')
                return 0
        else:
            self.cursor.execute(
                "select ID, Operador, MotivoPause, Tipo, Maquina from pausas "
                "where OS = %s and "+pesquisa+" like %s",
                (self.pegarOS, '%'+campo+'%'))
            valido = self.cursor.fetchall()
            if len(valido) == 0:
                self.labelerro2['text'] = 'Nenhum Registro Encontrado'
                self.labelerro2.place(relx=0.500, rely=0.500, anchor='center')
                return 0
            else:
                self.labelerro2.place_forget()
        # Each row is already (ID, Operador, MotivoPause, Tipo, Maquina).
        for linha in valido:
            self.viewPausas.insert("", "end", values=linha)
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
def exibir_info_tempo_horas(self, args):
    """Show the time details of the row selected in the finished-orders view.

    args: Tk event object (unused); bound to the Treeview selection event.
    Displays programmed/operating/spent time; extra time only when non-zero.
    """
    try:
        selecionada = self.viewOrdemServico.selection()[0]
        idd = self.viewOrdemServico.item(selecionada, "values")
        # A row is selected: hide the "no information" placeholder.
        self.lblSemInformacao.place_forget()
    except: return ''  # nothing selected — keep the placeholder
    # Row id bound as a parameter (the original concatenated it into SQL).
    self.cursor.execute(
        "select TempProgramado, TempOperando, TempGasto, TempGastoExt "
        "from concluidas where id = %s", (idd[0],))
    valido = self.cursor.fetchall()
    if valido != []:
        self.dadosTempoProgramado['text'] = valido[0][0]
        self.dadosTempoOperando['text'] = valido[0][1]
        self.dadosTempoGasto['text'] = valido[0][2]
        self.dadosTempoExtra['text'] = valido[0][3]
    else:
        # No record found: show zeroed times.
        self.dadosTempoProgramado['text'] = '0:00:00'
        self.dadosTempoOperando['text'] = '0:00:00'
        self.dadosTempoGasto['text'] = '0:00:00'
        self.dadosTempoExtra['text'] = '0:00:00'
    # All four value labels share the same font.
    for rotulo in (self.dadosTempoProgramado, self.dadosTempoOperando,
                   self.dadosTempoGasto, self.dadosTempoExtra):
        rotulo['font'] = ('arial', 13)
    # Header labels describing each value.
    self.lb1.place(relx=0.010, rely=0.150)
    self.lb2.place(relx=0.010, rely=0.350)
    self.lb3.place(relx=0.010, rely=0.550)
    # Check-mark icons.
    self.img1.place(relx=0.740, rely=0.150)
    self.img2.place(relx=0.800, rely=0.350)
    self.img3.place(relx=0.620, rely=0.550)
    # Time values.
    self.dadosTempoProgramado.place(relx=0.300, rely=0.250)
    self.dadosTempoOperando.place(relx=0.300, rely=0.450)
    self.dadosTempoGasto.place(relx=0.300, rely=0.650)
    # Extra time is only shown when the record has one.
    try:
        if str(valido[0][3]) != '0:00:00':
            self.lb4.place(relx=0.010, rely=0.750)
            self.img4.place(relx=0.600, rely=0.750)
            self.dadosTempoExtra.place(relx=0.300, rely=0.850)
        else:
            self.lb4.place_forget()
            self.img4.place_forget()
            self.dadosTempoExtra.place_forget()
    except: pass
def classificar_coluna(self, por, sinal):
    """Sort the finished-orders Treeview by column *por*.

    sinal toggles the direction: 0 sorts DESC (and arms the next click
    for ASC), anything else sorts ASC. The original left *ordem* unset
    for values other than 0/1, raising NameError inside the try.
    """
    atributo = por
    try:
        if sinal == 0:
            self.sinal = 1
            ordem = 'DESC'
        else:
            # 'else' (not 'elif sinal == 1') guarantees *ordem* is
            # always defined before the query below.
            self.sinal = 0
            ordem = 'ASC'
        # The column name comes from a heading click (trusted set);
        # the OS number is bound as a parameter.
        self.cursor.execute(
            "select ID, Operador, CodigoPeca, CodigoOperacao, Quant, Tipo, Maquina "
            "from concluidas where OS = %s order by "+str(atributo)+" "+ordem+" ",
            (self.pegarOS,))
        valido = self.cursor.fetchall()
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
        # Clear the Treeview and report the database failure.
        self.viewOrdemServico.delete(*self.viewOrdemServico.get_children())
        messagebox.showerror(parent=self.janelaInicial, title='Alerta',
                             message='Erro ao comunicar-se com Banco de Dados')
        return ''
    # Rebuild the Treeview in the new order.
    self.viewOrdemServico.delete(*self.viewOrdemServico.get_children())
    for linha in valido:
        # (ID, Operador, CodigoPeca, CodigoOperacao, Quant, Tipo, Maquina)
        self.viewOrdemServico.insert("", "end", values=linha)
def remover_focus(self, event, objeto):
    """Clear the selection of *objeto* (a Treeview) and hide every
    time-detail widget, showing the 'no information' placeholder instead.

    event: Tk event object (unused); bound as a callback.
    """
    # Passing no item ids clears the current selection.
    objeto.selection_set()
    try:
        # Show the placeholder where the details used to be.
        self.lblSemInformacao.place(relx=0.5, rely=0.5, anchor="center")
        # Hide the detail widgets one by one, in the original order:
        # value labels, header labels, then the check-mark icons.
        for nome in ('dadosTempoProgramado', 'dadosTempoOperando',
                     'dadosTempoGasto', 'dadosTempoExtra',
                     'lb1', 'lb2', 'lb3', 'img4',
                     'img1', 'img2', 'img3', 'lb4'):
            getattr(self, nome).place_forget()
    except:
        pass
def transformar_tempo_decimal(self, thora, tminu, tsegu):
    """Format hours, minutes and seconds as a zero-padded 'HH:MM:SS' string.

    Each component may arrive as an int or a numeric string; values 0-9
    are left-padded with a zero, larger values are kept as-is.

    The original padded single digits with an obscure divide-by-100
    string trick (int(t)/100 -> str -> slice); f'{v:02d}' produces the
    same result directly: 5 -> '05', 0 -> '00', 59 -> '59', 123 -> '123'.
    """
    return ':'.join(f'{int(v):02d}' for v in (thora, tminu, tsegu))
def somar_total_horas_gastas_os(self, parametro, args):
    """Run the SQL query in *parametro* and return the summed duration.

    args == 1: sum one time column per row; values may carry a MySQL
    'N day(s), HH:MM:SS' prefix, whose days are accumulated separately.
    args == 2: sum two time columns per row (e.g. TempGasto + TempGastoExt).

    Returns 'HH:MM:SS', prefixed with 'N dia(s) e ' when whole days were
    accumulated.

    Bug fixes versus the original:
    - accumulation now happens in total seconds with divmod, so multiple
      carries are handled (the original subtracted 60 at most once per
      step, e.g. 59s + 59s + 59s kept 117 as "seconds");
    - singular/plural was inverted: 1 day now reads 'dia', >1 'dias'.
    """
    contDia = 0        # whole days extracted from 'N day' values (args == 1)
    totalSegundos = 0  # running total in seconds
    try:
        self.cursor.execute(parametro)
        totalHoraGastaOS = self.cursor.fetchall()
    except:
        print("Não conseguimos Obter o Tempo Total desta O.S :'(")

    def _em_segundos(partes):
        # partes: three numeric strings [HH, MM, SS] -> total seconds.
        h, m, s = (int(x) for x in partes)
        return h * 3600 + m * 60 + s

    if args == 1:
        for valor in totalHoraGastaOS:
            texto = str(valor[0])
            partes = texto.replace(':', ' ').split()
            if 'day' in texto:
                # Format after split: ['N', 'day(s),', 'HH', 'MM', 'SS']
                contDia += int(partes[0])
                totalSegundos += _em_segundos(partes[2:5])
            else:
                totalSegundos += _em_segundos(partes[0:3])
    elif args == 2:
        for valor in totalHoraGastaOS:
            partes1 = str(valor[0]).replace(':', ' ').split()
            partes2 = str(valor[1]).replace(':', ' ').split()
            totalSegundos += _em_segundos(partes1) + _em_segundos(partes2)
    horas, resto = divmod(totalSegundos, 3600)
    minutos, segundos = divmod(resto, 60)
    resultadoTempo = self.transformar_tempo_decimal(horas, minutos, segundos)
    if contDia != 0:
        a1 = 'dias' if contDia > 1 else 'dia'
        resultadoTempo = (f"{str(contDia)} {a1} e {resultadoTempo}")
    return resultadoTempo
def centraliza_tela(self, larg, alt, jane):
    """Center window *jane* on the screen with size larg x alt (pixels)."""
    # Screen resolution reported by Tk.
    tela_largura = jane.winfo_screenwidth()
    tela_altura = jane.winfo_screenheight()
    # Top-left corner so the window sits in the middle of the screen.
    pos_x = tela_largura / 2 - larg / 2
    pos_y = tela_altura / 2 - alt / 2
    return jane.geometry('%dx%d+%d+%d' % (larg, alt, pos_x, pos_y))
def sumir_widgets(self):
    """Hide every content widget of the O.S.-details window.

    Each tab's widgets sit in their own try/except because not every
    attribute exists at all times (tabs are built lazily); a failure
    skips only the rest of that group.
    """
    # Group 1: finished-orders ("Início") widgets.
    try:
        self.frameExibirTotalOperacao.place_forget()
        self.frameDadosTempo.place_forget()
        self.frameDadosTreeview.place_forget()
        self.remover_focus('', self.viewOrdemServico)
        self.labelTipo.place_forget()
        self.boxTipo.place_forget()
        self.labelPesquisar.place_forget()
        self.boxPesquisar.place_forget()
        self.campoBusca.place_forget()
        self.botBuscar.place_forget()
        self.botAtribuirOper.place_forget()
    except: pass
    # Group 2: pauses widgets.
    try:
        self.frameDadosTreeviewPause.place_forget()
        self.remover_focus('', self.viewPausas)
        self.labelTipo2.place_forget()
        self.boxTipo2.place_forget()
        self.labelPesquisar2.place_forget()
        self.boxPesquisar2.place_forget()
        self.campoBusca2.place_forget()
        self.botBuscar2.place_forget()
    except:
        pass
    # Group 3: parts-list widgets.
    try:
        self.botAddPeca.place_forget()
        self.frameDadosTreeviewPecas.place_forget()
    except: pass
    # Group 4: report/info frame.
    try:
        self.frameInfoRelatorio.place_forget()
    except: pass
def botao_exibir_inicio(self):
    """Show the finished-orders Treeview (the 'Início' view).

    NOTE(review): the side frames and the search/filter widgets were
    disabled here via commented-out layout code; that dead code has been
    removed — restore from history if the feature returns.
    """
    self.sumir_widgets()
    self.frameDadosTreeview.place(relx=0.400, rely=0.680, relwidth=0.750,
                                  relheight=0.600, anchor='center')
def botao_exibir_pausas(self):
    """Show the pauses Treeview.

    NOTE(review): the search/filter widgets were disabled here via
    commented-out layout code; that dead code has been removed —
    restore from history if the feature returns.
    """
    self.sumir_widgets()
    self.frameDadosTreeviewPause.place(relx=0.400, rely=0.680, relwidth=0.750,
                                       relheight=0.600, anchor='center')
def botao_exibir_pecas(self):
    """Show the parts list view together with its 'add part' button."""
    self.sumir_widgets()
    #self.botAtribuirOper.place(relx=0.100, rely=0.340)
    self.botAddPeca.place(relx=0.885, rely=0.025)
    self.frameDadosTreeviewPecas.place(relx=0.400, rely=0.680, relwidth=0.750, relheight=0.600, anchor='center')
def botao_exibir_sobre(self):
    """Show the report/info ('Sobre') frame, hiding everything else."""
    self.sumir_widgets()
    self.frameInfoRelatorio.place(relx=0.500, rely=0.600, anchor="center", relheight=0.600)
def chamar_proxima_folha(self):
    """Advance one page in the attribution wizard (self.prox: 0..3) and
    build the widgets of the new page.

    Page 1 selects machines, page 2 selects operators, page 3 is the
    summary page where times are computed and the selections displayed.
    """
    if self.prox < 3:
        self.prox += 1
    if self.prox == 1:
        # Page 1: hide page-0 frames, show the machine-selection frame.
        self.frameAt1.place_forget()
        self.frameAt2.place_forget()
        self.acessoSimultaneoOperacao.place_forget()
        self.botAnterior.place(relx=0.580, rely=0.900)
        self.frameAt3.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.580)
        self.acessoSimultaneoMaquina.place(relx=0.540, rely=0.780)
        self.botProximo['state'] = DISABLED
        self.botProximo.place(relx=0.770, rely=0.900)
        self.verificar_frame1('')
        # The "simultaneous machines" switch is only available when
        # simultaneous operation access was enabled on page 0.
        if self.acessOperacao.get() != 1:
            self.acessoSimultaneoMaquina['state'] = DISABLED
        else:
            self.acessoSimultaneoMaquina['state'] = NORMAL
    elif self.prox == 2:
        # Page 2: hide page-1 frames, show the operator-selection frame.
        self.frameAt3.place_forget()
        self.acessoSimultaneoMaquina.place_forget()
        self.frameAt4.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.580)
        self.acessoSimultaneoOperador.place(relx=0.540, rely=0.780)
        self.botProximo['state'] = DISABLED
        self.botProximo.place(relx=0.770, rely=0.900)
        self.verificar_frame1('')
        # Same gating for the "simultaneous operators" switch.
        if self.acessOperacao.get() != 1:
            self.acessoSimultaneoOperador['state'] = DISABLED
        else:
            self.acessoSimultaneoOperador['state'] = NORMAL
    elif self.prox == 3:
        # Page 3: summary — show the conclusion frame and button.
        self.frameAt4.place_forget()
        self.acessoSimultaneoOperador.place_forget()
        self.botProximo.place_forget()
        self.frameAt5.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.580)
        self.botConcluir.place(relx=0.770, rely=0.900)
        # Format the per-operation time as HH:MM:SS.
        tempA = int(self.campoHora.get())
        tempB = int(self.campoMinuto.get())
        tempC = int(self.campoSegundo.get())
        self.tempPorOper = self.transformar_tempo_decimal(tempA, tempB, tempC)
        # Compute the gross programmed time: per-operation time times the
        # quantity, carrying seconds into minutes and minutes into hours.
        tempH = 0
        tempS = 0
        tempM = 0
        for q in range(int(self.campoQuant.get())):
            tempH += int(self.campoHora.get())
            tempS += int(self.campoSegundo.get())
            tempM += int(self.campoMinuto.get())
            # Carry over when seconds/minutes reach 60.
            if tempS >= 60:
                tempM += 1
                tempS = tempS - 60
            if tempM >= 60:
                tempH += 1
                tempM = tempM - 60
        self.tempProgramado = self.transformar_tempo_decimal(tempH, tempM, tempS)
        # Display the selected O.S. data on the summary page.
        self.dados1['text'] = self.pegarOS
        self.dados2['text'] = self.campoPeca.get()
        self.dados3['text'] = self.operacaoSelect.get()
        self.dados4['text'] = self.campoQuant.get()
        self.dados5['text'] = self.tempProgramado
        # Build a comma-separated list of the selected machines.
        textFormat = ''
        for i in self.listaMaquina:
            if i[1].get() == '1':
                try:
                    self.cursor.execute("select Maquina from maquinas where Codigo = '"+str(i[0])+"'")
                    maquinaSelect = self.cursor.fetchall()
                except Exception as erro:
                    print(f'{erro}, {(erro.__class__)}')
                textFormat += ', '+ maquinaSelect[0][0]
        self.text1['state'] = NORMAL
        self.text1.delete("1.0", "end")
        # [2:] strips the leading ', ' separator.
        self.text1.insert(END, textFormat[2:])
        self.text1['state'] = DISABLED
        # Build a comma-separated list of the selected operators.
        textFormat2 = ''
        for i in self.listaFuncionario:
            if i[0].get() == '1':
                try:
                    self.cursor.execute("select Nome from Funcionarios where CPF = '"+str(i[1])+"'")
                    funcionarioSelect = self.cursor.fetchall()
                except Exception as erro:
                    print(f'{erro}, {(erro.__class__)}')
                textFormat2 += ', '+ funcionarioSelect[0][0]
        self.text2['state'] = NORMAL
        self.text2.delete("1.0", "end")
        self.text2.insert(END, textFormat2[2:])
        self.text2['state'] = DISABLED
def voltar_folha_anterior(self):
    """Step one page back in the attribution wizard and restore the
    widgets of the page being returned to."""
    if self.prox > 0:
        self.prox -= 1
    # self.prox is not modified below, so the branches are exclusive.
    if self.prox == 0:
        # Back to page 0: operation-data frames.
        self.frameAt1.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.280)
        self.frameAt2.place(relx=0.100, rely=0.530, relwidth=0.800, relheight=0.250)
        self.acessoSimultaneoOperacao.place(relx=0.540, rely=0.780)
        self.botAnterior.place_forget()
        self.frameAt3.place_forget()
        self.acessoSimultaneoMaquina.place_forget()
        self.verificar_frame1('')
    elif self.prox == 1:
        # Back to page 1: machine selection.
        self.frameAt3.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.580)
        self.acessoSimultaneoMaquina.place(relx=0.540, rely=0.780)
        self.frameAt4.place_forget()
        self.acessoSimultaneoOperador.place_forget()
        self.verificar_frame1('')
    elif self.prox == 2:
        # Back to page 2: operator selection (leaving the summary page).
        self.frameAt4.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.580)
        self.acessoSimultaneoOperador.place(relx=0.540, rely=0.780)
        self.botProximo.place(relx=0.770, rely=0.900)
        self.frameAt5.place_forget()
        self.botConcluir.place_forget()
        self.verificar_frame1('')
def inserir_atribuicao(self):
    """Insert one ordem_processo row per selected (machine, operator)
    pair for the chosen operation, then append the newly created
    attribution to the Treeview.

    All queries are parameterized; the original concatenated user-derived
    strings straight into the SQL (injection risk).
    """
    os = self.pegarOS
    peca = self.campoPeca.get()
    # Only the leading 5-character operation code is stored.
    operacao = self.operacaoSelect.get()[:5]
    quant = self.campoQuant.get()
    tempPorOper = self.tempPorOper
    tempProgramado = self.tempProgramado
    global dataAberta
    dataAberta = str(datetime.now())
    try:
        self.cursor.execute('select id from operacao where Codigo_Operacao = %s', (operacao,))
        idOperacao = self.cursor.fetchall()[0][0]
    except:
        if messagebox.showerror(parent=self.janelaAtribuir, title='Alerta', message='Erro ao tentar localizar Operação no Banco de Dados'):
            return ''
    # Draw random 7-digit codes until one unused in ordem_processo is found
    # (the range only bounds the number of attempts).
    codigo = ''
    try:
        for _ in range(1000000, 10000000):
            codigo = str(randint(1000000, 9999999))
            self.cursor.execute("select id from ordem_processo where Codigo = %s", (codigo,))
            checagem = self.cursor.fetchall()
            if len(checagem) == 0:
                break
    except:
        if messagebox.showerror(parent=self.janelaAtribuir, title='Alerta', message='Falha ao executar ação com Banco de Dados'):
            return ''
    # Resolve the ids of every checked machine.
    listIdMaquina = list()
    try:
        for i in self.listaMaquina:
            if i[1].get() == '1':
                self.cursor.execute("select id from maquinas where Codigo = %s", (str(i[0]),))
                maquinaSelect = self.cursor.fetchall()[0][0]
                listIdMaquina.append(maquinaSelect)
    except:
        if messagebox.showerror(parent=self.janelaAtribuir, title='Alerta', message='Não foi possível buscar por Id da Máquina'):
            return ''
    # Resolve the ids of every checked operator.
    listIdFuncionario = list()
    try:
        for i in self.listaFuncionario:
            if i[0].get() == '1':
                self.cursor.execute("select id from Funcionarios where CPF = %s", (str(i[1]),))
                funcionarioSelect = self.cursor.fetchall()[0][0]
                listIdFuncionario.append(funcionarioSelect)
    except:
        if messagebox.showerror(parent=self.janelaAtribuir, title='Alerta', message='Não foi possível buscar por Id do Operador'):
            return ''
    # One row per (machine, operator) combination, all sharing *codigo*.
    for idMaquina in listIdMaquina:
        for idOperador in listIdFuncionario:
            try:
                self.cursor.execute(
                    "insert into ordem_processo (Codigo, OS, Peca, Quant, idOperacao, "
                    "idMaquina, idOperador, TempoPorOperacao, TempoProgramado, DataAberta, "
                    "DataIniciada, DataFinalizada, Estado) VALUES "
                    "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NULL, NULL, DEFAULT)",
                    (codigo, os, peca, quant, str(idOperacao), str(idMaquina),
                     str(idOperador), tempPorOper, tempProgramado, dataAberta))
            except Exception as erro:
                print(f'{erro}, {(erro.__class__)}')
                if messagebox.showerror(parent=self.janelaAtribuir, title='Alerta', message='Falha ao enviar dados para atribuição'):
                    return ''
    # Fetch the last inserted attribution (joined with its names) to
    # append it to the Treeview.
    self.cursor.execute(
        "select a.id, b.Nome, a.Peca, c.Processo_Usinagem, a.Quant, d.Maquina, a.Estado "
        "from ordem_processo as a "
        "join funcionarios as b on b.id = a.idOperador "
        "join operacao as c on c.id = a.idOperacao "
        "join maquinas as d on d.id = a.idMaquina "
        "where OS = %s order by id desc limit 1", (self.pegarOS,))
    valido = self.cursor.fetchall()
    self.janelaAtribuir.destroy()
    if messagebox.showinfo(parent=self.janelaDetalhesOS, title='Alerta', message='Atribuição realizada com sucesso!'):
        # Row layout: (id, Nome, Peca, Processo_Usinagem, Quant, Maquina, Estado)
        self.viewOrdemServico.insert("", "end", values=valido[0])
def verificar_frame1(self, args):
    """Validate the current wizard page and enable/disable 'Próximo'.

    Page 0: requires part code, non-zero quantity, an operation other
    than 'Parado', and a non-zero hour/minute time.
    Pages 1 and 2: require at least one machine / operator checkbutton;
    when the matching "simultaneous access" switch is off, only the
    first checked item is kept and later checks are cleared.

    args: Tk event/trace argument (unused); bound as a callback.
    """
    try:
        if len(self.campoPeca.get()) >=1 and self.campoQuant.get() != '0' and self.operacaoSelect.get() != 'Parado' and len(self.campoHora.get()) >= 1 and len(self.campoMinuto.get()) >= 1 and self.prox == 0:
            try:
                # A 0h00m operation time is not allowed.
                if int(self.campoHora.get()) == 0 and int(self.campoMinuto.get()) == 0:
                    self.botProximo['state'] = DISABLED
                else:
                    self.botProximo['state'] = NORMAL
            except:
                pass
        elif self.prox == 1:
            v = 0 # becomes 1 when one or more checkbuttons are selected
            # Scan the machine checkbutton list.
            for i in range(len(self.listaMaquina)):
                # A value of '1' means the checkbutton is checked.
                if self.listaMaquina[i][1].get() == '1':
                    v = 1 # at least one checkbutton is selected
                    self.controleCheck += 1
                    # Remember the first checked machine.
                    if self.controleCheck == 1:
                        self.controleOne = self.listaMaquina[i] # first checked machine
            # Enable 'Próximo' when something is checked.
            if v != 0:
                self.botProximo['state'] = NORMAL
            # Otherwise keep it disabled and reset the counter.
            else:
                self.botProximo['state'] = DISABLED
                self.controleCheck = 0
            # Without simultaneous access, only one machine may stay checked.
            if self.acessMaquina.get() != 1:
                # More than one machine was checked:
                if self.controleCheck > 1:
                    for i in range(len(self.listaMaquina)):
                        # Uncheck everything except the first one.
                        if self.listaMaquina[i] != self.controleOne:
                            self.listaMaquina[i][1].set(0)
        elif self.prox == 2:
            v = 0
            for i in range(len(self.listaFuncionario)):
                if self.listaFuncionario[i][0].get() == '1':
                    v = 1
                    self.controleCheck2 += 1
                    # Remember the first checked operator.
                    if self.controleCheck2 == 1:
                        self.controleOne2 = self.listaFuncionario[i] # first checked operator
            if v != 0:
                self.botProximo['state'] = NORMAL
            else:
                self.botProximo['state'] = DISABLED
                self.controleCheck2 = 0
            # Without simultaneous access, only one operator may stay checked.
            if self.acessOperador.get() != 1:
                # More than one operator was checked:
                if self.controleCheck2 > 1:
                    for i in range(len(self.listaFuncionario)):
                        # Uncheck everything except the first one.
                        if self.listaFuncionario[i] != self.controleOne2:
                            self.listaFuncionario[i][0].set(0)
        else:
            self.botProximo['state'] = DISABLED
    except Exception as erro: print(f'{erro}, {(erro.__class__)}')
def confirmar_login(self, event):
    """Validate the login form and open the main window on success.

    event: Tk event object (unused); bound to the login action.
    The credential lookup is parameterized — the original concatenated
    the user-typed CPF and password straight into the SQL, the classic
    login-bypass injection.
    """
    self.labelError = Label(self.frameLogin, text='', fg='#bf0606', bg='white', width=40, font=('arial', 10))
    self.labelError.place(relx=0.180, rely=0.620)
    # Empty password field.
    if self.campSenha.get() == '':
        self.labelError['text'] = 'Preencha o campo!'
        return ''
    # The login (CPF) must be an 11-digit number.
    if str(self.campLogin.get()).isnumeric() and len(self.campLogin.get()) == 11:
        self.user = self.campLogin.get()
        # The password must be an 8-digit number.
        if str(self.campSenha.get()).isnumeric() and len(self.campSenha.get()) == 8:
            self.password = self.campSenha.get()
            try:
                # Parameterized credential lookup.
                self.cursor.execute(
                    "select Nome from Administrador where CPF = %s and Senha = %s",
                    (self.user, self.password))
                valido = self.cursor.fetchall()
            except Exception as erro:
                print(f'{erro}, {(erro.__class__)}')
                return messagebox.showerror(parent=self.janelaFuncio, title='03-Error-Servidor', message='03-Error: Não acesso ao servidor.')
            # Exactly one matching administrator: log in.
            if len(valido) == 1:
                self.campLogin.delete(0, END)
                self.campSenha.delete(0, END)
                self.janelaFuncio.withdraw()
                self.janela_raiz()
            else:
                return messagebox.showinfo(parent=self.janelaFuncio, title='Alerta', message='Login não Existe!')
        else:
            self.labelError['text'] = 'Usuário ou Senha Incorreta!'
    else:
        self.labelError['text']= 'Usuário ou Senha Incorreta!'
def sair(self):
if messagebox.askokcancel(parent=self.janelaInicial, title='Alerta', message='Deseja Realmente Sair?'):
'''
try:
#self.encerrando_conexao_database()
print('Conexão com MySQL fechada.')
except:
print('Não foi possível fechar a Conexão com MySQL antes de sair.')
'''
self.janelaInicial.destroy()
self.campLogin.focus_force()
self.janelaFuncio.deiconify()
class Ordem_Servico():
#funções de verificar campos e enviar informações ao banco de dados
def confirmarCamposOrdemServico(self):
if self.campoCliente.get() == '' or self.campoNumOS.get() == '' or self.campoProduto.get() == '' or self.campoQTDE.get() == '' or self.campoDataPrevista.get() == '' or self.campoOrdemAbertura.get() == '' or self.tipoOS.get() == 'Select':
messagebox.showinfo(parent=self.janelaCriarOS, title='Alerta', message='Verifique os Campos')
self.exibir_primeira_aba()
return False
elif len(self.listaPecas.get_children()) == 0:
messagebox.showinfo(parent=self.janelaCriarOS, title='Itens não adcionado', message='É necessário adcionar no mínimo 1 item a lista.')
return False
else:
return True
def inseririndo_dados_os(self):
dataPrevista = self.campoDataPrevista.get()
dataPrevista = dataPrevista[6:]+'-'+dataPrevista[3:5]+'-'+dataPrevista[0:2]
dataAbertura = str(datetime.now())
self.cursor.execute("INSERT INTO ordem_servico VALUES (NULL, '"+self.campoNumOS.get()+"','"+self.campoCliente.get().capitalize()+"','"+self.campoProduto.get().capitalize()+"','"+self.campoQTDE.get()+"','"+self.tipoOS.get()+"','"+dataPrevista+"','"+self.idOrdemAbertura+"','"+dataAbertura+"', NULL, NULL, 'Aberto','"+self.campoComplemento.get('1.0', 'end')+"','"+self.campoObservação.get('1.0', 'end')+"')")
self.cursor.execute('SELECT id FROM ordem_servico WHERE OS = '+self.campoNumOS.get())
idOS = str(self.cursor.fetchall()[0][0])
listaPecaCod = self.listaPecas.get_children()
for item in listaPecaCod:
peca = self.listaPecas.item(item, 'values')
self.cursor.execute("INSERT INTO ordem_pecas VALUES (NULL, '"+idOS+"', '"+peca[0]+"','"+peca[3]+"', 'Novo Item', NULL,DEFAULT)")
    def confirmar_tela_os(self):
        """Callback of the 'finish' button: validate the form, insert the
        order and, on success, refresh the treeview and close the window."""
        if not self.confirmarCamposOrdemServico():
            return ''
        try:
            self.inseririndo_dados_os()
        except mysql.connector.errors.IntegrityError:
            # O.S. numbers are unique in the database.
            messagebox.showwarning(parent=self.janelaCriarOS, title='Alerta', message='Nº de O.S já existente')
            return ''
        except Exception as erro:
            print(f'{erro}, {(erro.__class__)}')
            messagebox.showerror(parent=self.janelaCriarOS, title='Alerta', message='Erro ao tentar inserir dados')
            return ''
        else:
            # Show the freshly created order in the main treeview.
            self.exibir_os_treeview()
            # showinfo returns 'ok' (truthy), so the window closes right
            # after the user dismisses the dialog.
            if messagebox.showinfo(parent=self.janelaCriarOS, title='Alerta', message='Ordem de Serviço aberta com sucesso'):
                self.janelaCriarOS.destroy()
def exibir_os_treeview(self):
try:
ultimoID = self.OrdemServico.get_children()[-1]
ultimoID = vez = self.OrdemServico.item(ultimoID)["values"]
ultimoID = ultimoID[0]
except:
ultimoID = 0
self.cursor.execute("select a.id, a.OS, a.Cliente, a.Produto, a.QTDE, a.TipoOS, a.DataPrevista, a.Estado from ordem_servico as a order by id desc limit 1")
osBuscada = self.cursor.fetchall()
#self.cursor.execute("select a.id, a.OS, b.Nome, a.Produto, a.QTDE, a.TipoOS, a.DataPrevista, a.Estado from ordem_servico as a join funcionarios as b on a.OrdemAbertura = b.id order by id desc limit 1")
#osBuscada = self.cursor.fetchall()
a = int(ultimoID)
b = int(osBuscada[0][0])
if a != b:
self.OrdemServico.insert("", "end", values=(osBuscada[0][0], osBuscada[0][1], osBuscada[0][2], osBuscada[0][3], osBuscada[0][4], osBuscada[0][5], osBuscada[0][6], osBuscada[0][7]))
'''
try:
#Consultoro no banco de dados as O.S finalizadas de modo distintas
self.cursor.execute("select OS, id from ordem_servico group by OS order by id desc limit 1")
osDistintas = self.cursor.fetchall()
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
a = int(ultimoID[0])
b = int(osDistintas[0][1])
if a != b:
self.cursor.execute("select id, DataInicial from concluidas where OS ="+str(osDistintas[0][0])+" limit 1")
linha = self.cursor.fetchall()
self.cursor.execute("select count(*) from concluidas where OS ="+str(osDistintas[0][0]))
contagemOperacoes = self.cursor.fetchall()
self.cursor.execute("select count(*) from concluidas where Tipo = 'Retrabalhar OS' and OS = "+str(osDistintas[0][0]))
contagemRetrabalho = self.cursor.fetchall()
#extraindo do banco de dados as informações e armazenando nas variáveis
ID = osDistintas[0][1]
if linha == []:
DataInicial = 'Não Disponível'
DataInicial = 'Não Disponível'
HoraInicial = 'Não Disponível'
HoraInicial = 'Não Disponível'
else:
DataInicial = str(linha[0][1])
DataInicial = DataInicial.split()[0]
HoraInicial = str(linha[0][1])
HoraInicial = HoraInicial.split()[1]
#Consulta SQL a ser feita por parametro
SQL = ("select TempGasto, TempGastoExt from concluidas where OS = "+str(osDistintas[0][0]))
horaTotal = self.somar_total_horas_gastas_os(SQL, 2)
#Adicionando as colunas da respectiva O.S na Treeview
self.OrdemServico.insert("", "end", values=(ID, osDistintas[0][0], DataInicial, HoraInicial, contagemOperacoes, contagemRetrabalho, horaTotal))
'''
#função dos botões de adcionar e remover itens da lista
def buscar_peca(self, event):
try:
self.cursor.execute("select * from pecas where CodPeca like '%"+self.campoCP.get()+"%'")
self.pecas = self.cursor.fetchall()
except: return ''
if self.pecas != []:
self.campoN['state'] = NORMAL
self.campoM['state'] = NORMAL
self.campoT['state'] = NORMAL
self.campoN.delete(0, END)
self.campoM.delete(0, END)
self.campoT.delete(0, END)
self.campoN.insert('end', self.pecas[0][1])
self.campoM.insert('end', self.pecas[0][3])
self.campoT.insert('end', self.pecas[0][4])
self.campoN['state'] = DISABLED
self.campoM['state'] = DISABLED
self.campoT['state'] = DISABLED
else:
self.campoN['state'] = NORMAL
self.campoM['state'] = NORMAL
self.campoT['state'] = NORMAL
self.campoN.delete(0, END)
self.campoM.delete(0, END)
self.campoT.delete(0, END)
self.campoN['state'] = DISABLED
self.campoM['state'] = DISABLED
self.campoT['state'] = DISABLED
def adcionar_peca_lista(self):
try:
if not int(self.campoQ.get()) >= 1:
return messagebox.showinfo(parent=self.janelaCriarOS, title='Quantidade não válida', message='Informe a quantidade de peça.')
except: return messagebox.showinfo(parent=self.janelaCriarOS, title='Quantidade não válida', message='Informe a quantidade de peça.')
#Verificando se o mesmo item já foi adcionado antes, caso não irá inserir agora
if not self.pecas[0][0] in self.count_lista:
self.count_lista.append(self.pecas[0][0])
self.listaPecas.insert("", "end", values=(self.pecas[0][0], self.pecas[0][1], self.pecas[0][2], self.campoQ.get(), self.pecas[0][3], self.pecas[0][4], self.pecas[0][5]))
self.campoN['state'] = NORMAL
self.campoM['state'] = NORMAL
self.campoT['state'] = NORMAL
self.campoCP.delete(0, END)
self.campoN.delete(0, END)
self.campoM.delete(0, END)
self.campoT.delete(0, END)
self.campoN['state'] = DISABLED
self.campoM['state'] = DISABLED
self.campoT['state'] = DISABLED
else: messagebox.showwarning(parent=self.janelaCriarOS, title='Alerta', message='Este item já foi adcionado.')
def remover_peca_lista(self):
try:
#atribuindo item a ser removido
itemSelecionado = self.listaPecas.selection()[0]
#removendo item da lista
indice = self.listaPecas.item(itemSelecionado, 'values')[0]
self.count_lista.remove(int(indice))
#removendo item da treeview
self.listaPecas.delete(itemSelecionado)
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
messagebox.showinfo(parent=self.janelaCriarOS, title='Linha não selecionada', message='Elemento não selecionado')
#funções que farão a troca entre a primeira parte e a segunda da janela
    def exibir_primeira_aba(self):
        """Show page one of the 'create O.S.' window, hiding page-two widgets.

        The bare except is deliberate best-effort: on the very first call
        the page-two widgets do not exist yet (AttributeError), and after a
        window close they may be dead Tk objects — either way the hide step
        is safely skipped.
        """
        try:
            self.botFinalizarOS.place_forget()
            self.frameOS4.place_forget()
            self.frameOS5.place_forget()
        except: pass
        self.frameOS2.place(relx=0.020, rely=0.030, relwidth=0.960, relheight=0.400)
        self.frameOS3.place(relx=0.020, rely=0.450, relwidth=0.960, relheight=0.450)
        self.botProximoOS.place(relx=0.850, rely=0.920)
def exibir_segunda_aba(self):
self.frameOS2.place_forget()
self.frameOS3.place_forget()
self.botProximoOS.place_forget()
self.botFinalizarOS.place(relx=0.850, rely=0.920)
self.frameOS4.place(relx=0.020, rely=0.030, relwidth=0.960, relheight=0.300)
self.frameOS5.place(relx=0.020, rely=0.350, relwidth=0.960, relheight=0.550)
#labels e outros widgets que compoem a janela
    def primeira_aba(self):
        """Build page one of the 'create O.S.' window: client/product fields
        with input masks, the (locked) opener field, type combobox and the
        'next' button.  Widgets are placed by exibir_primeira_aba()."""
        try:
            # The logged-in user becomes the order's opener.
            # NOTE(review): query still built by concatenation; self.user is
            # digits-only from the login validation, but parameterizing it
            # would be safer.
            self.cursor.execute('select id, Nome from funcionarios where CPF ='+self.user)
            busca = self.cursor.fetchall()
        except Exception as erro:
            print(f'{erro}, {(erro.__class__)}')
            if messagebox.showerror(parent=self.janelaInicial, title='Verifique a conexão', message='Sem conexão com Banco de Dados'):
                return 0
        else:
            self.idOrdemAbertura = str(busca[0][0])
            nome = busca[0][1]
        self.frameOS1 = Frame(self.janelaCriarOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        self.frameOS1.place(relx=0.020, rely=0.030, relwidth=0.960, relheight=0.940)
        self.frameOS2 = Frame(self.frameOS1, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        self.frameOS3 = Frame(self.frameOS1 ,highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        lbl = Label(self.frameOS2, font=('arial', 10), text='Cliente', bg='white')
        lbl.place(relx=0.040, rely=0.030)
        # Input masks: each trace callback trims/validates its own field.
        def format_campo_cliente(*args):
            # Client name: at most 40 characters.
            mask = varCampoCliente.get()
            varCampoCliente.set(mask[:40])
        def format_campo_dataP(*args):
            # Date mask dd/mm/yyyy: digits only, slashes inserted automatically.
            mask = dataPrevi.get()
            dataPrevi.set(mask[:10])
            if len(mask) >= 1 and mask[-1] != '/' and not mask[-1].isnumeric():
                dataPrevi.set(mask[0:len(mask) - 1])
            # After the two day digits insert the first slash...
            if len(mask) == 2 and mask.isnumeric():
                self.campoDataPrevista.delete(0, END)
                self.campoDataPrevista.insert(END, mask+'/')
            # ...and after the month digits insert the second one.
            elif len(mask) == 5 and mask[3:].isnumeric():
                self.campoDataPrevista.delete(0, END)
                self.campoDataPrevista.insert(END, mask+'/')
        def format_campo_os(*args):
            # O.S. number: digits only, at most 11 characters.
            mask = varCampoOS.get()
            varCampoOS.set(mask[:11])
            if len(mask) >= 1 and not mask[-1].isnumeric():
                varCampoOS.set(mask[0:len(mask) - 1])
        def format_campo_produto(*args):
            # Product name: at most 30 characters.
            mask = varProduto.get()
            varProduto.set(mask[:30])
        def format_campo_qtde(*args):
            # Quantity: digits only, at most 4 characters.
            mask = varQTDE.get()
            varQTDE.set(mask[:4])
            if len(mask) >= 1 and not mask[-1].isnumeric():
                varQTDE.set(mask[0:len(mask) - 1])
        varCampoCliente = StringVar()
        varCampoCliente.trace('w', format_campo_cliente)
        dataPrevi = StringVar()
        dataPrevi.trace('w', format_campo_dataP)
        varCampoOS = StringVar()
        varCampoOS.trace('w', format_campo_os)
        varProduto = StringVar()
        varProduto.trace('w', format_campo_produto)
        varQTDE = StringVar()
        varQTDE.trace('w', format_campo_qtde)
        self.campoCliente = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varCampoCliente)
        self.campoCliente.place(relx=0.040, rely=0.160, relwidth=0.600)
        self.campoCliente.focus_force()
        lbl = Label(self.frameOS2, font=('arial', 10), text='O.S', bg='white')
        lbl.place(relx=0.780, rely=0.030)
        self.campoNumOS = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varCampoOS)
        self.campoNumOS.place(relx=0.780, rely=0.160, relwidth=0.180)
        lbl = Label(self.frameOS2, font=('arial', 10), text='Produto', bg='white')
        lbl.place(relx=0.040, rely=0.370)
        self.campoProduto = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varProduto)
        self.campoProduto.place(relx=0.040, rely=0.490, relwidth=0.500)
        lbl = Label(self.frameOS2, font=('arial', 10), text='QTDE', bg='white')
        lbl.place(relx=0.600, rely=0.370)
        self.campoQTDE = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varQTDE)
        self.campoQTDE.place(relx=0.600, rely=0.490, relwidth=0.080)
        lbl = Label(self.frameOS2, font=('arial', 10), text='Data Prevista', bg='white')
        lbl.place(relx=0.800, rely=0.370)
        self.campoDataPrevista = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2, textvariable=dataPrevi)
        self.campoDataPrevista.place(relx=0.800, rely=0.490, relwidth=0.180)
        lbl = Label(self.frameOS2, font=('arial', 10), text='Ord. de Abertura', bg='white')
        lbl.place(relx=0.040, rely=0.700)
        # Opener field is pre-filled with the logged-in user and locked.
        self.campoOrdemAbertura = Entry(self.frameOS2, font=('arial', 12), relief=GROOVE, border=2)
        self.campoOrdemAbertura.place(relx=0.040, rely=0.820, relwidth=0.500)
        self.campoOrdemAbertura.insert('end', nome)
        self.campoOrdemAbertura['state'] = DISABLED
        lbl = Label(self.frameOS2, font=('arial', 10), text='Tipo de Pedido', bg='white')
        lbl.place(relx=0.700, rely=0.700)
        self.tipoOS = ttk.Combobox(self.frameOS2, font=('arial',10), state='readonly')
        self.tipoOS['values'] = ('Select', 'Comum', 'Urgente', 'Interno', 'Corretivo', 'Preventivo')
        self.tipoOS.current(0)
        self.tipoOS.place(relx=0.700, rely=0.820)
        lbl = Label(self.frameOS3, font=('arial', 10), text='Complemento', bg='white')
        lbl.place(relx=0.040, rely=0.060)
        self.campoComplemento = Text(self.frameOS3, font=('arial', 13), relief=GROOVE, border=2)
        self.campoComplemento.place(relx=0.040, rely=0.160, relwidth=0.900, relheight=0.300)
        lbl = Label(self.frameOS3, font=('arial', 10), text='Observação', bg='white')
        lbl.place(relx=0.040, rely=0.480)
        self.campoObservação = Text(self.frameOS3, font=('arial', 13), relief=GROOVE, border=2)
        self.campoObservação.place(relx=0.040, rely=0.570, relwidth=0.900, relheight=0.300)
        # Kept global so the PhotoImage is not garbage-collected while in use.
        global imgProximo
        imgProximo = PhotoImage(file='image/proximo.png')
        self.botProximoOS = Button(self.frameOS1, image=imgProximo, border=0, relief=FLAT, bg='white', command=self.exibir_segunda_aba)
        self.botProximoOS.place(relx=0.850, rely=0.920)
    def segunda_aba(self):
        """Build page two of the 'create O.S.' window: part lookup fields,
        add/remove buttons, the parts treeview and the 'finish' button.

        Widgets are created but not placed here; exibir_segunda_aba() lays
        them out when the user advances.
        """
        self.frameOS4 = Frame(self.frameOS1, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        self.frameOS5 = Frame(self.frameOS1 ,highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        lbl = Label(self.frameOS4, font=('arial', 10), text='Código da Peça', bg='white')
        lbl.place(relx=0.040, rely=0.050)
        # Part-code field: <Return> (or the Ok button) triggers the lookup.
        self.campoCP = Entry(self.frameOS4, font=('arial', 12), relief=GROOVE, border=2)
        self.campoCP.place(relx=0.040, rely=0.220, relwidth=0.200)
        self.campoCP.bind("<Return>", self.buscar_peca)
        botaoBuscar = Button(self.frameOS4, text='Ok', border=2, command = lambda: self.buscar_peca(''))
        botaoBuscar.place(relx=0.242, rely=0.215)
        lbl = Label(self.frameOS4, font=('arial', 10), text='Nome da Peça', bg='white')
        lbl.place(relx=0.450, rely=0.050)
        # Name/material/treatment are read-only mirrors of the lookup result.
        self.campoN = Entry(self.frameOS4, font=('arial', 12), relief=GROOVE, border=2, state=DISABLED)
        self.campoN.place(relx=0.450, rely=0.220, relwidth=0.500)
        lbl = Label(self.frameOS4, font=('arial', 10), text='QTDE', bg='white')
        lbl.place(relx=0.040, rely=0.500)
        self.campoQ = Spinbox(self.frameOS4, from_=1, to=1000, font=('arial', 12), relief=GROOVE, border=2)
        self.campoQ.place(relx=0.040, rely=0.680, relwidth=0.080)
        lbl = Label(self.frameOS4, font=('arial', 10), text='Material', bg='white')
        lbl.place(relx=0.450, rely=0.420)
        self.campoM = Entry(self.frameOS4, font=('arial', 12), relief=GROOVE, border=2, state=DISABLED)
        self.campoM.place(relx=0.450, rely=0.620, relwidth=0.200)
        lbl = Label(self.frameOS4, font=('arial', 10), text='Tratamento', bg='white')
        lbl.place(relx=0.680, rely=0.420)
        self.campoT = Entry(self.frameOS4, font=('arial', 12), relief=GROOVE, border=2, state=DISABLED)
        self.campoT.place(relx=0.680, rely=0.620, relwidth=0.300)
        botaAdd = Button(self.frameOS4, text='Adcionar', border=2, command=self.adcionar_peca_lista)
        botaAdd.place(relx=0.310, rely=0.320)
        botaRemove = Button(self.frameOS4, text='Remover', border=2, command=self.remover_peca_lista)
        botaRemove.place(relx=0.310, rely=0.550)
        # Treeview listing every part added to this order.
        self.listaPecas = ttk.Treeview(self.frameOS5, column=('1','2','3','4','5','6','7'), show='headings')
        self.listaPecas.heading('1', text='ID')
        self.listaPecas.heading('2', text='Nome da Peça')
        self.listaPecas.heading('3', text='Cód. Peça')
        self.listaPecas.heading('4', text='QTDE')
        self.listaPecas.heading('5', text='Material')
        self.listaPecas.heading('6', text='Tratamento')
        self.listaPecas.heading('7', text='Cód. Desenho')
        self.listaPecas.column("1", width=-20, anchor='n')
        self.listaPecas.column("2", width=200, anchor='n')
        self.listaPecas.column("3", width=40, anchor='n')
        self.listaPecas.column("4", width=10, anchor='n')
        self.listaPecas.column("5", width=90, anchor='n')
        self.listaPecas.column("6", width=80, anchor='n')
        self.listaPecas.column("7", width=80, anchor='n')
        self.listaPecas.place(relx=0, rely=0, relwidth=0.975, relheight=0.999)
        self.listaPecas.bind("<Escape>", lambda event: self.remover_focus(event, self.listaPecas))
        scrollbar = Scrollbar(self.frameOS5, orient="vertical", command=self.listaPecas.yview)
        self.listaPecas.configure(yscrollcommand=scrollbar.set)
        scrollbar.place(relx=0.975, rely=0, relwidth=0.025, relheight=0.999)
        # Kept global so the PhotoImage is not garbage-collected while in use.
        global imgfinalizar
        imgfinalizar = PhotoImage(file='image/finalizar.png')
        # Placed later by exibir_segunda_aba().
        self.botFinalizarOS = Button(self.frameOS1, image=imgfinalizar, border=0, relief=FLAT, bg='white', command=self.confirmar_tela_os)
    def criar_ordem_de_servico(self):
        """Open the modal 'create service order' window and build both pages."""
        self.janelaCriarOS = Toplevel()
        self.janelaCriarOS.title('Criar Ordem de Serviço')
        self.centraliza_tela(800, 500, self.janelaCriarOS)
        self.janelaCriarOS['bg'] = 'white'
        # Tracks the part ids already added, to block duplicates.
        self.count_lista = list()
        # Build page one, display it, then pre-build page two (kept hidden).
        self.primeira_aba()
        self.exibir_primeira_aba()
        self.segunda_aba()
        # Modal behaviour: tied to the O.S. tab, holding focus and grab.
        self.janelaCriarOS.transient(self.aba4)
        self.janelaCriarOS.focus_force()
        self.janelaCriarOS.grab_set()
        self.janelaCriarOS.mainloop()
class Pecas:
#Bloco cadastro de peça manualmente e respectivas funções de inserção
def confirmarCamposCadPeca(self):
#Armazenando nas variáveis informações da peça a ser cadastrada
self.campP = self.descriPeca.get().upper()
self.campM = self.campoMaterial.get().upper()
self.campC = self.campoCodPeca.get()
self.campT = self.campoTratamento.get().upper()
self.campD = self.campoCodDesenho.get().upper()
#Realizando verificação de algum campo deixado em branco
if self.descriPeca.get() == '':
messagebox.showinfo(parent=self.janelaCadPeca, title='Alerta', message='A descrição de Peça é obrigatório.')
return False
elif self.campoCodPeca.get() == '':
messagebox.showinfo(parent=self.janelaCadPeca, title='Alerta', message='O código de Peça é obrigatório.')
return False
if self.campoMaterial.get() == '':
self.campM = 'Não informado'
if self.campoTratamento.get() == '':
self.campT = 'Não informado'
if self.campoCodDesenho.get() == '':
self.campD = 'Não informado'
return True
def salvar_peca(self):
dataCadastro = str(datetime.now())
try:
self.cursor.execute("INSERT INTO pecas VALUES (NULL, '"+self.campP+"','"+self.campC+"','"+self.campM+"','"+self.campT+"','"+self.campD+"','"+dataCadastro+"')")
except mysql.connector.errors.IntegrityError:
messagebox.showinfo(parent=self.janelaCadPeca, title='Alerta', message='Nº de peça já existente')
return False
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
messagebox.showerror(parent=self.janelaCadPeca, title='Alerta', message='Erro ao tentar inserir dados')
return False
else:
return True
    def confirmar_cad_peca(self):
        """Validate the form, persist the part and close the window on success."""
        if not self.confirmarCamposCadPeca():
            return ''
        if not self.salvar_peca():
            return ''
        # showinfo returns 'ok' (truthy), so the window closes right after
        # the user dismisses the dialog.
        if messagebox.showinfo(parent=self.janelaCadPeca, title='Alerta', message='Peça cadastrada com sucesso'):
            self.janelaCadPeca.destroy()
    def janela_cadastrar_peca(self):
        """Open the modal window for registering a single part by hand."""
        self.janelaCadPeca = Toplevel()
        self.janelaCadPeca.title('Cadastrar Peça')
        self.centraliza_tela(600, 300, self.janelaCadPeca)
        self.janelaCadPeca['bg'] = 'white'
        frame1 = Frame(self.janelaCadPeca, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        frame1.place(relx=0.020, rely=0.030, relwidth=0.960, relheight=0.940)
        frame2 = Frame(frame1, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        frame2.place(relx=0.020, rely=0.050, relwidth=0.960, relheight=0.700)
        # Input masks (trace callbacks) limiting each field's length/content.
        def format_nome_peca(*args):
            # Description: at most 170 characters.
            mask = varDescriPeca.get()
            varDescriPeca.set(mask[:170])
        def format_cod_peca(*args):
            # Part code: digits only, at most 13 characters.
            mask = varCodPeca.get()
            varCodPeca.set(mask[:13])
            if len(mask) >= 1 and not mask[-1].isnumeric():
                varCodPeca.set(mask[0:len(mask) - 1])
        def format_campo_material(*args):
            # Material: at most 30 characters.
            mask = varMaterial.get()
            varMaterial.set(mask[:30])
        def format_tratamento(*args):
            # Treatment: at most 30 characters.
            mask = varTratamento.get()
            varTratamento.set(mask[:30])
        def format_cod_desenho(*args):
            # Drawing code: at most 15 characters.
            mask = varCodDesenho.get()
            varCodDesenho.set(mask[:15])
        varDescriPeca = StringVar()
        varDescriPeca.trace('w', format_nome_peca)
        varMaterial = StringVar()
        varMaterial.trace('w', format_campo_material)
        varCodPeca = StringVar()
        varCodPeca.trace('w', format_cod_peca)
        varTratamento = StringVar()
        varTratamento.trace('w', format_tratamento)
        varCodDesenho = StringVar()
        varCodDesenho.trace('w', format_cod_desenho)
        lbl = Label(frame2, font=('arial', 10), text='Descrição da Peça **', bg='white', fg='#344f84')
        lbl.place(relx=0.040, rely=0.030)
        self.descriPeca = Entry(frame2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varDescriPeca)
        self.descriPeca.place(relx=0.040, rely=0.160, relwidth=0.920)
        self.descriPeca.focus_force()
        lbl = Label(frame2, font=('arial', 10), text='Cód. da Peça **', bg='white', fg='#344f84')
        lbl.place(relx=0.040, rely=0.370)
        self.campoCodPeca = Entry(frame2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varCodPeca)
        self.campoCodPeca.place(relx=0.040, rely=0.490, relwidth=0.200)
        lbl = Label(frame2, font=('arial', 10), text='Material (Opcional)', bg='white', fg='#344f84')
        lbl.place(relx=0.350, rely=0.370)
        self.campoMaterial = Entry(frame2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varMaterial)
        self.campoMaterial.place(relx=0.350, rely=0.490, relwidth=0.600)
        lbl = Label(frame2, font=('arial', 10), text='Tratamento (Opcional)', bg='white', fg='#344f84')
        lbl.place(relx=0.040, rely=0.700)
        self.campoTratamento = Entry(frame2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varTratamento)
        self.campoTratamento.place(relx=0.040, rely=0.820, relwidth=0.500)
        lbl = Label(frame2, font=('arial', 10), text='Cód. do Desenho (Opcional)', bg='white', fg='#344f84')
        lbl.place(relx=0.680, rely=0.700)
        self.campoCodDesenho = Entry(frame2, font=('arial', 12), relief=GROOVE, border=2, textvariable=varCodDesenho)
        self.campoCodDesenho.place(relx=0.680, rely=0.820, relwidth=0.260)
        # NOTE(review): unlike the other PhotoImages this one is a local, not
        # a global; it survives only because mainloop() below blocks — confirm
        # the image is not lost if this window is driven differently.
        imgfinalizar = PhotoImage(file='image/finalizar2.png')
        botFinalizar = Button(frame1, image=imgfinalizar, border=0, relief=FLAT, bg='white', command=self.confirmar_cad_peca)
        botFinalizar.place(relx=0.820, rely=0.850)
        # Modal behaviour relative to the parts window.
        self.janelaCadPeca.transient(self.janelaPeca)
        self.janelaCadPeca.focus_force()
        self.janelaCadPeca.grab_set()
        self.janelaCadPeca.mainloop()
#Bloco de exibição das peças e opções de cadastrar ou importar mais
    def exibir_pecas_importacao(self):
        """Worker (run in a thread) that walks the loaded spreadsheet and
        imports part rows into the pecas table, updating the progress UI.

        Only rows whose 'Referência' starts with '9' and is exactly 13
        characters long are treated as parts.
        """
        # Rows that do not match the 13-character pattern end up here.
        # NOTE(review): listaItem is filled but never used afterwards.
        listaItem = list()
        idItem = 0
        cont = 0
        for dado in range(len(self.excel['Referência'])):
            # Spreadsheet columns of interest.
            a = self.excel['Referência']
            b = self.excel['Descrição']
            # Drive the progress bar with the current row index.
            self.progressbar['value'] = dado
            # Items whose reference starts with '9' are parts...
            if a[dado][0] == '9':
                # ...and valid part references have exactly 13 characters.
                if len(a[dado]) == 13:
                    # Echo the current part in the status labels.
                    self.ll1['text'] = f'Referência: {a[dado]}'
                    self.ll2['text'] = f'Descrição: {b[dado]}'
                    try:
                        cont += 1
                        # NOTE(review): only the first 10 matches are actually
                        # inserted (cont <= 10) — looks like a leftover debug
                        # cap; confirm before relying on full imports.
                        if cont <= 10:
                            # NOTE(review): explicit ids restart at 1 and may
                            # collide with rows already in the table.
                            idItem += 1
                            self.cursor.execute("INSERT INTO pecas VALUES ("+str(idItem)+", '"+b[dado]+"', '"+a[dado]+"', DEFAULT, DEFAULT, DEFAULT, '"+str(self.dataImport)+"')")
                            self.treeviewPecas.insert("", "end", values=(idItem, b[dado], a[dado], 'Não informado', 'Não informado', 'Não informado', str(self.dataImport)))
                    except mysql.connector.errors.IntegrityError: # duplicate part code: skip the row
                        print('passou')
                    except mysql.connector.errors.OperationalError: # connection lost mid-import: abort
                        if messagebox.showerror(parent=self.janelaInicial, title='Conexão perdida', message='Erro ao importar dados, conexão perdida com Banco MYSQL.'):
                            self.janelaProcesso.destroy()
                            return 0
                    except Exception as erro: # any other failure aborts the import
                        print(f'{erro}, {(erro.__class__)}')
                        if messagebox.showerror(parent=self.janelaInicial, title='Falha na importação', message='Uma falha inesperada aconteceu na importação.'):
                            self.janelaProcesso.destroy()
                            return 0
                # Non-standard part references are collected (currently unused).
                else:
                    listaItem.append(dado)
        # Success message once the whole sheet was processed.
        if messagebox.showinfo(parent=self.janelaInicial, title='Importação concluída', message='Importação concluída com sucesso!'):
            self.janelaProcesso.destroy()
    def abrir_arquivo(self):
        """Ask the user for an .xls file, load it with pandas and open the
        import progress window.

        The bare except deliberately swallows both a cancelled dialog and an
        unreadable file, aborting silently.
        """
        try:
            self.caminhoXLS = filedialog.askopenfilename(title='Selecione o arquivo desejado', filetypes=(('Arquivos XLS', '*.xls'), ('All files', '*.*')))
            # Load the whole spreadsheet into memory.
            self.excel = pd.read_excel(self.caminhoXLS)
        except:
            return 0
        # Hand over to the UI that displays and imports the rows.
        self.janela_exibir_dados_importacao_xml()
    def janela_exibir_dados_importacao_xml(self):
        """Open the progress window for a spreadsheet import and start the
        background thread that performs it."""
        self.janelaProcesso = Toplevel()
        self.janelaProcesso.title('Importando dados')
        self.centraliza_tela(500, 200, self.janelaProcesso)
        self.janelaProcesso['bg'] = 'white'
        lbl = Label(self.janelaProcesso, text=f'Arquivo: {self.caminhoXLS}', font=('arial', 10), bg='white')
        lbl.place(relx=0.0, rely=0.130)
        # Status labels updated live by the worker thread.
        self.ll1 = Label(self.janelaProcesso, text='Referência:', font=('arial', 10), bg='white')
        self.ll1.place(relx=0.0, rely=0.300)
        self.ll2 = Label(self.janelaProcesso, text='Descrição:', font=('arial', 10), bg='white')
        self.ll2.place(relx=0.0, rely=0.400)
        # Import timestamp, also stored on every imported row.
        self.dataImport = datetime.now()
        data = self.dataImport.strftime('Data de Importação: %H:%M:%S do %d/%m/%Y')
        ll3 = Label(self.janelaProcesso, text=data, font=('arial', 10), bg='white')
        ll3.place(relx=0.0, rely=0.500)
        # Progress bar driven by the worker thread.
        # NOTE(review): maximum=10483 is hard-coded — presumably the expected
        # spreadsheet row count; confirm against the real file.
        self.progressbar = ttk.Progressbar(self.janelaProcesso, orient='horizontal', mode='determinate', maximum=10483)
        self.progressbar.place(relx=0.0, rely=0.800, relwidth=0.999)
        # Worker thread that reads the spreadsheet and fills table/treeview.
        threading.Thread(target=self.exibir_pecas_importacao,).start()
        self.janelaProcesso.transient(self.aba4)
        self.janelaProcesso.focus_force()
        self.janelaProcesso.grab_set()
        self.janelaProcesso.mainloop()
    def janela_exibir_pecas(self):
        """Open the modal parts window: registration/import buttons plus a
        treeview listing every part stored in the database."""
        self.janelaPeca = Toplevel()
        self.janelaPeca.title('Cadastrar Peça')
        self.centraliza_tela(800, 500, self.janelaPeca)
        self.janelaPeca['bg'] = 'white'
        # Button images live in globals so Tk does not lose them to the GC.
        global img01
        img01 = PhotoImage(file='image/ferramenta.png')
        botCadPeca = Button(self.janelaPeca, text='Cad. Peça', image=img01, compound=TOP, font=('arial', 9), bg='white', fg='black', relief=SOLID, border=0, command=self.janela_cadastrar_peca)
        botCadPeca.place(relx=0.050, rely=0.030)
        global img02
        img02 = PhotoImage(file='image/importar.png')
        botImport = Button(self.janelaPeca, text='Import Dados', image=img02, compound=TOP, font=('arial', 9), bg='white', fg='black', relief=SOLID, border=0, command=self.abrir_arquivo)
        botImport.place(relx=0.200, rely=0.030)
        # Thin divider line below the buttons.
        canvas = Canvas(self.janelaPeca, bg='#e1e1e1')
        canvas.place(relx=0.050, rely=0.190, relwidth=0.900, relheight=0.005)
        self.treeviewPecas = ttk.Treeview(self.janelaPeca, column=('1','2','3','4','5','6','7'), show='headings')
        self.treeviewPecas.heading('1', text='ID')
        self.treeviewPecas.heading('2', text='Descrição')
        self.treeviewPecas.heading('3', text='Cod. Peça')
        self.treeviewPecas.heading('4', text='Material')
        self.treeviewPecas.heading('5', text='Tratamento')
        self.treeviewPecas.heading('6', text='Cod. Desenho')
        self.treeviewPecas.heading('7', text='Data Cadastrada')
        self.treeviewPecas.column("1", width=1, anchor='n')
        self.treeviewPecas.column("2", width=200, anchor='n')
        self.treeviewPecas.column("3", width=100, anchor='n')
        self.treeviewPecas.column("4", width=100, anchor='n')
        self.treeviewPecas.column("5", width=100, anchor='n')
        self.treeviewPecas.column("6", width=100, anchor='n')
        self.treeviewPecas.column("7", width=100, anchor='n')
        self.treeviewPecas.place(relx=0, rely=0.350, relwidth=0.975, relheight=0.640)
        self.treeviewPecas.bind("<Escape>", lambda event: self.remover_focus(event, self.treeviewPecas))
        scrollbar = Scrollbar(self.janelaPeca, orient="vertical", command=self.treeviewPecas.yview)
        self.treeviewPecas.configure(yscrollcommand=scrollbar.set)
        scrollbar.place(relx=0.975, rely=0.350, relwidth=0.025, relheight=0.640)
        # Populate the treeview with every part on record (static SQL).
        self.cursor.execute("select id, Descricao, CodPeca, Material, Tratamento, CodDesenho, DataCadastro from pecas")
        listaItem = self.cursor.fetchall()
        for peca in listaItem:
            self.treeviewPecas.insert("", "end", values=(peca[0], peca[1], peca[2], peca[3], peca[4], peca[5], peca[6]))
        # Modal behaviour relative to the O.S. tab.
        self.janelaPeca.transient(self.aba4)
        self.janelaPeca.focus_force()
        self.janelaPeca.grab_set()
        self.janelaPeca.mainloop()
class Application(Funcs, Database_Server, Ordem_Servico, Pecas):
    def __init__(self):
        """Build the admin login window, connect to the database and start Tk."""
        self.janelaFuncio = tix.Tk()
        self.janelaFuncio.title('Login Admin')
        self.janelaFuncio.configure(background='white')
        self.janelaFuncio.minsize(500, 400)
        # Start fully transparent; faded in by the after() call below.
        self.janelaFuncio.attributes('-alpha', 0.0)
        # Center the window on screen.
        self.centraliza_tela(600, 600, self.janelaFuncio)
        self.frameLogin = Frame(self.janelaFuncio, highlightbackground='white', highlightcolor='white', highlightthickness=5, bg='white')
        self.frameLogin.place(relx=0.100, rely=0.150, relwidth=0.780, relheight=0.730)
        logo = PhotoImage(file='image/logoMultimoldes.png')
        lblLogo = Label(self.frameLogin, image=logo, bg='white')
        lblLogo.place(relx=0.500, rely=0.130, anchor='center')
        self.labelLogin = Label(self.frameLogin, text='Usuário', bg='white', fg='#344f84', font=('arial',15,'bold'))
        self.labelLogin.place(relx=0.070, rely=0.420)
        # Input mask shared by both fields: digits only, CPF capped at 11
        # characters and the password at 8.
        def limite_campos_login(*args):
            varCPF = cLogin.get()
            varSenha = cSenha.get()
            if len(varCPF) > 11:
                cLogin.set(varCPF[:-1])
            if not varCPF.isnumeric():
                cLogin.set(varCPF[:-1])
            if len(varSenha) > 8:
                cSenha.set(varSenha[:-1])
            if not varSenha.isnumeric():
                cSenha.set(varSenha[:-1])
        # Tk variables wired to the mask above.
        cLogin = StringVar()
        cLogin.trace('w', limite_campos_login)
        cSenha = StringVar()
        cSenha.trace('w', limite_campos_login)
        self.campLogin = Entry(self.frameLogin, font=('arial', 14), textvariable=cLogin, border=2, relief=GROOVE)
        self.campLogin.place(relx=0.290, rely=0.430, relwidth=0.500)
        self.campLogin.bind("<Return>", self.confirmar_login)
        self.labelSenha = Label(self.frameLogin, text='Senha', bg='white', fg='#344f84', font=('arial',15,'bold'))
        self.labelSenha.place(relx=0.070, rely=0.540)
        # show='l' makes every typed character display as the letter 'l'
        # rendered in the Wingdings font, i.e. a masking glyph.
        self.campSenha = Entry(self.frameLogin, show='l', font=('wingdings', 14, 'bold'), textvariable=cSenha, border=2, relief=GROOVE)
        self.campSenha.place(relx=0.290, rely=0.550, relwidth=0.500)
        self.campSenha.bind("<Return>", self.confirmar_login)
        self.botao = Button(self.frameLogin, text='Confirmar', fg='white', activeforeground='white', bg='#344f84', activebackground='#344f84', border=0, font=('arial', 13, 'bold'), width=10, command = lambda: self.confirmar_login(self.confirmar_login))
        self.botao.place(relx=0.390, rely=0.750)
        self.botao.bind("<Return>", self.confirmar_login)
        # Fade the window in after 3 seconds.
        self.janelaFuncio.after(3000, self.janelaFuncio.attributes, "-alpha", 0.93)
        self.connection_database()
        if not self.bancoConnect:
            messagebox.showerror('Verifique a conexão', 'Sem conexão com Banco de Dados')
        self.campLogin.focus_force()
        self.janelaFuncio.mainloop()
def janela_raiz(self):
    """Build the main ("root") admin window: menu bar plus the four
    notebook tabs, then block in the Tk main loop.

    Side effects: creates ``self.janelaInicial`` (Toplevel), the tab
    frames ``self.aba1``..``self.aba4`` and ``self.framePri``, and
    delegates each tab's content to the ``aba_*`` builder methods.
    Does not return until the window is closed.
    """
    self.janelaInicial = Toplevel()
    self.janelaInicial.title('Multimoldes Admin')
    self.janelaInicial.iconbitmap('image/icone.ico')
    self.janelaInicial.resizable(0,0)
    self.sistemaOperacional = system()
    # Maximize the window with the mechanism the OS supports:
    # Windows uses state('zoomed'); X11 window managers use the
    # '-zoomed' attribute instead.
    if self.sistemaOperacional == 'Windows':
        self.janelaInicial.state('zoomed')
    else:
        self.janelaInicial.attributes('-zoomed', True)
    corPadrao = self.janelaInicial['bg']  # default widget background (not used further in this method)
    menubar = Menu(self.janelaInicial)
    self.janelaInicial.config(menu=menubar)
    filemenu = Menu(menubar)
    filemenu2 = Menu(menubar)
    menubar.add_cascade(label='Opções', menu=filemenu)
    # NOTE(review): 'Configurações', 'Relatório' and 'Sobre' all share the
    # same (command-less) filemenu2 submenu — presumably placeholders;
    # confirm each should not have its own Menu instance.
    menubar.add_cascade(label='Configurações', menu=filemenu2)
    menubar.add_cascade(label='Relatório', menu=filemenu2)
    menubar.add_cascade(label='Sobre', menu=filemenu2)
    filemenu.add_command(label='Abrir OS', command=self.criar_ordem_de_servico)
    filemenu.add_command(label='Cad. Peça', command=self.janela_cadastrar_peca)
    filemenu.add_command(label='Sair', command=self.sair)
    # Create the notebook and add its four tabs
    self.abas = ttk.Notebook(self.janelaInicial)
    self.aba1 = Frame(self.abas, bg='white')
    self.aba2 = Frame(self.abas, bg='white')
    self.aba3 = Frame(self.abas, bg='white')
    self.aba4 = Frame(self.abas, bg='white')
    self.abas.add(self.aba1, text='Principal')
    self.abas.add(self.aba2, text='Funcionários')
    self.abas.add(self.aba3, text='Cadastrar')
    self.abas.add(self.aba4, text='Ord. de Serviço')
    self.abas.place(relx=0, rely=0, relwidth=1, relheight=1)
    self.framePri = Frame(self.aba4, bg='white', highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2)
    self.framePri.place(relx=0.045, rely=0.010, relwidth=0.910, relheight=0.970)
    # Multimoldes logo on the 'Cadastrar' tab.
    # NOTE(review): `image` is a local; it only survives because this
    # method blocks in mainloop() below. If this method is ever
    # refactored to return earlier, keep a reference (e.g.
    # self.logo.image = image) or Tk will render a blank label once the
    # PhotoImage is garbage-collected.
    image = PhotoImage(file='image/logo-multimoldes.png')
    self.logo = Label(self.aba3, image=image, bg='white')
    self.logo.pack()
    self.aba_principal()
    self.aba_funcionarios()
    self.aba_cadastro()
    self.aba_ordem_de_servico()
    # Route the window-manager close button through self.sair for a clean exit.
    self.janelaInicial.protocol('WM_DELETE_WINDOW', self.sair)
    self.janelaInicial.mainloop()
#Abas com as funcionalidades do programa
def aba_principal(self):
    """Populate the 'Principal' tab: the daily KPI labels (items
    produced, reworked, machine stops) fed from the database, two
    self-animating donut charts, and an operator-ranking canvas.

    Assumes ``self.cursor`` is a live DB cursor; every query is wrapped
    in try/except so the dashboard still renders without a database.
    """
    # Indicator labels
    linfo1 = Label(self.aba1, text='Itens produzidos', font=('arial', 14), bg='white', fg='black')
    linfo1.place(relx=0.100, rely=0.055)
    ldados1 = Label(self.aba1, font=('arial', 20), bg='white', fg='#45ccce')
    ldados1.place(relx=0.220, rely=0.045)
    try:
        # NOTE(review): all queries below build SQL by string
        # concatenation. The interpolated values come from
        # datetime/previous query results rather than user input, but
        # parameterized queries would still be safer.
        self.dataHoje = str(datetime.now().date())
        self.cursor.execute("SELECT DISTINCT peca FROM ordem_processo WHERE DataFinalizada LIKE '%"+self.dataHoje+"%' AND Estado = 'fechada'")
        self.pecas = self.cursor.fetchall()
        # Build a comma-separated id list for the IN (...) clauses.
        forma = ''
        for v in self.pecas:
            forma += str(v[0])+','
        c = len(forma)
        c -= 1
        forma = forma[:c]  # drop the trailing comma
        # Of today's closed pieces, find those that still have open operations.
        self.cursor.execute("SELECT DISTINCT peca FROM ordem_processo WHERE Peca IN ("+forma+") AND Estado <> 'fechada' AND DataFinalizada LIKE '%"+self.dataHoje+"%'")
        a = self.cursor.fetchall()
        if a != []:
            # Some pieces are still in progress: count only the fully
            # closed ones (NOT IN the in-progress list).
            a = a[0]
            forma = ''
            for v in a:
                forma += str(v)+','
            c = len(forma)
            c -= 1
            forma = forma[:c]  # drop the trailing comma
            self.cursor.execute("SELECT count(DISTINCT peca) FROM ordem_processo WHERE Peca NOT IN ("+forma+") AND Estado = 'fechada' AND DataFinalizada LIKE '%"+self.dataHoje+"%'")
            a = self.cursor.fetchall()[0]
        else:
            # No piece has pending operations: every closed piece counts.
            self.cursor.execute("SELECT COUNT(DISTINCT peca) FROM ordem_processo WHERE Peca IN ("+forma+") AND Estado = 'fechada' AND DataFinalizada LIKE '%"+self.dataHoje+"%'")
            a = self.cursor.fetchall()[0]
        ldados1['text'] = a[0]
    except Exception as erro:
        # Log and carry on — the label simply stays empty.
        print(f'{erro}, {(erro.__class__)}')
        pass
    linfo2 = Label(self.aba1, text='Itens em andamento', font=('arial', 14), bg='white', fg='black')
    linfo2.place(relx=0.320, rely=0.055)
    linfo3 = Label(self.aba1, text='Itens retrabalhados', font=('arial', 14), bg='white', fg='black')
    linfo3.place(relx=0.540, rely=0.055)
    ldados3 = Label(self.aba1, font=('arial', 20), bg='white', fg='red')
    ldados3.place(relx=0.680, rely=0.045)
    try:
        # Count today's closed rework orders.
        self.cursor.execute("SELECT COUNT(Tipo) FROM ordem_processo WHERE Tipo = 'Retrabalhar OS' AND Estado = 'fechada' AND DataFinalizada LIKE '%"+self.dataHoje+"%'")
        ldados3['text'] = self.cursor.fetchall()[0][0]
    except Exception as erro:
        print(f'{erro}, {(erro.__class__)}')
        pass
    linfo4 = Label(self.aba1, text='Paradas de máquinas', font=('arial', 14), bg='white', fg='black')
    linfo4.place(relx=0.760, rely=0.055)
    ldados4 = Label(self.aba1, font=('arial', 20), bg='white', fg='orange')
    ldados4.place(relx=0.920, rely=0.045)
    try:
        # Count today's machine pauses.
        self.cursor.execute("SELECT COUNT(id) FROM pausas WHERE DataPause LIKE '%"+self.dataHoje+"%' ")
        ldados4['text'] = self.cursor.fetchall()[0][0]
    except:
        pass
    # Donut chart frame: finished-OS progress
    frameDetalhe1 = Frame(self.aba1, bg='white', highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=0)
    frameDetalhe1.place(relx=0.010, rely=0.120, relwidth=0.450, relheight=0.400)
    def buscar_dados():
        # Animation tick, rescheduled every 100 ms.
        # NOTE(review): canvas item ids are hard-coded (2 = the arc,
        # 4 = the centre text) and depend on the exact creation order
        # of the items below — fragile if items are added/reordered.
        # The extent also decreases without bound; presumably Tk
        # normalizes it, but long-run behavior should be verified.
        a = C.itemcget(2, 'extent')
        a = float(a)
        b = int(a) - 1.0
        C.itemconfig(2, extent=b)  # advance the arc clockwise
        inteiro = int(b * (-1))  # degree value, made positive and integer
        porc = str(round(((inteiro / 360) * 100),2))  # degrees -> rounded percentage
        C.itemconfig(4, text=porc+'%')  # show the result in the centre
        frameDetalhe1.after(100, buscar_dados)
    C = Canvas(frameDetalhe1, bg="white")
    # Item 1: grey background ring
    circle1 = 0, 0, 200, 200
    oval1 = C.create_oval(circle1, outline='#e6e6e6', fill="#e6e6e6")
    C.move(oval1, 10,10)
    # Item 2: the progress arc animated by buscar_dados()
    coord = 0, 0, 200, 200
    arc = C.create_arc(coord, start=90, extent=0, outline='#45ccce', fill="#45ccce")
    C.move(arc, 10, 10)
    # Item 3: white inner disc turning the pie into a donut
    circle = 0, 0, 150, 150
    oval = C.create_oval(circle, outline='white', fill="white")
    C.move(oval, 35,35)
    # Item 4: percentage text in the donut's centre
    textPrincipal = C.create_text(110, 110, font="Arial 22")
    textLabel = C.create_text(110, 110, text="Qtd Finalizada", font="Arial 12")
    C.move(textLabel, 270,-30)
    # NOTE(review): "795" looks like placeholder sample data — confirm.
    textTotal = C.create_text(110, 110, text="795", font="Arial 30", fill='#45ccce')
    C.move(textTotal, 270,10)
    textLabel2 = C.create_text(110, 110, text="Operações", font="Arial 12")
    C.move(textLabel2, 270, 40)
    C.place(relx=0.500, rely=0.500, anchor='center', relwidth=0.800, relheight=0.780)
    buscar_dados()
    # Informational labels for the finished-OS chart
    corPadrao = self.janelaInicial['bg']
    # Donut chart frame: paused-OS progress
    frameDetalhe2 = Frame(self.aba1, bg='white', highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=0)
    frameDetalhe2.place(relx=0.520, rely=0.120, relwidth=0.450, relheight=0.400)
    def buscar_dados2():
        # Same animation as buscar_dados(), driving the second canvas
        # (same hard-coded item ids 2 and 4 — see note above).
        a = C2.itemcget(2, 'extent')
        a = float(a)
        b = int(a) - 1.0
        C2.itemconfig(2, extent=b)  # advance the arc clockwise
        inteiro = int(b * (-1))  # degree value, made positive and integer
        porc = str(round(((inteiro / 360) * 100),2))  # degrees -> rounded percentage
        C2.itemconfig(4, text=porc+'%')  # show the result in the centre
        frameDetalhe2.after(100, buscar_dados2)
    C2 = Canvas(frameDetalhe2, bg="white")
    varCircle1 = 0, 0, 200, 200
    varOval1 = C2.create_oval(varCircle1, outline='#e6e6e6', fill="#e6e6e6")
    C2.move(varOval1, 10,10)
    coord = 0, 0, 200, 200
    varArc = C2.create_arc(coord, start=90, extent=0, outline='yellow', fill="yellow")
    C2.move(varArc, 10, 10)
    varCircle = 0, 0, 150, 150
    oval = C2.create_oval(varCircle, outline='white', fill="white")
    C2.move(oval, 35,35)
    varTextPrincipal = C2.create_text(110, 110, font="Arial 22")
    varTextLabel = C2.create_text(110, 110, text="Qtd Pausada", font="Arial 12")
    C2.move(varTextLabel, 270,-30)
    varTextTotal = C2.create_text(110, 110, text="795", font="Arial 30", fill="yellow")
    C2.move(varTextTotal, 270,10)
    varTextLabel2 = C2.create_text(110, 110, text="Operações", font="Arial 12")
    C2.move(varTextLabel2, 270, 40)
    C2.place(relx=0.500, rely=0.500, anchor='center', relwidth=0.800, relheight=0.780)
    buscar_dados2()
    # Treeview style formatting (disabled)
    #style = ttk.Style()
    #style.configure('Treeview.Heading', font=('arial', 9), foreground='#344f84')
    # Operator-ranking canvas: static horizontal bars plus labels.
    C3 = Canvas(self.aba1, bg="white")
    textTitleC3 = C3.create_text(0, 0, text="Ranking de Operadores", font="Arial 10")
    C3.move(textTitleC3, 80,20)
    line1 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    x1 = 0
    y1 = 50
    C3.move(line1, x1,y1)
    # Place the "100%" label just to the right of the first bar's end.
    tx = int(C3.coords(2)[2])
    textLabelC3 = C3.create_text(0, 0, text="100%", font="Arial 10")
    C3.move(textLabelC3, tx+x1+20,y1)
    line2 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line2, 0,80)
    line3 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line3, 0,110)
    line4 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line4, 0,140)
    line5 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line5, 0,170)
    line6 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line6, 0,200)
    line7 = C3.create_rectangle(300, 5, 00, 00, fill='#f65d66', outline='#f65d66', width=2)
    C3.move(line7, 0,230)
    lineDiv = C3.create_line(0, 150, 0, 0, width=2, fill='#e6e6e6')
    C3.move(lineDiv, 370,70)
    # NOTE(review): hard-coded sample name — presumably to be replaced by
    # real ranking data from the database.
    textNome1 = C3.create_text(0, 0, text="Marcos", font="Arial 10")
    C3.move(textNome1,450,50)
    C3.place(relx=0.250, rely=0.700, anchor='center', relwidth=0.400, relheight=0.350)
def aba_funcionarios(self):
    """Populate the 'Funcionários' tab: a Treeview listing employees on
    the left and a profile panel (photo, edit/delete/clear buttons and
    personal-data labels) on the right.

    Side effects: creates ``self.viewFuncionarios`` and the
    ``self.framePerfil`` widgets used by the profile-handling methods,
    and calls ``self.exibir_funcionarios('funcionarios')`` to load the
    initial rows when the database is reachable.
    """
    # `image` must outlive this method, otherwise Tk shows a blank
    # label once the local PhotoImage is garbage-collected.
    global image
    # Frame 1: displays the employees
    self.frameFuncionarios = Frame(self.aba2, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
    self.frameFuncionarios.place(relx=0.040, rely=0.040, relwidth=0.650, relheight=0.930)
    # Company logo image
    image = PhotoImage(file='image/logo-multimoldes.png')
    logo = Label(self.frameFuncionarios, image=image, bg='white')
    logo.pack()
    lb1 = Label(self.frameFuncionarios, text='Total Funci', font=('arial black', 11, 'bold'), fg='#4c78ff', bg='white')
    lb1.place(relx=0.010, rely=0.010)
    self.lbFunc = Label(self.frameFuncionarios, text='0', font=('arial black', 11, 'bold'), fg='green', bg='white')
    self.lbFunc.place(relx=0.140, rely=0.010)
    if self.bancoConnect:
        self.atualiza_valores_funcionario()
    # Treeview that lists the employees
    self.viewFuncionarios = ttk.Treeview(self.frameFuncionarios, column=('1','2','3','4'), show='headings')
    self.viewFuncionarios.heading('1', text='ID')
    self.viewFuncionarios.heading('2', text='Nome')
    self.viewFuncionarios.heading('3', text='Cargo')
    self.viewFuncionarios.heading('4', text='CPF')
    # NOTE(review): negative width on the ID column is presumably meant
    # to collapse it to its minimum size — confirm intent.
    self.viewFuncionarios.column("1", width=-90, anchor='n')
    self.viewFuncionarios.column("2", width=120, anchor='n')
    self.viewFuncionarios.column("3", width=30, anchor='n')
    self.viewFuncionarios.column("4", width=30, anchor='n')
    self.viewFuncionarios.place(relx=0.0, rely=0.300, relwidth=0.961, relheight=0.700)
    self.viewFuncionarios.bind('<Double -1>', self.exibir_perfil_funcionarios)
    self.viewFuncionarios.bind('<Return>', self.exibir_perfil_funcionarios)
    self.viewFuncionarios.bind("<Escape>", lambda event: self.remover_focus(event, self.viewFuncionarios))
    # FIX: the scrollbar must drive the treeview's yview method, not the
    # widget itself — passing the widget made dragging the scrollbar
    # invoke an invalid Tcl subcommand instead of scrolling.
    scrollbar = Scrollbar(self.frameFuncionarios, orient="vertical", command=self.viewFuncionarios.yview)
    self.viewFuncionarios.configure(yscrollcommand=scrollbar.set)
    scrollbar.place(relx=0.960, rely=0.300, relwidth=0.040, relheight=0.700)
    # Load the registered employees into the treeview
    self.exibir_funcionarios('funcionarios')
    # Frame 2: shows the selected employee's data
    self.framePerfil = Frame(self.aba2, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
    self.framePerfil.place(relx=0.750, rely=0.040, relwidth=0.200, relheight=0.930)
    self.imgPerfil = PhotoImage(file='image/sem_perfil.png')
    self.lbPerfil = Label(self.framePerfil, image=self.imgPerfil, bg='white', relief=FLAT)
    self.lbPerfil.place(relx=0.320, rely=0.040)
    # Button images are kept in globals for the same GC reason as `image`.
    global imgEditar
    imgEditar = PhotoImage(file='image/editar.png')
    buttonEditar = Button(self.framePerfil, image=imgEditar, bg='white', relief=FLAT, border=0, activebackground='white', command=self.editar_perfil_funcionario)
    buttonEditar.place(relx=0.040, rely=0.070)
    global imgExcluir
    imgExcluir = PhotoImage(file='image/excluir.png')
    buttonExcluir = Button(self.framePerfil, image=imgExcluir, bg='white', relief=FLAT, border=0, activebackground='white', command=self.deletar_perfil_funcionario)
    buttonExcluir.place(relx=0.040, rely=0.135)
    global imgLimpar
    imgLimpar = PhotoImage(file='image/limpar2.png')
    buttonLimpar = Button(self.framePerfil, image=imgLimpar, bg='white', relief=FLAT, border=0, activebackground='white', command=self.limpar_perfil_funcionario)
    buttonLimpar.place(relx=0.040, rely=0.200)
    # Placeholder notice shown until a profile is selected
    self.labelAviso = Label(self.framePerfil, text='''
Selecione um Perfil
para exibir
os seus Dados''', justify=CENTER, anchor='w', bg='white', font=('arial', 11))
    self.labelAviso.place(relx=0.150, rely=0.400)
    # Personal-data labels (placed later by the profile-display method)
    self.lNome = Label(self.framePerfil, font=('arial black', 10, 'bold'), justify=CENTER, fg='#4c78ff', bg='white')
    self.lIdade = Label(self.framePerfil, font=('arial black', 10, 'bold'), justify=CENTER, fg='#4c78ff', bg='white')
    self.lFone = Label(self.framePerfil, font=('arial black', 10, 'bold'), justify=CENTER, fg='#4c78ff', bg='white')
    # Labels totalling completed and pending service orders
    self.l_OS_Con = Label(self.framePerfil, text='OS Concluídas', font=('arial black', 9), fg='#4c78ff', bg='white')
    self.l_OS_Pen = Label(self.framePerfil, text='OS Pendentes', font=('arial black', 9), fg='#4c78ff', bg='white')
    self.l_OS_Dados1 = Label(self.framePerfil, font=('arial black', 9), fg='green', bg='white')
    self.l_OS_Dados2 = Label(self.framePerfil, font=('arial black', 9), fg='red', bg='white')
    self.botVer = Button(self.framePerfil, text='Ver Habilidade', font=('arial black', 10), fg='white', bg='#4c78ff', border=1, relief=SOLID, command=self.exibir_habilidade)
def aba_cadastro(self):
#Frame de Login dos registros de conta do usuário
self.frameDadosLogin = Frame(self.aba3, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
self.frameDadosLogin.place(relx=0.100, rely=0.150, relwidth=0.800, relheight=0.270)
#Label de inserção de foto do Funcionário
self.imgSemPerfil = PhotoImage(file='image/sem_perfil.png')
self.foto = Label(self.frameDadosLogin, image=self.imgSemPerfil, width=150, height=150, bg='white')
self.foto.place(relx=0.830, rely=0.060)
self.arquivoIMG = 'image/sem_perfil.png'
def selecionar_imagem():
#Abrindo arquivo de imagem para foto de perfil
self.caminhoBackup = self.arquivoIMG
self.arquivoIMG = filedialog.askopenfilename(title='Selecione imagem de perfil', filetypes=(('Imagem PNG', '*.png'), ('All files', '*.*')))
#Se for zero nenhuma foto foi selecionada
if len(self.arquivoIMG) == 0:
#Tentando verificar se existe cookie de imagem
try:
self.foto['image'] = self.cookie
self.arquivoIMG = self.caminhoBackup
except: pass
return ''
#Armazenando a foto selecionada.
self.imgSelecionada = PhotoImage(file=self.arquivoIMG)
#Verificando se imagem não excede o comprimento máximo permitido
if self.imgSelecionada.width() > 150 or self.imgSelecionada.height() > 150:
return messagebox.showinfo('Tamanho não permitido', 'A imagem selecionada possui comprimento grande demais')
self.cookie = self.imgSelecionada
#Configurando Labels parar exibir imagem de selecionar e botão de editar
self.foto['image'] = self.imgSelecionada
self.imgAdd = PhotoImage(file='image/lapis.png')
self.add['image'] = self.imgAdd
self.add.place(relx=0.955, rely=0.700)
self.imgAdd = PhotoImage(file='image/abrir.png')
self.add = Button(self.frameDadosLogin, image=self.imgAdd, bg='white', relief=FLAT, activebackground='white', border=0, command=selecionar_imagem)
self.add.place(relx=0.890, rely=0.580)
#labels referente aos campos de login
self.lbDados = Label(self.frameDadosLogin, text='Dados', font=('arial black', 14), bg='white', fg='#4c78ff')
self.lbDados.place(relx=0.010, rely=0.010)
self.lbNome = Label(self.frameDadosLogin, text='Nome', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbCPF = Label(self.frameDadosLogin, text='CPF', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbFuncao = Label(self.frameDadosLogin, text='Função', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbFone = Label(self.frameDadosLogin, text='Fone', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbNasc = Label(self.frameDadosLogin, text='Data de Nasc.', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbSenha = Label(self.frameDadosLogin, text='Senha', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbSenhaErro = Label(self.frameDadosLogin, text='', font=('arial', 10, 'bold'), fg='red', bg='white')
self.lbConfirmaSenha = Label(self.frameDadosLogin, text='Confirme Senha', font=('arial',12, 'bold'), bg='white', fg='#4c78ff')
self.lbConfirmaSenhaErro = Label(self.frameDadosLogin, text='', font=('arial', 10, 'bold'), fg='red', bg='white')
self.lbNome.place(relx=0.020, rely=0.220)
self.lbCPF.place(relx=0.470, rely=0.220)
self.lbFuncao.place(relx=0.020, rely=0.450)
self.lbFone.place(relx=0.300, rely=0.450)
self.lbNasc.place(relx=0.570, rely=0.450)
self.lbSenha.place(relx=0.020, rely=0.700)
self.lbSenhaErro.place(relx=0.110, rely=0.750)
self.lbConfirmaSenha.place(relx=0.300, rely=0.700)
self.lbConfirmaSenhaErro.place(relx=0.440, rely=0.750)
#Função que impedirá que o usuário digite valores diferentes do que o campos propõe
def verifica_campo(*args):
value = strNome.get()
if len(value) > 0:
if value[-1].isnumeric():
strNome.set(value[:-1])
else:
strNome.set(value[:50])
value2 = nCPF.get()
if len(value2) > 0:
if not value2[-1].isnumeric():
nCPF.set(value2[:-1])
else:
nCPF.set(value2[:11])
value3 = nSenha.get()
if len(value3) > 0:
if not value3[-1].isnumeric():
nSenha.set(value3[:-1])
else:
nSenha.set(value3[0:8])
if len(value3) >= 8:
self.campoConfirmaSenha.configure(state=NORMAL)
else:
self.campoConfirmaSenha.configure(state=DISABLED)
else:
self.lbConfirmaSenhaErro['text'] = ''
self.campoConfirmaSenha.configure(state=DISABLED)
value4 = nConfirmaSenha.get()
if len(value4) > 0:
if len(value4) == 8 and value4 != value3:
self.lbConfirmaSenhaErro['text'] = 'As senhas não coincidem'
elif len(value4) == 8 and value4 == value3:
self.lbConfirmaSenhaErro['text'] = ''
if not value4[-1].isnumeric():
nConfirmaSenha.set(value4[:-1])
else:
nConfirmaSenha.set(value4[:8])
else:
self.lbConfirmaSenhaErro['text'] = ''
def format_campo_fone(*args):
mask = nFone.get()
nFone.set(mask[:15])
#Se houver 2 dígitos no campo, e eles forem númericos...
if len(mask) == 2 and mask.isnumeric():
#Delete todo o campo
self.campoFone.delete(0, END)
#E acrescente parênteses com o valor obtido dentro
self.campoFone.insert(END,'('+mask+')')
#Se houver 9 dígitos no campo, e os últimos 4 forem númericos...
if len(mask) == 9 and mask[4:].isnumeric():
#Delete todo o campo
self.campoFone.delete(0, END)
#pegue os primeiros quatro dígitos e acrescente um espaço no final
a = mask[0:4]+' '
#com os primeiros dígitos acrescentado com o espaço, concatene com o resto
a = a+mask[4:]
#Devolvendo o valor ajustado para a variável principal
mask = a
#Insira no campo os números com a máscara pronta
self.campoFone.insert(END, mask+'-')
def format_campo_nasc(*args):
mask = nNasc.get()
nNasc.set(mask[:10])
#Se houver 2 dígitos no campo, e eles forem númericos...
if len(mask) == 2 and mask.isnumeric():
#Delete todo o campo
self.campoNasc.delete(0, END)
#E acrescente parênteses com o valor obtido dentro
self.campoNasc.insert(END, mask+'/')
elif len(mask) == 5 and mask[3:].isnumeric():
#Delete todo o campo
self.campoNasc.delete(0, END)
#E acrescente parênteses com o valor obtido dentro
self.campoNasc.insert(END, mask+'/')
#Variáveis que será utilizadas para verificação dos campos
strNome = StringVar()
strNome.trace('w', verifica_campo)
nCPF = StringVar()
nCPF.trace('w', verifica_campo)
nFone = StringVar()
nFone.trace('w', format_campo_fone)
nNasc = StringVar()
nNasc.trace('w', format_campo_nasc)
nSenha = StringVar()
nSenha.trace('w', verifica_campo)
nConfirmaSenha = StringVar()
nConfirmaSenha.trace('w', verifica_campo)
#Campos de preenchimento dos dados de login
self.campoNome = Entry(self.frameDadosLogin, font=('arial',12), textvariable=strNome, border=2, relief=GROOVE)
self.campoNome.focus_force()
self.campoCPF = Entry(self.frameDadosLogin, font=('arial',12), textvariable=nCPF, border=2, relief=GROOVE)
self.campoFuncao = ttk.Combobox(self.frameDadosLogin, font=('arial',12), state='readonly')
self.campoFuncao['values'] = ('Selecione', 'Frezador', 'Soldador', 'Torneiro', 'Caldereiro', 'Tec. Usinagem', 'Operador CNC', 'Operador/Programador CNC', 'Tec. em Mecânica', 'Desenhista', 'Eletrotécnica')
self.campoFuncao.current(0)
self.campoFone = Entry(self.frameDadosLogin, font=('arial',12), textvariable=nFone, border=2, relief=GROOVE)
self.campoNasc = Entry(self.frameDadosLogin, font=('arial',12), textvariable=nNasc, border=2, relief=GROOVE)
self.campoSenha = Entry(self.frameDadosLogin, font=('arial',12), show='*', textvariable=nSenha, border=2, relief=GROOVE)
self.campoConfirmaSenha = Entry(self.frameDadosLogin, font=('arial',12), show='*', textvariable=nConfirmaSenha,state=DISABLED, border=2, relief=GROOVE)
self.campoNome.place(relx=0.080, rely=0.220, relwidth=0.350)
self.campoCPF.place(relx=0.518, rely=0.220, relwidth=0.175)
self.campoFuncao.place(relx=0.080, rely=0.450)
self.campoFone.place(relx=0.350, rely=0.450)
self.campoNasc.place(relx=0.680, rely=0.450, relwidth=0.130)
self.campoSenha.place(relx=0.085, rely=0.700, relwidth=0.175)
self.campoConfirmaSenha.place(relx=0.430, rely=0.700, relwidth=0.175)
def mostrar_senha():
if self.senhaVisible == False:
self.campoSenha['show'] = ''
self.campoConfirmaSenha['show'] = ''
self.senhaVisible = True
self.cadeado = PhotoImage(file='image/cadeado_aberto.png')
self.check['image'] = self.cadeado
else:
self.campoSenha['show'] = '*'
self.campoConfirmaSenha['show'] = '*'
self.senhaVisible = False
self.cadeado = PhotoImage(file='image/cadeado.png')
self.check['image'] = self.cadeado
self.senhaVisible = False
self.cadeado = PhotoImage(file='image/cadeado.png')
self.check = Button(self.frameDadosLogin, image=self.cadeado, bg='white', activebackground='white', border=0, command=mostrar_senha)
self.check.place(relx=0.620, rely=0.680)
#Frame de atribuição das habilidades dos funcionários
self.frameAtribuicao = Frame(self.aba3, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
self.frameAtribuicao.place(relx=0.100, rely=0.450, relwidth=0.800, relheight=0.430)
#labels referente aos campos de Atribuição
self.lbAtribuicao = Label(self.frameAtribuicao, text='Competência ', font=('arial black', 14), bg='white', fg='#4c78ff')
self.lbAtribuicao.place(relx=0.010, rely=0.010)
self.lbAtribuicao1 = Label(self.frameAtribuicao, text='Desenhar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao2 = Label(self.frameAtribuicao, text='Revisar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao3 = Label(self.frameAtribuicao, text='Serrar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao4 = Label(self.frameAtribuicao, text='Furar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao5 = Label(self.frameAtribuicao, text='Estampar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao6 = Label(self.frameAtribuicao, text='Prensar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao7 = Label(self.frameAtribuicao, text='Rosquear', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao8 = Label(self.frameAtribuicao, text='Tornear 1º', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao9 = Label(self.frameAtribuicao, text='Tornear 2º', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao10 = Label(self.frameAtribuicao, text='Tornear Única', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao11 = Label(self.frameAtribuicao, text='Fresar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao12 = Label(self.frameAtribuicao, text='Retificar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao13 = Label(self.frameAtribuicao, text='Erosão Penetração', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao14 = Label(self.frameAtribuicao, text='Erosão Fio', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao15 = Label(self.frameAtribuicao, text='Tratamento Térmico', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao16 = Label(self.frameAtribuicao, text='Oxidação Negra', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao17 = Label(self.frameAtribuicao, text='Solda', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao18 = Label(self.frameAtribuicao, text='Solda Ponto', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao19 = Label(self.frameAtribuicao, text='Solda Indução', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao20 = Label(self.frameAtribuicao, text='Lixar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao21 = Label(self.frameAtribuicao, text='Esmerilhar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao22 = Label(self.frameAtribuicao, text='Jatear', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao23 = Label(self.frameAtribuicao, text='Polir', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao24 = Label(self.frameAtribuicao, text='Lavar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao25 = Label(self.frameAtribuicao, text='Embalar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao26 = Label(self.frameAtribuicao, text='Medir', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao27 = Label(self.frameAtribuicao, text='Rebarbar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao28 = Label(self.frameAtribuicao, text='Rosquear Manualmente', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao29 = Label(self.frameAtribuicao, text='Pintar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao30 = Label(self.frameAtribuicao, text='Montar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao31 = Label(self.frameAtribuicao, text='Escarear', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao32 = Label(self.frameAtribuicao, text='Afiar Ferramenta', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao33 = Label(self.frameAtribuicao, text='Dobrar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao34 = Label(self.frameAtribuicao, text='Chanfrar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao35 = Label(self.frameAtribuicao, text='Soldar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao36 = Label(self.frameAtribuicao, text='Cortar c/lixadeira', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao37 = Label(self.frameAtribuicao, text='Cortar c/maçarico', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao38 = Label(self.frameAtribuicao, text='Aquecer com Maçarico', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao39 = Label(self.frameAtribuicao, text='Temperar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao40 = Label(self.frameAtribuicao, text='Revenir', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao41 = Label(self.frameAtribuicao, text='Desempenar', font=('arial', 10, 'bold'), bg='white', fg='#4c78ff')
self.lbAtribuicao1.place(relx=0.020, rely=0.130, relwidth=0.160)
self.lbAtribuicao2.place(relx=0.260, rely=0.130, relwidth=0.160)
self.lbAtribuicao3.place(relx=0.510, rely=0.130, relwidth=0.160)
self.lbAtribuicao4.place(relx=0.760, rely=0.130, relwidth=0.160)
self.lbAtribuicao5.place(relx=0.020, rely=0.210, relwidth=0.160)
self.lbAtribuicao6.place(relx=0.260, rely=0.210, relwidth=0.160)
self.lbAtribuicao7.place(relx=0.510, rely=0.210, relwidth=0.160)
self.lbAtribuicao8.place(relx=0.760, rely=0.210, relwidth=0.160)
self.lbAtribuicao9.place(relx=0.020, rely=0.290, relwidth=0.160)
self.lbAtribuicao10.place(relx=0.260, rely=0.290, relwidth=0.160)
self.lbAtribuicao11.place(relx=0.510, rely=0.290, relwidth=0.160)
self.lbAtribuicao12.place(relx=0.760, rely=0.290, relwidth=0.160)
self.lbAtribuicao13.place(relx=0.020, rely=0.370, relwidth=0.160)
self.lbAtribuicao14.place(relx=0.260, rely=0.370, relwidth=0.160)
self.lbAtribuicao15.place(relx=0.510, rely=0.370, relwidth=0.160)
self.lbAtribuicao16.place(relx=0.760, rely=0.370, relwidth=0.160)
self.lbAtribuicao17.place(relx=0.020, rely=0.450, relwidth=0.160)
self.lbAtribuicao18.place(relx=0.260, rely=0.450, relwidth=0.160)
self.lbAtribuicao19.place(relx=0.510, rely=0.450, relwidth=0.160)
self.lbAtribuicao20.place(relx=0.760, rely=0.450, relwidth=0.160)
self.lbAtribuicao21.place(relx=0.020, rely=0.530, relwidth=0.160)
self.lbAtribuicao22.place(relx=0.260, rely=0.530, relwidth=0.160)
self.lbAtribuicao23.place(relx=0.510, rely=0.530, relwidth=0.160)
self.lbAtribuicao24.place(relx=0.760, rely=0.530, relwidth=0.160)
self.lbAtribuicao25.place(relx=0.020, rely=0.610, relwidth=0.160)
self.lbAtribuicao26.place(relx=0.260, rely=0.610, relwidth=0.160)
self.lbAtribuicao27.place(relx=0.510, rely=0.610, relwidth=0.160)
self.lbAtribuicao28.place(relx=0.760, rely=0.610, relwidth=0.160)
self.lbAtribuicao29.place(relx=0.020, rely=0.690, relwidth=0.160)
self.lbAtribuicao30.place(relx=0.260, rely=0.690, relwidth=0.160)
self.lbAtribuicao31.place(relx=0.510, rely=0.690, relwidth=0.160)
self.lbAtribuicao32.place(relx=0.760, rely=0.690, relwidth=0.160)
self.lbAtribuicao33.place(relx=0.020, rely=0.770, relwidth=0.160)
self.lbAtribuicao34.place(relx=0.260, rely=0.770, relwidth=0.160)
self.lbAtribuicao35.place(relx=0.510, rely=0.770, relwidth=0.160)
self.lbAtribuicao36.place(relx=0.760, rely=0.770, relwidth=0.160)
self.lbAtribuicao37.place(relx=0.020, rely=0.850, relwidth=0.160)
self.lbAtribuicao38.place(relx=0.260, rely=0.850, relwidth=0.160)
self.lbAtribuicao39.place(relx=0.510, rely=0.850, relwidth=0.160)
self.lbAtribuicao40.place(relx=0.760, rely=0.850, relwidth=0.160)
self.lbAtribuicao41.place(relx=0.020, rely=0.930, relwidth=0.160)
#Boxes de seleção para o nível de habilidades do usuário em cada operação
self.box1 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box2 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box3 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box4 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box5 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box6 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box7 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box8 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box9 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box10 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box11 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box12 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box13 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box14 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box15 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box16 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box17 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box18 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box19 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box20 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box21 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box22 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box23 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box24 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box25 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box26 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box27 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box28 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box29 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box30 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box31 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box32 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box33 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box34 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box35 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box36 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box37 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box38 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box39 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box40 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box41 = Spinbox(self.frameAtribuicao, from_=0, to=4, font=('arial',10), relief=GROOVE, border=2)
self.box1.place(relx=0.190, rely=0.130, relwidth=0.06)
self.box2.place(relx=0.430, rely=0.130, relwidth=0.06)
self.box3.place(relx=0.680, rely=0.130, relwidth=0.06)
self.box4.place(relx=0.930, rely=0.130, relwidth=0.06)
self.box5.place(relx=0.190, rely=0.210, relwidth=0.06)
self.box6.place(relx=0.430, rely=0.210, relwidth=0.06)
self.box7.place(relx=0.680, rely=0.210, relwidth=0.06)
self.box8.place(relx=0.930, rely=0.210, relwidth=0.06)
self.box9.place(relx=0.190, rely=0.290, relwidth=0.06)
self.box10.place(relx=0.430, rely=0.290, relwidth=0.06)
self.box11.place(relx=0.680, rely=0.290, relwidth=0.06)
self.box12.place(relx=0.930, rely=0.290, relwidth=0.06)
self.box13.place(relx=0.190, rely=0.370, relwidth=0.06)
self.box14.place(relx=0.430, rely=0.370, relwidth=0.06)
self.box15.place(relx=0.680, rely=0.370, relwidth=0.06)
self.box16.place(relx=0.930, rely=0.370, relwidth=0.06)
self.box17.place(relx=0.190, rely=0.450, relwidth=0.06)
self.box18.place(relx=0.430, rely=0.450, relwidth=0.06)
self.box19.place(relx=0.680, rely=0.450, relwidth=0.06)
self.box20.place(relx=0.930, rely=0.450, relwidth=0.06)
self.box21.place(relx=0.190, rely=0.530, relwidth=0.06)
self.box22.place(relx=0.430, rely=0.530, relwidth=0.06)
self.box23.place(relx=0.680, rely=0.530, relwidth=0.06)
self.box24.place(relx=0.930, rely=0.530, relwidth=0.06)
self.box25.place(relx=0.190, rely=0.610, relwidth=0.06)
self.box26.place(relx=0.430, rely=0.610, relwidth=0.06)
self.box27.place(relx=0.680, rely=0.610, relwidth=0.06)
self.box28.place(relx=0.930, rely=0.610, relwidth=0.06)
self.box29.place(relx=0.190, rely=0.690, relwidth=0.06)
self.box30.place(relx=0.430, rely=0.690, relwidth=0.06)
self.box31.place(relx=0.680, rely=0.690, relwidth=0.06)
self.box32.place(relx=0.930, rely=0.690, relwidth=0.06)
self.box33.place(relx=0.190, rely=0.770, relwidth=0.06)
self.box34.place(relx=0.430, rely=0.770, relwidth=0.06)
self.box35.place(relx=0.680, rely=0.770, relwidth=0.06)
self.box36.place(relx=0.930, rely=0.770, relwidth=0.06)
self.box37.place(relx=0.190, rely=0.850, relwidth=0.06)
self.box38.place(relx=0.430, rely=0.850, relwidth=0.06)
self.box39.place(relx=0.680, rely=0.850, relwidth=0.06)
self.box40.place(relx=0.930, rely=0.850, relwidth=0.06)
self.box41.place(relx=0.190, rely=0.930, relwidth=0.06)
#Botão que confirmará os dados quando solicitado
self.imgConfirmar = PhotoImage(file='image/confirmar.png')
self.botaoConfirmar = Button(self.aba3, image=self.imgConfirmar, border=0, bg='white', activebackground='white', command=lambda:self.verificar_campos_cadastro('cadastrar'))
self.botaoConfirmar.place(relx=0.82, rely=0.90)
def aba_ordem_de_servico(self):
global img
img = PhotoImage(file='image/nova.png')
botAltribuirOS = Button(self.framePri, text='Novo O.S', image=img, compound=TOP, font=('arial', 9), bg='white', fg='black', relief=SOLID, border=0, command=self.criar_ordem_de_servico)
botAltribuirOS.place(relx=0.070, rely=0.040)
global img2
img2 = PhotoImage(file='image/alterar2.png')
botEditarOS = Button(self.framePri, text='Editar', image=img2, compound=TOP, font=('arial', 9), bg='white', fg='black', relief=SOLID, border=0)
botEditarOS.place(relx=0.160, rely=0.040)
global img3
img3 = PhotoImage(file='image/ferramenta.png')
botAlterarOS = Button(self.framePri, text='Cad. Peça', image=img3, compound=TOP, font=('arial', 9), bg='white', fg='black', relief=SOLID, border=0, command=self.janela_exibir_pecas)
botAlterarOS.place(relx=0.230, rely=0.040)
self.frameBuscar = Frame(self.framePri, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
self.frameBuscar.place(relx=0.060, rely=0.190, relwidth=0.880, relheight=0.180)
#Label e Combobox para escolher qual tipo de pesquisa será feita
labelPesquisar = Label(self.frameBuscar, text='Buscar por', font=('arial', 10), bg='white', fg='#344f84')
labelPesquisar.place(relx=0.060, rely=0.250)
self.boxPesquisar = ttk.Combobox(self.frameBuscar, font=('arial',10), state='readonly')
self.boxPesquisar['values'] = ('O.S', 'Cliente')
self.boxPesquisar.place(relx=0.130, rely=0.250, relwidth=0.070)
self.boxPesquisar.current(0)
#Barra de busca e botão para pesquisar
self.strCampoBusca = StringVar()
self.strCampoBusca.trace('w', self.buscar)
self.campoBusca = Entry(self.frameBuscar, font=('arial', 12), border=2, relief=GROOVE)
self.campoBusca.place(relx=0.050, rely=0.505, relwidth=0.140)
self.campoBusca.bind("<Return>", self.buscar)
self.campoBusca.focus_force()
self.imgLupa = PhotoImage(file="image/lupa.png")
botBuscar = Button(self.frameBuscar, image=self.imgLupa, bg='white', border=0, activebackground='white')
botBuscar.place(relx=0.190, rely=0.502)
#Filtrar campo de tempo
labelDateDe = Label(self.frameBuscar, text='Data de', font=('arial', 10), bg='white', fg='#344f84')
labelDateDe.place(relx=0.460, rely=0.250)
dia = Entry(self.frameBuscar, font=('arial',10), width=3, border=2, relief=GROOVE)
dia.place(relx=0.470, rely=0.505)
barra = Label(self.frameBuscar, text='/', font=('arial',10), bg='white', fg='#344f84')
barra.place(relx=0.497, rely=0.505)
mes = Entry(self.frameBuscar, font=('arial',10), width=3, border=2, relief=GROOVE)
mes.place(relx=0.510, rely=0.505)
barra = Label(self.frameBuscar, text='/', font=('arial',10), bg='white', fg='#344f84')
barra.place(relx=0.537, rely=0.505)
ano = Entry(self.frameBuscar, font=('arial',10), width=5, border=2, relief=GROOVE)
ano.place(relx=0.547, rely=0.505)
labelDateDe = Label(self.frameBuscar, text='Até', font=('arial', 10), bg='white', fg='#344f84')
labelDateDe.place(relx=0.660, rely=0.250)
dia2 = Entry(self.frameBuscar, font=('arial',10), width=3, border=2, relief=GROOVE)
dia2.place(relx=0.670, rely=0.505)
barra = Label(self.frameBuscar, text='/', font=('arial',10), bg='white', fg='#344f84')
barra.place(relx=0.697, rely=0.505)
mes2 = Entry(self.frameBuscar, font=('arial',10), width=3, border=2, relief=GROOVE)
mes2.place(relx=0.710, rely=0.505)
barra = Label(self.frameBuscar, text='/', font=('arial',10), bg='white', fg='#344f84')
barra.place(relx=0.737, rely=0.505)
ano2 = Entry(self.frameBuscar, font=('arial',10), width=5, border=2, relief=GROOVE)
ano2.place(relx=0.747, rely=0.505)
#Frame onde ficará contagens de Retrabalhos do dia
self.frameTotUrgente = Frame(self.framePri, highlightbackground='white', highlightthickness=2, bg='white')
self.frameTotUrgente.place(relx=0.550, rely=0.020, relwidth=0.120, relheight=0.150)
labelTitle1 = Label(self.frameTotUrgente, text='Urgente', bg='white', fg='red', font=('arial', 13, 'bold'))
labelTitle1.place(relx=0.5, rely=0.150, relheight=0.210, anchor='center')
lbOSUrgente = Label(self.frameTotUrgente, bg='white', fg='red', font=('arial', 25, 'bold'))
lbOSUrgente.place(relx=0.5, rely=0.5, anchor="center")
labelHoje = Label(self.frameTotUrgente, text='Total', bg='white', fg='red', font=('arial', 10, 'bold'))
labelHoje.place(relx=0.5, rely=0.800, anchor='center')
#Criando balão de mensagem
balao_info = tix.Balloon(self.aba4, bg='#e6f9ff')
balao_info.bind_widget(self.frameTotUrgente, balloonmsg='Número de O.S aberta com urgência.')
#Configurando cores de fundo do balão
balao_info.subwidgets_all()[1].config(bg='#e6f9ff')
balao_info.subwidgets_all()[2].config(bg='#e6f9ff')
balao_info.subwidgets_all()[3].config(bg='#e6f9ff')
balao_info.subwidgets_all()[4].config(bg='#e6f9ff')
#Removendo seta padrão do balão de mensagem
balao_info.subwidget('label')['image'] = BitmapImage()
self.dataHoje = datetime.now().date()
try:
self.cursor.execute("select count(*) from ordem_servico where TipoOS = 'Urgente' and Estado = 'Aberto'")
lbOSUrgente['text'] = self.cursor.fetchall()[0][0]
except:
pass
#Frame onde ficará contagens de Operações do dia
self.frameTotAtrasada = Frame(self.framePri, highlightbackground='white', highlightthickness=2, bg='white')
self.frameTotAtrasada.place(relx=0.700, rely=0.020, relwidth=0.120, relheight=0.150)
labelTitle1 = Label(self.frameTotAtrasada, text='Atrasadas', bg='white', fg='yellow', font=('arial', 13, 'bold'))
labelTitle1.place(relx=0.5, rely=0.150, relheight=0.210, anchor='center')
lbOSAtrada = Label(self.frameTotAtrasada, bg='white', fg='yellow', font=('arial', 25, 'bold'))
lbOSAtrada.place(relx=0.5, rely=0.5, anchor="center")
labelHoje = Label(self.frameTotAtrasada, text='Total', bg='white', fg='yellow', font=('arial', 10, 'bold'))
labelHoje.place(relx=0.5, rely=0.800, anchor='center')
#Criando balão de mensagem
balao_info = tix.Balloon(self.aba4, bg='#e6f9ff')
balao_info.bind_widget(self.frameTotAtrasada, balloonmsg='Número de O.S aberta com atrasos.')
#Configurando cores de fundo do balão
balao_info.subwidgets_all()[1].config(bg='#e6f9ff')
balao_info.subwidgets_all()[2].config(bg='#e6f9ff')
balao_info.subwidgets_all()[3].config(bg='#e6f9ff')
balao_info.subwidgets_all()[4].config(bg='#e6f9ff')
#Removendo seta padrão do balão de mensagem
balao_info.subwidget('label')['image'] = BitmapImage()
try:
self.cursor.execute("select count(*) from ordem_servico where DataPrevista < '"+str(self.dataHoje)+"' and Estado = 'Aberto'")
lbOSAtrada['text'] = self.cursor.fetchall()[0][0]
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
#Frame onde ficará contagens de OS
self.frameTotPrazo = Frame(self.framePri, highlightbackground='white', highlightthickness=2, bg='white')
self.frameTotPrazo.place(relx=0.840, rely=0.020, relwidth=0.120, relheight=0.150)
labelTitle1 = Label(self.frameTotPrazo, text='No Prazo', bg='white', fg='#32cd32', font=('arial', 13, 'bold'))
labelTitle1.place(relx=0.5, rely=0.150, relheight=0.210, anchor='center')
lbOSPrazo = Label(self.frameTotPrazo, bg='white', fg='#32cd32', font=('arial', 25, 'bold'))
lbOSPrazo.place(relx=0.5, rely=0.5, anchor="center")
labelTotal = Label(self.frameTotPrazo, text='Total', bg='white', fg='#32cd32', font=('arial', 10, 'bold'))
labelTotal.place(relx=0.5, rely=0.800, anchor='center')
#Criando balão de mensagem
balao_info = tix.Balloon(self.aba4, bg='#e6f9ff')
balao_info.bind_widget(self.frameTotPrazo, balloonmsg='Número de O.S aberta dentro da data estipulada.')
#Configurando cores de fundo do balão
balao_info.subwidgets_all()[1].config(bg='#e6f9ff')
balao_info.subwidgets_all()[2].config(bg='#e6f9ff')
balao_info.subwidgets_all()[3].config(bg='#e6f9ff')
balao_info.subwidgets_all()[4].config(bg='#e6f9ff')
#Removendo seta padrão do balão de mensagem
balao_info.subwidget('label')['image'] = BitmapImage()
try:
#Consultando no banco de dados as O.S Pausada de modo distintas
self.cursor.execute("select count(*) from ordem_servico where DataPrevista >= '"+str(self.dataHoje)+"' and Estado = 'Aberto'")
lbOSPrazo['text'] = self.cursor.fetchall()[0][0]
except: pass
labelTitle1 = Label(self.framePri, text='Ordem de Serviço', bg='white', fg='grey', font=('arial', 13))
labelTitle1.place(relx=0.060, rely=0.403)
#Frame onde ficará os resultado das buscas feitas ao Banco de Dados
self.frameDados = Frame(self.framePri, highlightbackground='#e6e6e6', highlightcolor='white', highlightthickness=1, bg='white')
self.frameDados.place(relx=0.060, rely=0.450, relwidth=0.880, relheight=0.530)
#Posiçãao estática para classificação das colunas
self.sinal = 0
#Tree view onde exibirá as Ordem de Serviços
self.OrdemServico = ttk.Treeview(self.frameDados, column=('1','2','3','4','5','6','7','8'), show='headings')
'''
self.OrdemServico.heading('1', text='ID')
self.OrdemServico.heading('2', text='Ordem de Serviço')
self.OrdemServico.heading('3', text='Data de Abertura')
self.OrdemServico.heading('4', text='Nº de Peça')
self.OrdemServico.heading('5', text='Operações Realizada')
self.OrdemServico.heading('6', text='Retrabalhos')
self.OrdemServico.heading('7', text='Total de Horas')
'''
self.OrdemServico.heading('1', text='ID')
self.OrdemServico.heading('2', text='OS')
self.OrdemServico.heading('3', text='Cliente')
self.OrdemServico.heading('4', text='Produto')
self.OrdemServico.heading('5', text='QTDE')
self.OrdemServico.heading('6', text='Tipo/OS')
self.OrdemServico.heading('7', text='Data Prevista')
self.OrdemServico.heading('8', text='Situação')
self.OrdemServico.column("1", width=1, anchor='n')
self.OrdemServico.column("2", width=100, anchor='n')
self.OrdemServico.column("3", width=200, anchor='n')
self.OrdemServico.column("4", width=200, anchor='n')
self.OrdemServico.column("5", width=50, anchor='n')
self.OrdemServico.column("6", width=100, anchor='n')
self.OrdemServico.column("7", width=100, anchor='n')
self.OrdemServico.column("8", width=100, anchor='n')
self.OrdemServico.place(relx=0, rely=0, relwidth=0.975, relheight=0.999)
self.OrdemServico.bind("<Double-1>", self.exibir_toplevel_inicio)
self.OrdemServico.bind("<Return>", self.exibir_toplevel_inicio)
self.OrdemServico.bind("<Escape>", lambda event: self.remover_focus(event, self.OrdemServico))
scrollbar = Scrollbar(self.frameDados, orient="vertical", command=self.OrdemServico.yview)
self.OrdemServico.configure(yscrollcommand=scrollbar.set)
scrollbar.place(relx=0.975, rely=0, relwidth=0.025, relheight=0.999)
self.cursor.execute("select a.id, a.OS, a.Cliente, a.Produto, a.QTDE, a.TipoOS, a.DataPrevista, a.Estado from ordem_servico as a")
osBuscada = self.cursor.fetchall()
for os in osBuscada:
self.OrdemServico.insert("", "end", values=(os[0], os[1], os[2], os[3], os[4], os[5], os[6], os[7]))
'''
try:
#Consultando no banco de dados as O.S finalizadas de modo distintas
self.cursor.execute("select OS, id, DataAberta, Peca from ordem_processo group by OS order by id asc")
osDistintas = self.cursor.fetchall()
#Para cada linha de O.S selecionada, irá armazenar as colunas nas segintes variáveis
for os in osDistintas:
self.cursor.execute("select id, DataInicial from concluidas where OS ="+str(os[0])+" limit 1")
linha = self.cursor.fetchall()
self.cursor.execute("select count(*) from concluidas where OS ="+str(os[0]))
contagemOperacoes = self.cursor.fetchall()
self.cursor.execute("select count(*) from concluidas where Tipo = 'Retrabalhar OS' and OS = "+str(os[0]))
contagemRetrabalho = self.cursor.fetchall()
self.cursor.execute("select count(Peca) from ordem_processo where OS = '"+str(os[0])+"' and Peca = '"+str(os[3])+"' group by Peca order by id asc")
nPecas = self.cursor.fetchall()
#extraindo do banco de dados as informações e armazenando nas variáveis
ID = os[1]
if osDistintas == []:
dataAberta = 'Não Disponível'
nPecas = 'Não Disponível'
else:
dataAberta = os[2].strftime('%d/%m/%Y às %H:%M:%S')
#Consulta SQL a ser feita por parametro
SQL = ("select TempGasto, TempGastoExt from concluidas where OS = "+str(os[0]))
horaTotal = self.somar_total_horas_gastas_os(SQL, 2)
#Adicionando as colunas da respectiva O.S na Treeview
self.OrdemServico.insert("", "end", values=(ID, os[0], dataAberta, nPecas, contagemOperacoes, contagemRetrabalho, horaTotal))
except Exception as erro:
print(f'{erro}, {(erro.__class__)}')
'''
def exibir_habilidade(self):
self.janelaHabilidade = Toplevel()
self.centraliza_tela(900, 500, self.janelaHabilidade)
selecionada = self.viewFuncionarios.selection()[0]
x = self.viewFuncionarios.item(selecionada, "values")
idd = x[0]
frame = Frame(self.janelaHabilidade, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
frame.place(relx=0.050, rely=0.200, relwidth=0.900, relheight=0.650)
linhaCabecario = Label(frame, bg='black', fg='white', text='Niveis Funcionário', font=('arial',12,'bold'))
linhaCabecario.place(relx=0, rely=0, relwidth=1, relheight=0.080)
lbHabilidade1 = Label(frame, text='Desenhar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade2 = Label(frame, text='Revisar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade3 = Label(frame, text='Serrar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade4 = Label(frame, text='Furar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade5 = Label(frame, text='Estampar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade6 = Label(frame, text='Prensar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade7 = Label(frame, text='Rosquear', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade8 = Label(frame, text='Tornear 1º', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade9 = Label(frame, text='Tornear 2º', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade10 = Label(frame, text='Tornear Única', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade11 = Label(frame, text='Fresar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade12 = Label(frame, text='Retificar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade13 = Label(frame, text='Erosão Penetração', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade14 = Label(frame, text='Erosão Fio', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade15 = Label(frame, text='Tratamento Térmico', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade16 = Label(frame, text='Oxidação Negra', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade17 = Label(frame, text='Solda', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade18 = Label(frame, text='Tornear Ponto', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade19 = Label(frame, text='Solda Indução', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade20 = Label(frame, text='Lixar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade21 = Label(frame, text='Esmerilhar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade22 = Label(frame, text='Jatear', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade23 = Label(frame, text='Polir', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade24 = Label(frame, text='Lavar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade25 = Label(frame, text='Embalar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade26 = Label(frame, text='Medir', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade27 = Label(frame, text='Rebarbar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade28 = Label(frame, text='Rosquear Manualmente', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade29 = Label(frame, text='Pintar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade30 = Label(frame, text='Montar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade31 = Label(frame, text='Escarear', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade32 = Label(frame, text='Afiar Ferramenta', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade33 = Label(frame, text='Dobrar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade34 = Label(frame, text='Chanfrar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade35 = Label(frame, text='Soldar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade36 = Label(frame, text='Cortar c/lixadeira', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade37 = Label(frame, text='Cortar c/maçarico', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade38 = Label(frame, text='Aquecer com Maçarico', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade39 = Label(frame, text='Temperar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade40 = Label(frame, text='Revenir', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade41 = Label(frame, text='Desempenar', font=('arial', 10, 'bold'), bg='white', fg='black')
lbHabilidade1.place(relx=0.020, rely=0.130, relwidth=0.180)
lbHabilidade2.place(relx=0.260, rely=0.130, relwidth=0.180)
lbHabilidade3.place(relx=0.510, rely=0.130, relwidth=0.180)
lbHabilidade4.place(relx=0.760, rely=0.130, relwidth=0.180)
lbHabilidade5.place(relx=0.020, rely=0.210, relwidth=0.180)
lbHabilidade6.place(relx=0.260, rely=0.210, relwidth=0.180)
lbHabilidade7.place(relx=0.510, rely=0.210, relwidth=0.180)
lbHabilidade8.place(relx=0.760, rely=0.210, relwidth=0.180)
lbHabilidade9.place(relx=0.020, rely=0.290, relwidth=0.180)
lbHabilidade10.place(relx=0.260, rely=0.290, relwidth=0.180)
lbHabilidade11.place(relx=0.510, rely=0.290, relwidth=0.180)
lbHabilidade12.place(relx=0.760, rely=0.290, relwidth=0.180)
lbHabilidade13.place(relx=0.020, rely=0.370, relwidth=0.180)
lbHabilidade14.place(relx=0.260, rely=0.370, relwidth=0.180)
lbHabilidade15.place(relx=0.510, rely=0.370, relwidth=0.180)
lbHabilidade16.place(relx=0.760, rely=0.370, relwidth=0.180)
lbHabilidade17.place(relx=0.020, rely=0.450, relwidth=0.180)
lbHabilidade18.place(relx=0.260, rely=0.450, relwidth=0.180)
lbHabilidade19.place(relx=0.510, rely=0.450, relwidth=0.180)
lbHabilidade20.place(relx=0.760, rely=0.450, relwidth=0.180)
lbHabilidade21.place(relx=0.020, rely=0.530, relwidth=0.180)
lbHabilidade22.place(relx=0.260, rely=0.530, relwidth=0.180)
lbHabilidade23.place(relx=0.510, rely=0.530, relwidth=0.180)
lbHabilidade24.place(relx=0.760, rely=0.530, relwidth=0.180)
lbHabilidade25.place(relx=0.020, rely=0.610, relwidth=0.180)
lbHabilidade26.place(relx=0.260, rely=0.610, relwidth=0.180)
lbHabilidade27.place(relx=0.510, rely=0.610, relwidth=0.180)
lbHabilidade28.place(relx=0.760, rely=0.610, relwidth=0.180)
lbHabilidade29.place(relx=0.020, rely=0.690, relwidth=0.180)
lbHabilidade30.place(relx=0.260, rely=0.690, relwidth=0.180)
lbHabilidade31.place(relx=0.510, rely=0.690, relwidth=0.180)
lbHabilidade32.place(relx=0.760, rely=0.690, relwidth=0.180)
lbHabilidade33.place(relx=0.020, rely=0.770, relwidth=0.180)
lbHabilidade34.place(relx=0.260, rely=0.770, relwidth=0.180)
lbHabilidade35.place(relx=0.510, rely=0.770, relwidth=0.180)
lbHabilidade36.place(relx=0.760, rely=0.770, relwidth=0.180)
lbHabilidade37.place(relx=0.020, rely=0.850, relwidth=0.180)
lbHabilidade38.place(relx=0.260, rely=0.850, relwidth=0.180)
lbHabilidade39.place(relx=0.510, rely=0.850, relwidth=0.180)
lbHabilidade40.place(relx=0.760, rely=0.850, relwidth=0.180)
lbHabilidade41.place(relx=0.020, rely=0.930, relwidth=0.180)
lbDados1 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados2 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados3 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados4 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados5 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados6 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados7 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados8 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados9 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados10 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados11 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados12 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados13 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados14 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados15 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados16 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados17 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados18 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados19 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados20 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados21 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados22 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados23 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados24 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados25 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados26 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados27 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados28 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados29 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados30 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados31 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados32 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados33 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados34 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados35 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados36 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados37 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados38 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados39 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados40 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados41 = Label(frame, font=('arial', 10, 'bold'), bg='white', fg='black')
lbDados1.place(relx=0.205, rely=0.130, relwidth=0.02)
lbDados2.place(relx=0.445, rely=0.130, relwidth=0.02)
lbDados3.place(relx=0.695, rely=0.130, relwidth=0.02)
lbDados4.place(relx=0.945, rely=0.130, relwidth=0.02)
lbDados5.place(relx=0.205, rely=0.210, relwidth=0.02)
lbDados6.place(relx=0.445, rely=0.210, relwidth=0.02)
lbDados7.place(relx=0.695, rely=0.210, relwidth=0.02)
lbDados8.place(relx=0.945, rely=0.210, relwidth=0.02)
lbDados9.place(relx=0.205, rely=0.290, relwidth=0.02)
lbDados10.place(relx=0.445, rely=0.290, relwidth=0.02)
lbDados11.place(relx=0.695, rely=0.290, relwidth=0.02)
lbDados12.place(relx=0.945, rely=0.290, relwidth=0.02)
lbDados13.place(relx=0.205, rely=0.370, relwidth=0.02)
lbDados14.place(relx=0.445, rely=0.370, relwidth=0.02)
lbDados15.place(relx=0.695, rely=0.370, relwidth=0.02)
lbDados16.place(relx=0.945, rely=0.370, relwidth=0.02)
lbDados17.place(relx=0.205, rely=0.450, relwidth=0.02)
lbDados18.place(relx=0.445, rely=0.450, relwidth=0.02)
lbDados19.place(relx=0.695, rely=0.450, relwidth=0.02)
lbDados20.place(relx=0.945, rely=0.450, relwidth=0.02)
lbDados21.place(relx=0.205, rely=0.530, relwidth=0.02)
lbDados22.place(relx=0.445, rely=0.530, relwidth=0.02)
lbDados23.place(relx=0.695, rely=0.530, relwidth=0.02)
lbDados24.place(relx=0.945, rely=0.530, relwidth=0.02)
lbDados25.place(relx=0.205, rely=0.610, relwidth=0.02)
lbDados26.place(relx=0.445, rely=0.610, relwidth=0.02)
lbDados27.place(relx=0.695, rely=0.610, relwidth=0.02)
lbDados28.place(relx=0.945, rely=0.610, relwidth=0.02)
lbDados29.place(relx=0.205, rely=0.690, relwidth=0.02)
lbDados30.place(relx=0.445, rely=0.690, relwidth=0.02)
lbDados31.place(relx=0.695, rely=0.690, relwidth=0.02)
lbDados32.place(relx=0.945, rely=0.690, relwidth=0.02)
lbDados33.place(relx=0.205, rely=0.770, relwidth=0.02)
lbDados34.place(relx=0.445, rely=0.770, relwidth=0.02)
lbDados35.place(relx=0.695, rely=0.770, relwidth=0.02)
lbDados36.place(relx=0.945, rely=0.770, relwidth=0.02)
lbDados37.place(relx=0.205, rely=0.850, relwidth=0.02)
lbDados38.place(relx=0.445, rely=0.850, relwidth=0.02)
lbDados39.place(relx=0.695, rely=0.850, relwidth=0.02)
lbDados40.place(relx=0.945, rely=0.850, relwidth=0.02)
lbDados41.place(relx=0.205, rely=0.930, relwidth=0.02)
#Exibindo habilidades do funcionário
try:
self.cursor.execute("select Nivel from competencia where idOperador = '"+idd+"' ")
listaNivel = self.cursor.fetchall()
lista = []
for nivel in range(len(listaNivel)):
lista.append(listaNivel[nivel][0])
lbDados1['text'], lbDados2['text'], lbDados3['text'], lbDados4['text'], lbDados5['text'], lbDados6['text'], lbDados7['text'], lbDados8['text'], lbDados9['text'], lbDados10['text'], lbDados11['text'], lbDados12['text'], lbDados13['text'], lbDados14['text'], lbDados15['text'], lbDados16['text'], lbDados17['text'], lbDados18['text'], lbDados19['text'], lbDados20['text'], lbDados21['text'], lbDados22['text'], lbDados23['text'], lbDados24['text'], lbDados25['text'], lbDados26['text'], lbDados27['text'], lbDados28['text'], lbDados29['text'], lbDados30['text'], lbDados31['text'], lbDados32['text'], lbDados33['text'], lbDados34['text'], lbDados35['text'], lbDados36['text'], lbDados37['text'], lbDados38['text'], lbDados39['text'], lbDados40['text'], lbDados41['text'] = lista
except Exception as erro:
print(f'errou 1 {erro}, {(erro.__class__)}')
return messagebox.showerror('Alerta', 'Verifique a conexão com o Servidor')
self.janelaHabilidade.mainloop()
    def atribuir_tarefa(self):
        """Open the task-assignment ('Atribuição de tarefa') modal wizard.

        Builds a multi-page wizard on a Toplevel window:
        page 1 collects part number, quantity, operation and per-part time;
        page 2 selects machines; page 3 selects operators; page 4 shows a
        read-only summary. Navigation uses botAnterior/botProximo and the
        final commit is delegated to self.inserir_atribuicao().
        Blocks in mainloop() until the window is closed.
        """
        self.janelaAtribuir = Toplevel()
        self.janelaAtribuir.title('Atribuição de tarefa')
        self.janelaAtribuir['bg'] = 'white'
        self.centraliza_tela(400, 400, self.janelaAtribuir)
        self.prox = 0  # index of the wizard page currently shown
        #Buttons to go back, proceed to the next page, and conclude
        self.botAnterior = Button(self.janelaAtribuir, text='Anterior', font=('arial', 10, 'bold'), fg='#344f84', border=1, relief=SOLID, command=self.voltar_folha_anterior)
        self.botProximo = Button(self.janelaAtribuir, text='Próximo', font=('arial', 10, 'bold'), fg='#344f84', border=1, relief=SOLID, command=self.chamar_proxima_folha, state=DISABLED)
        self.botProximo.place(relx=0.770, rely=0.900)
        self.botConcluir = Button(self.janelaAtribuir, text='Concluir', font=('arial', 10, 'bold'), fg='#344f84', border=1, relief=SOLID, command=self.inserir_atribuicao)
        #Frame 1 of page 1, holding the OS, part, time and operation settings
        def verificarPeca(*args):
            # Trace callback: keep the part-number field numeric, max 13 digits;
            # re-validates page 1 on every keystroke.
            h = self.campoPeca.get()
            if len(h) >= 1:
                if not h.isnumeric():
                    self.campoPeca.delete(0, END)
                if h.isnumeric():
                    if len(h) > 13:
                        varPeca.set(h[:13])
            self.verificar_frame1('')
        def verificarQuant(*args):
            # Trace callback: keep the quantity field numeric and capped at 1000.
            h = varQuant.get()
            if len(h) >= 1:
                if not h.isnumeric():
                    self.campoQuant.delete(0, END)
                if h.isnumeric():
                    if int(h) > 1000:
                        varQuant.set(varQuant.get()[:-1])
            self.verificar_frame1('')
        def verificarSelect(*args):
            # Trace callback: any change to the operation combobox re-validates page 1.
            self.verificar_frame1('')
        varPeca = StringVar()
        varPeca.trace('w', verificarPeca)
        varQuant = StringVar()
        varQuant.trace('w', verificarQuant)
        varSelect = StringVar()
        varSelect.trace('w', verificarSelect)
        self.frameAt1 = Frame(self.janelaAtribuir, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        self.frameAt1.place(relx=0.100, rely=0.200, relwidth=0.800, relheight=0.280)
        lbl = Label(self.janelaAtribuir, text='ATRIBUIR PROCESSO', font=('arial', 15, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.500, rely=0.070, anchor='center')
        lbl = Label(self.frameAt1, text='Peça', font=('arial', 13, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.035, rely=0.600)
        self.campoPeca = Entry(self.frameAt1, font=('arial', 12), relief=GROOVE, border=2, textvariable=varPeca)
        self.campoPeca.place(relx=0.210, rely=0.600, relwidth=0.300)
        lbl = Label(self.frameAt1, text='Quant.', font=('arial', 13, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.570, rely=0.600)
        self.campoQuant = Spinbox(self.frameAt1, from_=0, to=1000, font=('arial', 12), relief=GROOVE, border=2, textvariable=varQuant)
        self.campoQuant.place(relx=0.780, rely=0.600, relwidth=0.200)
        self.frameAt2 = Frame(self.janelaAtribuir, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        self.frameAt2.place(relx=0.100, rely=0.530, relwidth=0.800, relheight=0.250)
        lbl = Label(self.frameAt2, text='Operação', font=('arial', 11, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.010, rely=0.100)
        #Fetch the operations from the database and insert them as selection options
        self.cursor.execute('select Codigo_Operacao, Processo_Usinagem from operacao')
        show = self.cursor.fetchall()
        ls = ['Parado']
        for oper in show:
            ls.append((f'{oper[0]} {oper[1]}'))
        self.operacaoSelect = ttk.Combobox(self.frameAt2, font=('arial',11), state="readonly", textvariable=varSelect)
        self.operacaoSelect['values'] = ls
        self.operacaoSelect.current(0)
        self.operacaoSelect.place(relx=0.290, rely=0.100, relwidth=0.500)
        lbl = Label(self.frameAt2, text='Temp/P Oper.', font=('arial', 11, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.010, rely=0.490)
        def verificarH(*args):
            # Trace callback: hours field must be numeric and below 60.
            h = self.campoHora.get()
            if len(h) >= 1:
                if not h.isnumeric():
                    self.campoHora.delete(0, END)
                if h.isnumeric():
                    if int(h) >= 60:
                        varH.set(self.campoHora.get()[:-1])
            self.verificar_frame1('')
        def verificarM(*args):
            # Trace callback: minutes field must be numeric and below 60.
            m = self.campoMinuto.get()
            if len(m) >= 1:
                if not m.isnumeric():
                    self.campoMinuto.delete(0, END)
                if m.isnumeric():
                    if int(m) >= 60:
                        varM.set(self.campoMinuto.get()[:-1])
            self.verificar_frame1('')
        def verificarS(*args):
            # Trace callback: seconds field must be numeric and below 60.
            s = self.campoSegundo.get()
            if len(s) >= 1:
                if not s.isnumeric():
                    self.campoSegundo.delete(0, END)
                if s.isnumeric():
                    if int(s) >= 60:
                        varS.set(self.campoSegundo.get()[:-1])
            self.verificar_frame1('')
        varH = StringVar()
        varH.trace('w', verificarH)
        varM = StringVar()
        varM.trace('w', verificarM)
        varS = StringVar()
        varS.trace('w', verificarS)
        # Compact HH:MM:SS editor built out of three 2-char Entry widgets.
        fHoraPorOperacao = Frame(self.frameAt2, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, width=95, height=23, bg='white')
        fHoraPorOperacao.place(relx=0.380, rely=0.480)
        self.campoHora = Entry(fHoraPorOperacao, font=('arial', 10), width=2, relief=GROOVE, border=0, textvariable=varH)
        self.campoHora.place(x=0, y=1)
        self.campoHora.insert(0, "00")
        lbl = Label(fHoraPorOperacao, text=':', font=('arial', 10, 'bold'), bg='white', border=0)
        lbl.place(x=22, y=0)
        self.campoMinuto = Entry(fHoraPorOperacao, font=('arial', 10), width=2, relief=GROOVE, border=0, textvariable=varM)
        self.campoMinuto.place(x=34, y=1)
        lbl = Label(fHoraPorOperacao, text=':', font=('arial', 10, 'bold'), bg='white', border=0)
        lbl.place(x=57, y=0)
        self.campoMinuto.insert(0, "00")
        self.campoSegundo = Entry(fHoraPorOperacao, font=('arial', 10), width=2, relief=GROOVE, border=0, textvariable=varS)
        self.campoSegundo.place(x=70, y=1)
        self.campoSegundo.insert(0, "00")
        self.acessOperacao = IntVar()
        self.acessoSimultaneoOperacao = Checkbutton(self.janelaAtribuir, text='Acesso Simultâneo', font=('arial', 9), justify=LEFT, variable=self.acessOperacao, bg='white', activebackground='white')
        self.acessoSimultaneoOperacao.place(relx=0.540, rely=0.780)
        #=========================================== PART 2: machine selection ======================================
        self.frameAt3 = Frame(self.janelaAtribuir, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        lbl = Label(self.frameAt3, text='Selecione a Máquina:', font=('arial', 12, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.020, rely=0.050)
        try:
            self.cursor.execute('select Codigo, Maquina from maquinas')
            tuplaMaquina = self.cursor.fetchall()
        except:
            # On DB failure warn the user and close the wizard window.
            if messagebox.showwarning(parent=self.janelaAtribuir, title='Alerta', message='Não foi possível consultar o Banco de Dados'):
                self.janelaAtribuir.destroy()
        sf = ScrolledFrame(self.frameAt3, width=280, height=140)
        sf.place(relx=0.030, rely=0.200, relwidth=0.950, relheight=0.799)
        frame = sf.display_widget(Frame)
        def irr():
            # Placeholder callback (currently unused).
            pass
            #print(self.listaMaquina[0][1].get())
        # listaMaquina rows become [code, Variable] — the Variable holds the
        # check state of the corresponding machine Checkbutton.
        self.listaMaquina = list()
        for valor in tuplaMaquina:
            self.listaMaquina.append([valor[0], valor[1]])
        for i in self.listaMaquina:
            vCod = i[0]
            vMaq = i[1]
            i[1] = Variable()
            i[1].set(0)
            self.maquinaCheck = Checkbutton(frame, text=vCod+" "+vMaq, font=('arial', 9), justify=LEFT, variable=i[1], command = lambda: self.verificar_frame1(''), indicatoron=True, bg='white', width=37, anchor='w')
            self.maquinaCheck.pack(side=TOP, anchor='w')
        self.acessMaquina = IntVar()
        self.controleCheck = 0 #this variable counts how many machines have been selected
        self.controleOne = '' #this variable stores the first machine that was selected
        self.acessoSimultaneoMaquina = Checkbutton(self.janelaAtribuir, text='Acesso Simultâneo', font=('arial', 9), justify=LEFT, variable=self.acessMaquina, bg='white', activebackground='white')
        #=========================================== PART 3: operator selection ======================================
        self.frameAt4 = Frame(self.janelaAtribuir, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        lbl = Label(self.frameAt4, text='Selecione o Operador:', font=('arial', 12, 'bold'), fg='#344f84', bg='white')
        lbl.place(relx=0.020, rely=0.050)
        try:
            self.cursor.execute("select Nome, CPF from funcionarios where Membro = 'ativo'")
            tuplaFuncionario = self.cursor.fetchall()
        except:
            # On DB failure warn the user and close the wizard window.
            if messagebox.showwarning(parent=self.janelaAtribuir, title='Alerta', message='Não foi possível consultar o Banco de Dados'):
                self.janelaAtribuir.destroy()
        sf2 = ScrolledFrame(self.frameAt4, width=280, height=140)
        sf2.place(relx=0.030, rely=0.200, relwidth=0.950, relheight=0.799)
        frame = sf2.display_widget(Frame)
        def ir():
            # Placeholder callback (currently unused).
            pass
            #print(self.listaFuncionario[0][0].get())
        # listaFuncionario rows become [Variable, CPF] — the Variable holds the
        # check state of the corresponding operator Checkbutton.
        self.listaFuncionario = list()
        for valor in tuplaFuncionario:
            self.listaFuncionario.append([valor[0], valor[1]])
        for i in self.listaFuncionario:
            vNome = i[0]
            vCPF = str(i[1])
            i[0] = Variable()
            i[0].set(0)
            Operador3 = Checkbutton(frame, text=vNome+" "+vCPF, font=('arial', 9), justify=LEFT, variable=i[0], command=lambda:self.verificar_frame1(''), indicatoron=True, bg='white', width=37, anchor='w')
            Operador3.pack(side=TOP, anchor='w')
        self.acessOperador = IntVar()
        self.controleCheck2 = 0 #this variable counts how many operators have been selected
        self.controleOne2 = '' #this variable stores the first operator that was selected
        self.acessoSimultaneoOperador = Checkbutton(self.janelaAtribuir, text='Acesso Simultâneo', font=('arial', 9), justify=LEFT, variable=self.acessOperador, bg='white', activebackground='white')
        #=========================================== PART 4: summary page ======================================
        def onFrameConfigure(canvas):
            # Keep the canvas scroll region in sync with the inner frame size.
            canvas.configure(scrollregion=canvas.bbox('all'))
        self.frameAt5 = Frame(self.janelaAtribuir, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        canvas = Canvas(self.frameAt5, borderwidth=0, background='white')
        frame4 = Frame(canvas, background='white')
        scroll = Scrollbar(self.frameAt5, orient='vertical', command=canvas.yview)
        canvas.configure(yscrollcommand=scroll.set)
        scroll.pack(side='right', fill='y')
        canvas.pack(side='left', fill='both', expand=True)
        canvas.create_window((8,8), window=frame4, anchor='nw')
        frame4.bind('<Configure>', lambda event, canvas=canvas: onFrameConfigure(canvas))
        self.lbl1 = Label(frame4, text='O.S:', font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl1.grid(row=1, column=0, pady=(20,2), padx=(0,0))
        self.dados1 = Label(frame4, text='tr', font=('arial', 11, 'bold'), fg='green', bg='white', width=18, anchor=W)
        self.dados1.grid(row=1, column=0, pady=(20,2), padx=(1,0))
        #self.lbl1.pack(side='top', anchor='w')
        self.lbl2 = Label(frame4, text='Peça:', font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl2.grid(row=2, column=0, sticky=N, pady=3)
        ##self.lbl2.pack(side='top', anchor='w')
        self.dados2 = Label(frame4, text='tr', font=('arial', 11, 'bold'), fg='green', bg='white', width=18, anchor=W)
        self.dados2.grid(row=2, column=0, pady=3, padx=(10,0))
        self.lbl3 = Label(frame4, text='Operação:', font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl3.grid(row=3, column=0, sticky=N, pady=3)
        ##self.lbl3.pack(side='top', anchor='w')
        self.dados3 = Label(frame4, text='tr', font=('arial', 10, 'bold'), fg='green', bg='white', width=24, anchor=W)
        self.dados3.grid(row=3, column=0, pady=3, padx=(84,0))
        self.lbl4 = Label(frame4, text='Quantidade:', font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl4.grid(row=4, column=0, sticky=N, pady=2)
        ##self.lbl4.pack(side='top', anchor='w')
        self.dados4 = Label(frame4, text='tr', font=('arial', 11, 'bold'), fg='green', bg='white', width=15, anchor=W)
        self.dados4.grid(row=4, column=0, pady=2, padx=(80,0))
        self.lbl5 = Label(frame4, text='Tempo da Peça:', font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl5.grid(row=5, column=0, sticky=N, pady=2)
        ##self.lbl5.pack(side='top', anchor='w')
        self.dados5 = Label(frame4, text='tr', font=('arial', 11, 'bold'), fg='green', bg='white', width=15, anchor=W)
        self.dados5.grid(row=5, column=0, pady=2, padx=(140,0))
        self.lbl6 = Label(frame4, text="Máquina Selecionada:", font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl6.grid(row=6, column=0, sticky=N, pady=2)
        ##self.lbl6.pack(side='top', anchor='w')
        self.text1 = Text(frame4, font=('arial', 10), border=1, relief=SOLID, bg='white', state=DISABLED, width=37, height=5)
        self.text1.grid(row=7, column=0, pady=2)
        ##self.lbl6.place(relx=0.020, rely=0.650, relwidth=0.950, relheight=0.200)
        self.lbl7 = Label(frame4, text="Operador Selecionado:", font=('arial', 11, 'bold'), fg='#344f84', bg='white', width=30, anchor=W)
        self.lbl7.grid(row=8, column=0, sticky=N, pady=2)
        #self.lbl7.pack(side='top', anchor='w')
        self.text2 = Text(frame4, font=('arial', 10), border=1, relief=SOLID, bg='white', state=DISABLED, width=37, height=5)
        self.text2.grid(row=9, column=0, pady=2)
        # Make the wizard modal relative to tab 4 and block until closed.
        self.janelaAtribuir.transient(self.aba4)
        self.janelaAtribuir.focus_force()
        self.janelaAtribuir.grab_set()
        self.janelaAtribuir.mainloop()
    #Window with the views displayed by each toolbar button
    def exibir_toplevel_inicio(self, event):
        """Open the 'Informações de processo' window for the selected O.S.

        Bound to the OrdemServico treeview. Reads the selected row's OS
        number into self.pegarOS, builds the toolbar, counter/time frames and
        the main operations treeview, loads the OS rows from the database,
        then pre-builds the sibling views (pausas, peças, sobre) and enters a
        modal mainloop().

        event: the Tk event that triggered the binding (unused directly).
        """
        self.janelaDetalhesOS = Toplevel()
        self.janelaDetalhesOS.title('Informações de processo')
        self.janelaDetalhesOS['bg'] = 'white'
        self.centraliza_tela(1000, 600, self.janelaDetalhesOS)
        #Grab the O.S. number that was selected when opening the window
        selecionada = self.OrdemServico.selection()[0]
        self.pegarOS = self.OrdemServico.item(selecionada, "values")
        self.pegarOS = self.pegarOS[1]
        label= Label(self.janelaDetalhesOS, text='Nº', font=('arial', 10, 'bold'), fg='#344f84', bg='white')
        label.place(relx=0.870, rely=0.030)
        label= Label(self.janelaDetalhesOS, text=self.pegarOS, font=('arial', 15, 'bold'), fg='red', bg='white')
        label.place(relx=0.900, rely=0.020)
        #Frame that will hold the OS operation totals
        self.frameExibirTotalOperacao = Frame(self.janelaDetalhesOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        linhaCabecario = Label(self.frameExibirTotalOperacao, bg='#344f84')
        linhaCabecario.place(relx=0, rely=0, relwidth=1, relheight=0.220)
        labelTitle1 = Label(self.frameExibirTotalOperacao, text='Total Operações', bg='#344f84', fg='white', font=('arial', 13, 'bold'))
        labelTitle1.place(relx=0, rely=0, relheight=0.210)
        lbOSConcluidas = Label(self.frameExibirTotalOperacao, text='Concluídas:', bg='white', fg='black', font=('arial', 10,'bold'))
        lbOSConcluidas.place(relx=0.020, rely=0.350)
        lbOSConcluidasDados = Label(self.frameExibirTotalOperacao, bg='white', fg='green', font=('arial', 15, 'bold'))
        lbOSConcluidasDados.place(relx=0.670, rely=0.340)
        lbOSRetrabalho = Label(self.frameExibirTotalOperacao, text='Retrabalhos:', bg='white', fg='black', font=('arial', 10,'bold'))
        lbOSRetrabalho.place(relx=0.020, rely=0.650)
        lbOSRetrabalhoDados = Label(self.frameExibirTotalOperacao, bg='white', fg='red', font=('arial', 15, 'bold'))
        lbOSRetrabalhoDados.place(relx=0.670, rely=0.640)
        # Toolbar buttons; PhotoImage refs are kept on self so they are not
        # garbage-collected while the window is alive.
        self.homeImg = PhotoImage(file="image/home.png")
        botHome = Button(self.janelaDetalhesOS, text='Início', image=self.homeImg, bg='white', activebackground='white', compound=TOP, border=0, command=self.botao_exibir_inicio)
        botHome.place(relx=0.250, rely=0.070)
        self.trocarFuncioanrioImg = PhotoImage(file="image/trocar.png")
        botTrocar = Button(self.janelaDetalhesOS, text='Trocar', image=self.trocarFuncioanrioImg, bg='white', activebackground='white', compound=TOP, border=0)
        botTrocar.place(relx=0.400, rely=0.070)
        self.pausadasImg = PhotoImage(file="image/user.png")
        botPausadas = Button(self.janelaDetalhesOS, text='Pausas', image=self.pausadasImg, bg='white', activebackground='white', compound=TOP, border=0, command=self.botao_exibir_pausas)
        botPausadas.place(relx=0.500, rely=0.070)
        self.pecaImg = PhotoImage(file="image/peca.png")
        botPeca = Button(self.janelaDetalhesOS, text='Peças util.', image=self.pecaImg, bg='white', activebackground='white', compound=TOP, border=0, command=self.botao_exibir_pecas)
        botPeca.place(relx=0.600, rely=0.070)
        self.infoImg = PhotoImage(file="image/info.png")
        botInfo = Button(self.janelaDetalhesOS, text='Sobre', image=self.infoImg, bg='white', activebackground='white', compound=TOP, border=0, command=self.botao_exibir_sobre)
        botInfo.place(relx=0.700, rely=0.070)
        #Frame that will hold the time-related information
        self.frameDadosTempo = Frame(self.janelaDetalhesOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=2, bg='white')
        linhaCabecario = Label(self.frameDadosTempo, bg='#344f84')
        linhaCabecario.place(relx=0, rely=0, relwidth=1, relheight=0.065)
        labelTitle1 = Label(self.frameDadosTempo, text='Tempo/Horas', bg='#344f84', fg='white', font=('arial', 13, 'bold'))
        labelTitle1.place(relx=0, rely=0, relheight=0.065)
        self.lblSemInformacao = Label(self.frameDadosTempo, text='Sem Informações', font=('arial', 8), bg='white')
        self.lblSemInformacao.place(relx=0.5, rely=0.5, anchor="center")
        # Time labels are created but not placed here; presumably placed later
        # by exibir_info_tempo_horas when a row is selected — TODO confirm.
        self.checkVisto = PhotoImage(file='image/check.png')
        self.lb1 = Label(self.frameDadosTempo, text='Tempo Definido:', font=('arial', 10, 'bold'), bg='white', fg='green')
        self.img1 = Label(self.frameDadosTempo, image=self.checkVisto, bg='white')
        self.dadosTempoProgramado = Label(self.frameDadosTempo, font=('arial', 8), bg='white')
        self.lb2 = Label(self.frameDadosTempo, text='Tempo Operando:', font=('arial', 10, 'bold'), bg='white', fg='green')
        self.img2 = Label(self.frameDadosTempo, image=self.checkVisto, bg='white')
        self.dadosTempoOperando = Label(self.frameDadosTempo, font=('arial', 8), bg='white')
        self.lb3 = Label(self.frameDadosTempo, text='Tempo Gasto:', font=('arial', 10, 'bold'), bg='white', fg='green')
        self.img3 = Label(self.frameDadosTempo, image=self.checkVisto, bg='white')
        self.dadosTempoGasto = Label(self.frameDadosTempo, font=('arial', 8), bg='white')
        self.lb4 = Label(self.frameDadosTempo, text='Tempo Extra:', font=('arial', 10, 'bold'), bg='white', fg='red')
        self.check2 = PhotoImage(file='image/check2.png')
        self.img4 = Label(self.frameDadosTempo, image=self.check2, bg='white')
        self.dadosTempoExtra = Label(self.frameDadosTempo, font=('arial', 8), bg='white', fg='red')
        #Frame that will hold the results of the database searches
        self.frameDadosTreeview = Frame(self.janelaDetalhesOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        #State flag for the column-sort direction toggle
        self.sinal = 0
        #Label and Combobox to choose the OS type to search for
        self.labelTipo = Label(self.janelaDetalhesOS, text='Tipo', font=('arial', 10, 'bold'), bg='white')
        self.boxTipo = ttk.Combobox(self.janelaDetalhesOS, font=('arial',10), state='readonly')
        self.boxTipo['values'] = ('Tudo', 'Nova OS', 'Retrabalho OS')
        self.boxTipo.current(0)
        #Label and Combobox to choose which kind of search will be performed
        self.labelPesquisar = Label(self.janelaDetalhesOS, text='Pesquisar por', font=('arial', 10, 'bold'), bg='white')
        self.boxPesquisar = ttk.Combobox(self.janelaDetalhesOS, font=('arial',10), state='readonly')
        self.boxPesquisar['values'] = ('Nome', 'CPF', 'Nº Peça', 'Nº Operação')
        self.boxPesquisar.current(0)
        #Search bar and search button
        self.strCampoBusca = StringVar()
        self.strCampoBusca.trace('w', self.buscar)
        self.campoBusca = Entry(self.janelaDetalhesOS, font=('arial', 12), border=2, relief=GROOVE)
        self.campoBusca.bind("<Return>", self.buscar)
        self.campoBusca.focus_force()
        self.imgBuscar = PhotoImage(file="image/lupa.png")
        self.botBuscar = Button(self.janelaDetalhesOS, image=self.imgBuscar, border=0, command=lambda:self.buscar(0))
        #Treeview that will display the service orders
        self.viewOrdemServico = ttk.Treeview(self.frameDadosTreeview, column=('1','2','3','4','5','6','7'), show='headings')
        self.viewOrdemServico.heading('1', text='ID', command=lambda:self.classificar_coluna('ID', self.sinal))
        self.viewOrdemServico.heading('2', text='Operador', command=lambda:self.classificar_coluna('Operador', self.sinal))
        self.viewOrdemServico.heading('3', text='Peça', command=lambda:self.classificar_coluna('CodigoPeca', self.sinal))
        self.viewOrdemServico.heading('4', text='Operação', command=lambda:self.classificar_coluna('CodigoOperacao', self.sinal))
        self.viewOrdemServico.heading('5', text='QTDE', command=lambda:self.classificar_coluna('Quant', self.sinal))
        self.viewOrdemServico.heading('6', text='Máquina', command=lambda:self.classificar_coluna('Maquina', self.sinal))
        self.viewOrdemServico.heading('7', text='Tipo', command=lambda:self.classificar_coluna('Estado', self.sinal))
        self.viewOrdemServico.column("1", width=-90, anchor='n')
        self.viewOrdemServico.column("2", width=120, anchor='n')
        self.viewOrdemServico.column("3", width=1, anchor='n')
        self.viewOrdemServico.column("4", width=20, anchor='n')
        self.viewOrdemServico.column("5", width=-50, anchor='n')
        self.viewOrdemServico.column("6", width=30, anchor='n')
        self.viewOrdemServico.column("7", width=20, anchor='n')
        self.viewOrdemServico.place(relx=0.025, rely=0.115, relwidth=0.945, relheight=0.840)
        self.viewOrdemServico.bind("<Return>", self.exibir_info_da_operacao)
        self.viewOrdemServico.bind("<ButtonRelease-1>", self.exibir_info_tempo_horas)
        self.viewOrdemServico.bind("<Double-1>", self.exibir_info_da_operacao)
        self.viewOrdemServico.bind("<Escape>", lambda event: self.remover_focus(event, self.viewOrdemServico))
        scrollbar = Scrollbar(self.frameDadosTreeview, orient="vertical", command=self.viewOrdemServico.yview)
        self.viewOrdemServico.configure(yscrollcommand=scrollbar.set)
        scrollbar.place(relx=0.975, rely=0.115, relwidth=0.025, relheight=0.835)
        scrollbar2 = Scrollbar(self.frameDadosTreeview, orient="horizontal", command=self.viewOrdemServico.xview)
        self.viewOrdemServico.configure(xscrollcommand=scrollbar2.set)
        scrollbar2.place(relx=0.025, rely=0.950, relwidth=0.950, relheight=0.045)
        self.labelerro = Label(self.frameDadosTreeview, bg='white')
        # NOTE(review): queries below build SQL by string concatenation with
        # self.pegarOS; consider parameterized queries to avoid SQL injection.
        try:
            self.cursor.execute("select * from concluidas where Tipo = 'Nova OS' and OS = "+self.pegarOS)
            valido = self.cursor.fetchall()
            lbOSConcluidasDados['text'] = len(valido)
            self.cursor.execute("select * from concluidas where Tipo = 'Retrabalhar OS' and OS = "+self.pegarOS)
            valido = self.cursor.fetchall()
            lbOSRetrabalhoDados['text'] = len(valido)
            self.cursor.execute("select a.id, b.Nome, a.Peca, c.Processo_Usinagem, a.Quant, d.Maquina, a.Estado from ordem_processo as a join funcionarios as b on b.id = a.idOperador join operacao as c on c.id = a.idOperacao join maquinas as d on d.id = a.idMaquina where OS ="+self.pegarOS)
            valido = self.cursor.fetchall()
            for i in range (len(valido)):
                #extract the information from the database row into variables
                ID = valido[i][0]
                Operador = valido[i][1]
                CodigoPeca = valido[i][2]
                CodigoOperacao = valido[i][3]
                Quant = valido[i][4]
                Maquina = valido[i][5]
                Estado = valido[i][6]
                self.viewOrdemServico.insert("", "end", values=(ID, Operador, CodigoPeca, CodigoOperacao, Quant, Maquina, Estado))
        except Exception as erro:
            print(f'{erro}, {(erro.__class__)}')
        #Build the widgets that reference this OS's data
        self.botao_exibir_inicio()
        self.exibir_toplevel_pausas()
        self.exibir_toplevel_pecas()
        self.exibir_toplevel_sobre()
        # Make the window modal relative to tab 4 and block until closed.
        self.janelaDetalhesOS.transient(self.aba4)
        self.janelaDetalhesOS.focus_force()
        self.janelaDetalhesOS.grab_set()
        self.janelaDetalhesOS.mainloop()
    def exibir_toplevel_pausas(self):
        """Build the 'Pausas' view inside janelaDetalhesOS.

        Creates the search controls and the pauses treeview, then loads every
        pause recorded for the current OS (self.pegarOS). Rows whose
        DataRetomada is NULL are shown with duration 'Pausado'. Widgets are
        created but not placed here; the 'Pausas' toolbar button is expected
        to place them — TODO confirm against botao_exibir_pausas.
        """
        #Label and Combobox to choose the OS type to search for
        self.labelTipo2 = Label(self.janelaDetalhesOS, text='Tipo', font=('arial', 10, 'bold'), bg='white')
        self.boxTipo2 = ttk.Combobox(self.janelaDetalhesOS, font=('arial',10), state='readonly')
        self.boxTipo2['values'] = ('Tudo', 'Nova OS', 'Retrabalho OS')
        self.boxTipo2.current(0)
        #Label and Combobox to choose which kind of search will be performed
        self.labelPesquisar2 = Label(self.janelaDetalhesOS, text='Pesquisar por', font=('arial', 10, 'bold'), bg='white')
        self.boxPesquisar2 = ttk.Combobox(self.janelaDetalhesOS, font=('arial',10), state='readonly')
        self.boxPesquisar2['values'] = ('Nome', 'CPF')
        self.boxPesquisar2.current(0)
        #Search bar and search button
        self.strCampoBusca2 = StringVar()
        self.strCampoBusca2.trace('w', self.buscar2)
        self.campoBusca2 = Entry(self.janelaDetalhesOS, font=('arial', 12), border=2, relief=GROOVE)
        self.campoBusca2.bind("<Return>", self.buscar2)
        self.campoBusca2.focus_force()
        self.botBuscar2 = Button(self.janelaDetalhesOS, image=self.imgBuscar, border=0, command=lambda:self.buscar2(0))
        self.frameDadosTreeviewPause = Frame(self.janelaDetalhesOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        #Treeview that will display the paused service orders
        self.viewPausas = ttk.Treeview(self.frameDadosTreeviewPause, column=('1','2','3','4','5','6','7','8'), show='headings')
        self.viewPausas.heading('1', text='ID')
        self.viewPausas.heading('2', text='Operador')
        self.viewPausas.heading('3', text='Motivo da Pausa')
        self.viewPausas.heading('4', text='Peça')
        self.viewPausas.heading('5', text='Tipo')
        self.viewPausas.heading('6', text='Duração')
        self.viewPausas.heading('7', text='Produzido')
        self.viewPausas.heading('8', text='Máquina')
        self.viewPausas.column("1", width=-50, anchor='n', minwidth=20)
        self.viewPausas.column("2", width=120, anchor='n', minwidth=200)
        self.viewPausas.column("3", width=30, anchor='n', minwidth=100)
        self.viewPausas.column("4", width=20, anchor='n', minwidth=80)
        self.viewPausas.column("5", width=30, anchor='n', minwidth=100)
        self.viewPausas.column("6", width=20, anchor='n', minwidth=80)
        self.viewPausas.column("7", width=20, anchor='n', minwidth=80)
        self.viewPausas.column("8", width=30, anchor='n', minwidth=80)
        self.viewPausas.place(relx=0.025, rely=0.115, relwidth=0.945, relheight=0.840)
        self.viewPausas.bind("<Return>", self.exibir_info__pausa)
        self.viewPausas.bind("<Double-1>", self.exibir_info__pausa)
        self.viewPausas.bind("<Escape>", lambda event: self.remover_focus(event, self.viewPausas))
        scrollbar3 = Scrollbar(self.frameDadosTreeviewPause, orient="vertical", command=self.viewPausas.yview)
        self.viewPausas.configure(yscrollcommand=scrollbar3.set)
        scrollbar3.place(relx=0.975, rely=0.115, relwidth=0.025, relheight=0.835)
        scrollbar4 = Scrollbar(self.frameDadosTreeviewPause, orient="horizontal", command=self.viewPausas.xview)
        self.viewPausas.configure(xscrollcommand=scrollbar4.set)
        scrollbar4.place(relx=0.025, rely=0.950, relwidth=0.950, relheight=0.045)
        self.labelerro2 = Label(self.frameDadosTreeviewPause, bg='white')
        # NOTE(review): SQL built by string concatenation with self.pegarOS;
        # consider a parameterized query.
        try:
            self.cursor.execute("select ID, Operador, MotivoPause, CodigoPeca, Tipo, timediff(DataRetomada, DataPause), Produzido, Maquina, DataRetomada from pausas where OS ="+self.pegarOS)
            pausas = self.cursor.fetchall()
            if len(pausas) == 0:
                self.lbl = Label(self.frameDadosTreeviewPause, text='0 operações pausadas', font=('arial', 10), bg='white')
                self.lbl.place(relx=0.500, rely=0.500, anchor='center')
            for i in range (len(pausas)):
                #extract the information from the database row into variables
                ID = pausas[i][0]
                Operador = pausas[i][1]
                MotivoPause = pausas[i][2]
                Peca = pausas[i][3]
                Tipo = pausas[i][4]
                Tempo = pausas[i][5]
                Produzido = pausas[i][6]
                Maquina = pausas[i][7]
                # No resume timestamp yet: the pause is still ongoing.
                if str(pausas[i][8]) == 'None':
                    Tempo = 'Pausado'
                self.viewPausas.insert("", "end", values=(ID, Operador, MotivoPause, Peca, Tipo, Tempo, Produzido, Maquina))
        except Exception as erro: print(f'{erro}, {(erro.__class__)}')
    def exibir_toplevel_pecas(self):
        """Build the 'Peças utilizadas' view inside janelaDetalhesOS.

        Creates the parts treeview plus the right-hand info panel with part
        and operation counts for the current OS (self.pegarOS), and fills the
        treeview with the parts linked to the OS via ordem_pecas. Most widgets
        are created unplaced; the 'Peças util.' toolbar button is expected to
        place them — TODO confirm against botao_exibir_pecas.
        """
        self.botAtribuirOper = Button(self.janelaDetalhesOS, text='Função Oper', font=('arial', 9), bg='#344f84', fg='white', border=2, command=self.atribuir_tarefa)
        self.frameDadosTreeviewPecas = Frame(self.janelaDetalhesOS, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=1, bg='white')
        self.Add = PhotoImage(file="image/add.png")
        self.botAddPeca = Button(self.frameDadosTreeviewPecas, image=self.Add, compound=RIGHT, text='Add ', font=('arial', 11), bg='white', fg='black', border=2, relief=FLAT, height=15)
        self.chave = PhotoImage(file="image/chave.png")
        self.lbll = Label(self.frameDadosTreeviewPecas, image=self.chave, compound=LEFT, text=' Peças utilizadas:', font=('arial', 12, 'bold'), bg='white')
        self.lbll.place(relx=0.025, rely=0.013)
        #Treeview that will display the parts used by this OS
        self.viewPecas = ttk.Treeview(self.frameDadosTreeviewPecas, column=('1','2','3','4','5','6','7'), show='headings')
        self.viewPecas.heading('1', text='ID', anchor='w')
        self.viewPecas.heading('2', text='Descrição', anchor='w')
        self.viewPecas.heading('3', text='Referência', anchor='w')
        self.viewPecas.heading('4', text='QTDE', anchor='w')
        self.viewPecas.heading('5', text='Material', anchor='w')
        self.viewPecas.heading('6', text='Tratamento', anchor='w')
        self.viewPecas.heading('7', text='Cod de Desenho', anchor='w')
        self.viewPecas.column("1", width=0, anchor='w', minwidth=50)
        self.viewPecas.column("2", width=0, anchor='w', minwidth=250)
        self.viewPecas.column("3", width=0, anchor='w', minwidth=120)
        self.viewPecas.column("4", width=0, anchor='w', minwidth=50)
        self.viewPecas.column("5", width=0, anchor='w', minwidth=120)
        self.viewPecas.column("6", width=0, anchor='w', minwidth=120)
        self.viewPecas.column("7", width=0, anchor='w', minwidth=120)
        self.viewPecas.place(relx=0.025, rely=0.115, relwidth=0.945, relheight=0.840)
        self.viewPecas.bind("<Return>", self.exibir_info__pausa)
        self.viewPecas.bind("<Double-1>", self.exibir_info__pausa)
        self.viewPecas.bind("<Escape>", lambda event: self.remover_focus(event, self.viewPecas))
        scrollbar3 = Scrollbar(self.frameDadosTreeviewPecas, orient="vertical", command=self.viewPecas.yview)
        self.viewPecas.configure(yscrollcommand=scrollbar3.set)
        scrollbar3.place(relx=0.975, rely=0.115, relwidth=0.025, relheight=0.835)
        scrollbar4 = Scrollbar(self.frameDadosTreeviewPecas, orient="horizontal", command=self.viewPecas.xview)
        self.viewPecas.configure(xscrollcommand=scrollbar4.set)
        scrollbar4.place(relx=0.025, rely=0.950, relwidth=0.950, relheight=0.045)
        # Right-hand info panel with OS-level counts; note self.lblP is
        # rebound repeatedly, so only the last label stays referenced.
        self.frameInformacao = Frame(self.janelaDetalhesOS, highlightbackground='white', highlightcolor='white', highlightthickness=1, bg='white')
        self.doc = PhotoImage(file="image/doc.png")
        self.lblP = Label(self.frameInformacao, image=self.doc, compound=LEFT, text=' Info O.S.', font=('arial', 20, 'bold'), bg='white')
        self.lblP.place(relx=0.999, rely=0.120, anchor='e')
        self.lblP = Label(self.frameInformacao, text='Nº Peças:', font=('arial', 12, 'bold'), bg='white')
        self.lblP.place(relx=0.999, rely=0.300, anchor='e')
        self.lblP = Label(self.frameInformacao, font=('arial', 12), bg='white', border=2, relief=GROOVE, anchor='e')
        self.lblP.place(relx=0.999, rely=0.370, anchor='e', relwidth=0.950)
        # NOTE(review): SQL below is built by string concatenation with
        # self.pegarOS; consider parameterized queries.
        try:
            self.cursor.execute('select count(Produzido) from concluidas where OS = '+self.pegarOS)
            self.lblP['text'] = self.cursor.fetchall()[0][0]
        except Exception as erro: print(erro)
        self.lblP = Label(self.frameInformacao, text='Nº Operações:', font=('arial', 12, 'bold'), bg='white')
        self.lblP.place(relx=0.999, rely=0.460, anchor='e')
        self.lblP = Label(self.frameInformacao, font=('arial', 12), bg='white', border=2, relief=GROOVE, anchor='e')
        self.lblP.place(relx=0.999, rely=0.530, anchor='e', relwidth=0.950)
        try:
            self.cursor.execute('select count(id) from ordem_processo where OS = '+self.pegarOS)
            self.lblP['text'] = self.cursor.fetchall()[0][0]
        except Exception as erro: print(erro)
        self.cc = PhotoImage(file="image/check.png")
        self.lblP = Label(self.frameInformacao, image=self.cc, compound=LEFT, text=' Aberta', font=('arial', 20, 'bold'), bg='white')
        self.lblP.place(relx=0.999, rely=0.910, anchor='e')
        self.frameInformacao.place(relx=0.800, rely=0.200, relwidth=0.180, relheight=0.780)
        try:
            # Resolve the OS's primary key, then load its linked parts.
            self.cursor.execute("select id from ordem_servico where OS = "+self.pegarOS)
            idOS = self.cursor.fetchall()[0][0]
            self.cursor.execute('select b.ID, b.Descricao, b.CodPeca, a.QTDE, b.Material, b.Tratamento, b.CodDesenho from ordem_pecas as a join pecas as b on a.idPeca = b.id join ordem_servico as c on a.idOS = c.id where a.idOS ='+str(idOS))
            listPecas = self.cursor.fetchall()
            for peca in listPecas:
                self.viewPecas.insert("", "end", values=peca)
        except Exception as erro: print(f'{erro}, {(erro.__class__)}')
    def exibir_toplevel_sobre(self):
        """Build the 'Relatório de O.S' summary panel for the currently
        selected service order (self.pegarOS).

        Queries aggregate totals (operators, completions, reworks, parts,
        operations, pauses and time totals) from the database and lays the
        results out as label/value rows inside self.frameInfoRelatorio.
        """
        try:
            # NOTE(review): every query below builds SQL by string
            # concatenation with self.pegarOS; parameterized queries would
            # avoid SQL injection — confirm pegarOS is always numeric.
            self.cursor.execute('select count(distinct idOperador) from ordem_processo where OS = '+self.pegarOS)
            operador = self.cursor.fetchall()[0][0]
            self.cursor.execute('select count(Tipo) from concluidas where Tipo = "Nova OS" and OS = '+self.pegarOS)
            totalConcluídas = self.cursor.fetchall()[0][0]
            self.cursor.execute('select count(Tipo) from concluidas where Tipo = "Retrabalhar OS" and OS = '+self.pegarOS)
            totalRetrabalho = self.cursor.fetchall()[0][0]
            self.cursor.execute('select Produzido from concluidas where OS = '+self.pegarOS)
            totalPecas = self.cursor.fetchall()
            # Count the number of produced parts
            soma = 0
            for produto in totalPecas:
                soma += produto[0]
            totalPecas = soma
            self.cursor.execute('select idOperacao from ordem_processo where OS = '+self.pegarOS)
            totalOperacao = len(self.cursor.fetchall())
            self.cursor.execute('select count(ID) from pausas where OS = '+self.pegarOS)
            totalPausas = self.cursor.fetchall()[0][0]
            SQL = ("select timediff(DataRetomada, DataPause) as result from pausas where OS = '"+self.pegarOS+"' and DataRetomada <> 'none' and MotivoPause <> 'Final de Expediente' and MotivoPause <> 'Horário de Almoço' ")
            totalTempoPausado = self.somar_total_horas_gastas_os(SQL, 1)
            SQL = ('select TempoProgramado from ordem_processo where OS = '+self.pegarOS)
            totalTempoProgramado = self.somar_total_horas_gastas_os(SQL, 1)
            SQL = ('select TempGastoExt from concluidas where OS = '+self.pegarOS)
            totalTempoExtra = self.somar_total_horas_gastas_os(SQL, 1)
            SQL = ('select TempGasto, TempGastoExt from concluidas where OS = '+self.pegarOS)
            totalTempo = self.somar_total_horas_gastas_os(SQL, 2)
        except Exception as erro:
            # NOTE(review): if any query fails, the totals above are left
            # unbound and the Label(...) calls below raise NameError.
            print(f'ERROOOO {erro}, {(erro.__class__)}')
        self.frameInfoRelatorio = Frame(self.janelaDetalhesOS, highlightbackground='black', highlightthickness=2, width=700)
        self.frameLadoTop = Frame(self.frameInfoRelatorio, highlightbackground='black', highlightthickness=2, bg='white')
        self.frameLadoTop.place(relx=0, rely=0, relwidth=1, relheight=0.130)
        self.frameLadoEsquerdo = Frame(self.frameInfoRelatorio, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=0)
        self.frameLadoEsquerdo.place(relx=0, rely=0.130, relwidth=0.385, relheight=0.870)
        self.frameLadoDireito = Frame(self.frameInfoRelatorio, highlightbackground='#e6e6e6', highlightcolor='#e6e6e6', highlightthickness=0)
        self.frameLadoDireito.place(relx=0.385, rely=0.130, relwidth=0.615, relheight=0.870)
        label= Label(self.frameLadoTop, text='Relatório de O.S', font=('arial', 15, 'bold'), fg='#344f84', bg='white')
        label.place(relx=0.400, rely=0.200)
        # Left column: fixed caption labels, one per statistic
        label= Label(self.frameLadoEsquerdo, text='Total de Operadores', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Concluídas', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Retrabalhos', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Peças Produzidas', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Operações', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Pausas Cometidas', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Horas Pausadas', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Horas Determinadas', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Horas Extras', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoEsquerdo, text='Total de Tempo', font=('arial', 12, 'bold'), border=2, relief=SOLID, width=31, fg='#344f84', bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        # Right column: the computed values, in the same order as the captions
        label= Label(self.frameLadoDireito, text=operador, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalConcluídas, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalRetrabalho, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        # Highlight the rework count in red when any rework exists
        if totalRetrabalho != 0:
            label['fg'] = 'red'
        label= Label(self.frameLadoDireito, text=totalPecas, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalOperacao, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalPausas, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalTempoPausado, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalTempoProgramado, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalTempoExtra, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
        label= Label(self.frameLadoDireito, text=totalTempo, font=('arial', 14, 'bold'), border=1, relief=SOLID, width=60, bg='white')
        label.pack(side=TOP, anchor='w', fill=Y, expand=YES)
    # Windows that show the dates of the Operations and Pauses tied to their respective option buttons
    def exibir_info_da_operacao(self, event):
        """Open a Toplevel window with the operator and the open/start/finish
        timestamps of the operation selected in self.viewOrdemServico.

        Args:
            event: the Treeview event that triggered this handler (required
                by the bind signature; not used directly).
        """
        try:
            # Grab the ID of the row selected when the window was opened
            selecionada = self.viewOrdemServico.selection()[0]
            pegarID = self.viewOrdemServico.item(selecionada, "values")
            pegarID = pegarID[0]
        except: return ''
        self.exibir_info_tempo_horas('')
        try:
            # NOTE(review): SQL built by string concatenation with pegarID;
            # a parameterized query would avoid SQL injection.
            self.cursor.execute("select b.Nome, b.CPF, a.DataAberta, a.DataIniciada, a.DataFinalizada from ordem_processo as a join funcionarios as b on b.id = a.idOperador where a.id ="+pegarID)
            result = self.cursor.fetchall()
        except Exception as erro:
            print(f'{erro}, {(erro.__class__)}')
            if messagebox.showerror(parent=self.janelaDetalhesOS, title='Verifique a conexão', message='Sem conexão com Banco de Dados'):
                return 0
        else:
            # Success path: format the CPF as XXX.XXX.XXX-XX
            nome = result[0][0]
            cpf = str(result[0][1])
            a = cpf[0:3]
            b = cpf[3:6]
            c = cpf[6:9]
            d = cpf[9:]
            cpf = a+'.'+b+'.'+c+'-'+d
            # Split "YYYY-MM-DD HH:MM:SS" timestamps into date and time parts.
            # NOTE(review): if DataAberta is None, dataAberta/horaAberta are
            # left unbound and the Label below raises NameError — confirm the
            # column is always populated.
            if result[0][2] != None:
                dataAberta = str(result[0][2])
                dataAberta = dataAberta.split()[0]
                horaAberta = str(result[0][2])
                horaAberta = horaAberta.split()[1]
            if result[0][3] != None:
                dataInicial = str(result[0][3])
                dataInicial = dataInicial.split()[0]
                horaInicial = str(result[0][3])
                horaInicial = horaInicial.split()[1]
            else:
                dataInicial = ' ** '
                horaInicial = ' ** '
            if result[0][4] != None:
                dataFinal = str(result[0][4])
                dataFinal = dataFinal.split()[0]
                horaFinal = str(result[0][4])
                horaFinal = horaFinal.split()[1]
            else:
                dataFinal = ' ** '
                horaFinal = ' ** '
        # Create a mini window to display the operation details
        tela = Toplevel()
        tela.title('Detalhes da operação')
        tela['bg'] = 'white'
        self.centraliza_tela(400, 400, tela)
        def sair(event):
            # Close the details window (bound to the Escape key below)
            tela.destroy()
        tela.bind('<Escape>', sair)
        lbl = Label(tela, text='Nome:', font=('arial', 10), fg='black', bg='white')
        lbl.place(relx=0.020, rely=0.020)
        lbl = Label(tela, text=nome, font=('arial', 10), border=0, relief=GROOVE, bg='white')
        lbl.place(relx=0.200, rely=0.025)
        lbl = Label(tela, text='CPF:', font=('arial', 10), fg='black', bg='white')
        lbl.place(relx=0.020, rely=0.080)
        lbl = Label(tela, text=cpf, font=('arial', 10), border=0, relief=GROOVE, bg='white')
        lbl.place(relx=0.200, rely=0.085)
        canvas = Canvas(tela, bg='#e1e1e1')
        canvas.place(relx=0.020, rely=0.160, relheight=0.005)
        lbl = Label(tela, text='Aberta às '+horaAberta+' no dia '+dataAberta, font=('arial', 10), bg='white')
        lbl.place(relx=0.50, rely=0.220, anchor='center')
        lbl = Label(tela, text='Iniciada às '+horaInicial+' no dia '+dataInicial, font=('arial', 10), bg='white')
        lbl.place(relx=0.50, rely=0.290, anchor='center')
        lbl = Label(tela, text='Finalizada às '+horaFinal+' no dia '+dataFinal, font=('arial', 10), bg='white')
        lbl.place(relx=0.50, rely=0.360, anchor='center')
        canvas = Canvas(tela, bg='#e1e1e1')
        canvas.place(relx=0.020, rely=0.420, relheight=0.005)
        frame = LabelFrame(tela, text='Observação', bg='white', highlightbackground='#e1e1e1', highlightcolor='#e1e1e1')
        frame.place(relx=0.050, rely=0.580, relwidth=0.900, relheight=0.300)
        # Keep the window modal relative to the details window
        tela.transient(self.janelaDetalhesOS)
        tela.focus_force()
        tela.grab_set()
        tela.mainloop()
def exibir_info__pausa(self, event):
try:
#Pegando o Número de O.S que foi selecionada ao abrir janela
selecionada = self.viewPausas.selection()[0]
pegarID = self.viewPausas.item(selecionada, "values")
pegarID = pegarID[0]
except: return ''
self.exibir_info_tempo_horas('')
tela = Toplevel()
#Chamando função para centralizar tela
self.centraliza_tela(400, 400, tela)
def sair(event):
tela.destroy()
tela.bind('<Escape>', sair)
try:
self.cursor.execute('select Operador, CPF, DataPause, DataRetomada from pausas where id = '+pegarID)
detalhes = self.cursor.fetchall()
nome = detalhes[0][0]
cpf = detalhes[0][1]
a = cpf[0:3]
b = cpf[3:6]
c = cpf[6:9]
d = cpf[9:]
cpf = a+'.'+b+'.'+c+'-'+d
horaPause = str(detalhes[0][2])
horaPause = horaPause.split()[1]
dataPause = str(detalhes[0][2])
dataPause = dataPause.split()[0]
horaRetomada = str(detalhes[0][3])
horaRetomada = horaRetomada.split()[1]
dataRetomada = str(detalhes[0][3])
dataRetomada = dataRetomada.split()[0]
except:
nome = ''
cpf = ''
horaRetomada = ''
dataRetomada = ''
lbl = Label(tela, text='Nome', font=('arial', 12, 'bold'), fg='#344f84')
lbl.place(relx=0.020, rely=0.020)
lbl = Label(tela, text=nome, font=('arial', 10), border=2, relief=GROOVE)
lbl.place(relx=0.200, rely=0.020, relwidth=0.700)
lbl = Label(tela, text='CPF', font=('arial', 12, 'bold'), fg='#344f84')
lbl.place(relx=0.020, rely=0.080)
lbl = Label(tela, text=cpf, font=('arial', 10), border=2, relief=GROOVE)
lbl.place(relx=0.200, rely=0.080, relwidth=0.400)
frame = LabelFrame(tela, text='Data', fg='#344f84')
frame.place(relx=0.050, rely=0.200, relwidth=0.900, relheight=0.170)
lbl = Label(frame, text='Hora Pause', font=('arial', 10, 'bold'), fg='#344f84')
lbl.place(relx=0.040, rely=0)
lbl = Label(frame, text=horaPause, font=('arial', 10), border=0, relief=GROOVE)
lbl.place(relx=0.310, rely=0.050, relwidth=0.170)
lbl = Label(frame, text='Data Pause', font=('arial', 10, 'bold'), fg='#344f84')
lbl.place(relx=0.510, rely=0)
lbl = Label(frame, text=dataPause, font=('arial', 10), border=0, relief=GROOVE)
lbl.place(relx=0.780, rely=0.050, relwidth=0.200)
lbl = Label(frame, text='Hora Retomada', font=('arial', 10, 'bold'), fg='#344f84')
lbl.place(relx=0.010, rely=0.450)
lbl = Label(frame, text=horaRetomada, font=('arial', 10), border=0, relief=GROOVE)
lbl.place(relx=0.310, rely=0.470, relwidth=0.170)
lbl = Label(frame, text='Data Retomada', font=('arial', 10, 'bold'), fg='#344f84')
lbl.place(relx=0.480, rely=0.450)
lbl = Label(frame, text=dataRetomada, font=('arial', 10), border=0, relief=GROOVE)
lbl.place(relx=0.780, rely=0.470, relwidth=0.200)
frame = LabelFrame(tela, text='Observação', fg='#344f84')
frame.place(relx=0.050, rely=0.380, relwidth=0.900, relheight=0.300)
tela.transient(self.janelaDetalhesOS)
tela.focus_force()
tela.grab_set()
tela.mainloop()
# Script entry point: instantiate the application GUI.
instancia = Application()
acdf2736bc8c52c8b7d042c39713a3c14db97aef | 37,652 | py | Python | paramak/shape.py | PullRequest-Agent/paramak | f807d88098458d1975dd1a4a47dcea22d9f74785 | [
"MIT"
] | null | null | null | paramak/shape.py | PullRequest-Agent/paramak | f807d88098458d1975dd1a4a47dcea22d9f74785 | [
"MIT"
] | null | null | null | paramak/shape.py | PullRequest-Agent/paramak | f807d88098458d1975dd1a4a47dcea22d9f74785 | [
"MIT"
] | null | null | null |
import json
import numbers
import warnings
from collections import Iterable
from os import fdopen, remove
from pathlib import Path
from shutil import copymode, move
from tempfile import mkstemp
import cadquery as cq
import matplotlib.pyplot as plt
import plotly.graph_objects as go
from cadquery import exporters
from matplotlib.collections import PatchCollection
from matplotlib.patches import Polygon
from paramak.utils import cut_solid, get_hash, intersect_solid, union_solid
class Shape:
"""A shape object that represents a 3d volume and can have materials and
neutronics tallies assigned. Shape objects are not intended to be used
    directly by the user but provide basic functionality for user-facing
classes that inherit from Shape.
Args:
points (list of (float, float, float), optional): the x, y, z
coordinates of points that make up the shape. Defaults to None.
name (str, optional): the name of the shape, used in the graph legend
by export_html. Defaults to None.
color (RGB or RGBA, sequences of 3 or 4 floats, respectively, each in
the range 0-1, optional): the color to use when exporting as html
graphs or png images. Defaults to (0.5, 0.5, 0.5).
material_tag (str, optional): the material name to use when exporting
the neutronics description. Defaults to None.
stp_filename (str, optional): the filename used when saving stp files.
Defaults to None.
stl_filename (str, optional): the filename used when saving stl files.
Defaults to None.
azimuth_placement_angle (iterable of floats or float, optional): the
azimuth angle(s) used when positioning the shape. If a list of
angles is provided, the shape is duplicated at all angles.
Defaults to 0.0.
workplane (str, optional): the orientation of the Cadquery workplane.
(XY, YZ or XZ). Defaults to "XZ".
rotation_axis (str or list, optional): rotation axis around which the
solid is rotated. If None, the rotation axis will depend on the
workplane or path_workplane if applicable. Can be set to "X", "-Y",
"Z", etc. A custom axis can be set by setting a list of two XYZ
floats. Defaults to None.
tet_mesh (str, optional): If not None, a tet mesh flag will be added to
the neutronics description output. Defaults to None.
physical_groups (dict, optional): contains information on physical
groups (volumes and surfaces). Defaults to None.
cut (paramak.shape or list, optional): If set, the current solid will
be cut with the provided solid or iterable in cut. Defaults to
None.
intersect (paramak.shape or list, optional): If set, the current solid
will be interested with the provided solid or iterable of solids.
Defaults to None.
union (paramak.shape or list, optional): If set, the current solid
will be united with the provided solid or iterable of solids.
Defaults to None.
"""
    def __init__(
        self,
        points=None,
        connection_type="mixed",
        name=None,
        color=(0.5, 0.5, 0.5),
        material_tag=None,
        stp_filename=None,
        stl_filename=None,
        azimuth_placement_angle=0.0,
        workplane="XZ",
        rotation_axis=None,
        tet_mesh=None,
        physical_groups=None,
        cut=None,
        intersect=None,
        union=None
    ):
        """Initialise a Shape; see the class docstring for argument
        descriptions. Most assignments go through validating property
        setters."""
        # connection_type must be assigned before points: the points setter
        # reads self.connection_type when tagging each point.
        self.connection_type = connection_type
        self.points = points
        self.stp_filename = stp_filename
        self.stl_filename = stl_filename
        self.color = color
        self.name = name
        self.cut = cut
        self.intersect = intersect
        self.union = union
        self.azimuth_placement_angle = azimuth_placement_angle
        self.workplane = workplane
        self.rotation_axis = rotation_axis
        # neutronics specific properties
        self.material_tag = material_tag
        self.tet_mesh = tet_mesh
        self.physical_groups = physical_groups
        # properties calculated internally by the class
        self.solid = None
        self.render_mesh = None
        # self.volume = None
        # hash_value caches the attribute state of the last solid build;
        # points_hash_value does the same for derived points (see the
        # solid and points property getters).
        self.hash_value = None
        self.points_hash_value = None
        self.x_min = None
        self.x_max = None
        self.z_min = None
        self.z_max = None
    @property
    def solid(self):
        """The CadQuery solid of the 3d object. Returns a CadQuery workplane
        or CadQuery Compound.

        The solid is rebuilt lazily: if the hash of the shape's attributes
        (ignoring the cached solid and hash themselves) differs from the
        hash recorded at the last build, create_solid() is re-run.
        """
        ignored_keys = ["_solid", "_hash_value"]
        if get_hash(self, ignored_keys) != self.hash_value:
            self.create_solid()
            self.hash_value = get_hash(self, ignored_keys)
        return self._solid

    @solid.setter
    def solid(self, value):
        # Direct assignment bypasses the lazy rebuild bookkeeping.
        self._solid = value
    @property
    def cut(self):
        """Shape(s) to subtract from this solid when it is constructed, or
        None (see the class docstring)."""
        return self._cut

    @cut.setter
    def cut(self, value):
        self._cut = value

    @property
    def intersect(self):
        """Shape(s) to intersect with this solid when it is constructed, or
        None (see the class docstring)."""
        return self._intersect

    @intersect.setter
    def intersect(self, value):
        self._intersect = value

    @property
    def union(self):
        """Shape(s) to unite with this solid when it is constructed, or
        None (see the class docstring)."""
        return self._union

    @union.setter
    def union(self, value):
        self._union = value
@property
def workplane(self):
return self._workplane
@workplane.setter
def workplane(self, value):
acceptable_values = ["XY", "YZ", "XZ", "YX", "ZY", "ZX"]
if value in acceptable_values:
self._workplane = value
else:
raise ValueError(
"Shape.workplane must be one of ",
acceptable_values,
" not ",
value)
    @property
    def rotation_axis(self):
        """Rotation axis: an axis name string ("X", "+Y", "-Z", ...), a list
        of two (X, Y, Z) points defining a custom axis, or None (axis derived
        from the workplane)."""
        return self._rotation_axis

    @rotation_axis.setter
    def rotation_axis(self, value):
        if isinstance(value, str):
            # Named axis, optionally signed
            acceptable_values = \
                ["X", "Y", "Z", "-X", "-Y", "-Z", "+X", "+Y", "+Z"]
            if value not in acceptable_values:
                msg = "Shape.rotation_axis must be one of " + \
                    " ".join(acceptable_values) + \
                    " not " + value
                raise ValueError(msg)
        elif isinstance(value, Iterable):
            # Custom axis: exactly two distinct 3-component numeric tuples
            msg = "Shape.rotation_axis must be a list of two (X, Y, Z) floats"
            if len(value) != 2:
                raise ValueError(msg)
            for point in value:
                if not isinstance(point, tuple):
                    raise ValueError(msg)
                if len(point) != 3:
                    raise ValueError(msg)
                for val in point:
                    if not isinstance(val, (int, float)):
                        raise ValueError(msg)
            if value[0] == value[1]:
                msg = "The two points must be different"
                raise ValueError(msg)
        elif value is not None:
            msg = "Shape.rotation_axis must be a list or a string or None"
            raise ValueError(msg)
        self._rotation_axis = value
@property
def volume(self):
"""Get the total volume of the Shape. Returns a float"""
if isinstance(self.solid, cq.Compound):
return self.solid.Volume()
return self.solid.val().Volume()
@property
def volumes(self):
"""Get the volumes of the Shape. Compound shapes provide a seperate
volume value for each entry. Returns a list of floats"""
all_volumes = []
if isinstance(self.solid, cq.Compound):
for solid in self.solid.Solids():
all_volumes.append(solid.Volume())
return all_volumes
else:
return [self.solid.val().Volume()]
@property
def area(self):
"""Get the total surface area of the Shape. Returns a float"""
if isinstance(self.solid, cq.Compound):
return self.solid.Area()
else:
return self.solid.val().Area()
@property
def areas(self):
"""Get the surface areas of the Shape. Compound shapes provide a
seperate area value for each entry. Returns a list of floats"""
all_areas = []
if isinstance(self.solid, cq.Compound):
for face in self.solid.Faces():
all_areas.append(face.Area())
return all_areas
else:
for face in self.solid.val().Faces():
all_areas.append(face.Area())
return all_areas
    @property
    def hash_value(self):
        """Attribute hash recorded the last time the solid was built; the
        solid property compares against it to decide whether to rebuild."""
        return self._hash_value

    @hash_value.setter
    def hash_value(self, value):
        self._hash_value = value

    @property
    def points_hash_value(self):
        """Attribute hash recorded the last time the points were derived;
        the points property compares against it to decide whether to call
        find_points() again."""
        return self._points_hash_value

    @points_hash_value.setter
    def points_hash_value(self, value):
        self._points_hash_value = value
@property
def color(self):
return self._color
@color.setter
def color(self, value):
error = False
if isinstance(value, (list, tuple)):
if len(value) in [3, 4]:
for i in value:
if not isinstance(i, (int, float)):
error = True
else:
error = True
else:
error = True
# raise error
if error:
raise ValueError(
"Shape.color must be a list or tuple of 3 or 4 floats")
self._color = value
@property
def material_tag(self):
"""The material_tag assigned to the Shape. Used when taging materials
for use in neutronics descriptions"""
return self._material_tag
@material_tag.setter
def material_tag(self, value):
if value is None:
self._material_tag = value
elif isinstance(value, str):
if len(value) > 27:
msg = "Shape.material_tag > 28 characters." + \
"Use with DAGMC will be affected." + str(value)
warnings.warn(msg, UserWarning)
self._material_tag = value
else:
raise ValueError("Shape.material_tag must be a string", value)
@property
def tet_mesh(self):
return self._tet_mesh
@tet_mesh.setter
def tet_mesh(self, value):
if value is not None and not isinstance(value, str):
raise ValueError("Shape.tet_mesh must be a string", value)
self._tet_mesh = value
@property
def name(self):
"""The name of the Shape, used to identify Shapes when exporting_html
"""
return self._name
@name.setter
def name(self, value):
if value is not None and not isinstance(value, str):
raise ValueError("Shape.name must be a string", value)
self._name = value
    @property
    def points(self):
        """The x, y (and optional connection-type) entries that make up the
        2d profile of the shape.

        For subclasses that define find_points(), the points are re-derived
        lazily: if the hash of the shape's attributes (ignoring the cached
        points and hash) differs from the hash recorded at the last
        derivation, find_points() is re-run.
        """
        ignored_keys = ["_points", "_points_hash_value"]
        if hasattr(self, 'find_points') and \
                self.points_hash_value != get_hash(self, ignored_keys):
            self.find_points()
            self.points_hash_value = get_hash(self, ignored_keys)
        return self._points
@points.setter
def points(self, values):
if values is not None:
if not isinstance(values, list):
raise ValueError("points must be a list")
for value in values:
if type(value) not in [list, tuple]:
msg = "individual points must be a list or a tuple." + \
"{} in of type {}".format(value, type(value))
raise ValueError(msg)
for value in values:
# Checks that the length of each tuple in points is 2 or 3
if len(value) not in [2, 3]:
msg = "individual points contain 2 or 3 entries {} has a \
length of {}".format(value, len(values[0]))
raise ValueError(msg)
# Checks that the XY points are numbers
if not isinstance(value[0], numbers.Number):
msg = "The first value in the tuples that make \
up the points represents the X value \
and must be a number {}".format(value)
raise ValueError(msg)
if not isinstance(value[1], numbers.Number):
msg = "The second value in the tuples that make \
up the points represents the X value \
and must be a number {}".format(value)
raise ValueError(msg)
# Checks that only straight and spline are in the connections
# part of points
if len(value) == 3:
if value[2] not in ["straight", "spline", "circle"]:
msg = 'individual connections must be either \
"straight", "circle" or "spline"'
raise ValueError(msg)
# checks that the entries in the points are either all 2 long or
# all 3 long, not a mixture
if not all(len(entry) == 2 for entry in values):
if not all(len(entry) == 3 for entry in values):
msg = "The points list should contain entries of length 2 \
or 3 but not a mixture of 2 and 3"
raise ValueError(msg)
if len(values) > 1:
if values[0][:2] == values[-1][:2]:
msg = "The coordinates of the last and first points are \
the same."
raise ValueError(msg)
values.append(values[0])
if self.connection_type != "mixed":
values = [(*p, self.connection_type) for p in values]
self._points = values
    @property
    def stp_filename(self):
        """Sets the Shape.stp_filename attribute which is used as the filename
        when exporting the geometry to stp format. Note, .stp will be added to
        filenames not ending with .step or .stp.

        Args:
            value (str): the value to use as the stp_filename

        Raises:
            incorrect type: only str values are accepted
        """
        return self._stp_filename

    @stp_filename.setter
    def stp_filename(self, value):
        if value is not None:
            if isinstance(value, str):
                # Only the suffix is validated; path existence is not checked
                if Path(value).suffix not in [".stp", ".step"]:
                    msg = "Incorrect filename ending, filename must end with \
                            .stp or .step"
                    raise ValueError(msg)
            else:
                msg = "stp_filename must be a \
                    string {} {}".format(value, type(value))
                raise ValueError(msg)
        self._stp_filename = value
    @property
    def stl_filename(self):
        """Sets the Shape.stl_filename attribute which is used as the filename
        when exporting the geometry to stl format. Note .stl will be added to
        filenames not ending with .stl

        Args:
            value (str): the value to use as the stl_filename

        Raises:
            incorrect type: only str values are accepted
        """
        return self._stl_filename

    @stl_filename.setter
    def stl_filename(self, value):
        if value is not None:
            if isinstance(value, str):
                # Only the suffix is validated; path existence is not checked
                if Path(value).suffix != ".stl":
                    msg = "Incorrect filename ending, filename must end with \
                            .stl"
                    raise ValueError(msg)
            else:
                msg = "stl_filename must be a string \
                    {} {}".format(value, type(value))
                raise ValueError(msg)
        self._stl_filename = value
@property
def azimuth_placement_angle(self):
return self._azimuth_placement_angle
@azimuth_placement_angle.setter
def azimuth_placement_angle(self, value):
error = False
if isinstance(value, (int, float, Iterable)) and \
not isinstance(value, str):
if isinstance(value, Iterable):
for i in value:
if not isinstance(i, (int, float)):
error = True
else:
error = True
if error:
msg = "azimuth_placement_angle must be a float or list of floats"
raise ValueError(msg)
self._azimuth_placement_angle = value
    def create_solid(self):
        """Trace the shape's 2d profile onto a CadQuery workplane.

        Walks self.points, groups consecutive points sharing a connection
        type ("straight", "spline" or "circle") into drawing instructions,
        and replays those instructions on a cq.Workplane. When the shape has
        path_points the profile is additionally drawn at positions along the
        path (presumably for sweep shapes — behavior depends on subclass
        attributes such as force_cross_section and extrusion_start_offset).

        Returns:
            a cq.Workplane with the profile drawn, or None if no points are
            defined.
        """
        solid = None
        if self.points is not None:
            # obtains the first two values of the points list
            XZ_points = [(p[0], p[1]) for p in self.points]
            # obtains the last values of the points list
            connections = [p[2] for p in self.points[:-1]]
            current_linetype = connections[0]
            current_points_list = []
            instructions = []
            # groups together common connection types
            for i, c in enumerate(connections):
                if c == current_linetype:
                    current_points_list.append(XZ_points[i])
                else:
                    current_points_list.append(XZ_points[i])
                    instructions.append(
                        {current_linetype: current_points_list})
                    current_linetype = c
                    current_points_list = [XZ_points[i]]
            instructions.append({current_linetype: current_points_list})
            # close the profile back to the first point if not closed already
            if list(instructions[-1].values())[0][-1] != XZ_points[0]:
                keyname = list(instructions[-1].keys())[0]
                instructions[-1][keyname].append(XZ_points[0])
            if hasattr(self, "path_points"):
                # shapes with a path: the offset sign depends on the workplane
                factor = 1
                if self.workplane in ["XZ", "YX", "ZY"]:
                    factor *= -1
                solid = cq.Workplane(self.workplane).moveTo(0, 0)
                if self.force_cross_section:
                    # draw the full cross-section at every path point but the
                    # last (the final workplane is added after this block)
                    for point in self.path_points[:-1]:
                        solid = solid.workplane(
                            offset=point[1] *
                            factor).moveTo(
                            point[0],
                            0).workplane()
                        for entry in instructions:
                            if list(entry.keys())[0] == "spline":
                                solid = solid.spline(
                                    listOfXYTuple=list(entry.values())[0])
                            if list(entry.keys())[0] == "straight":
                                solid = solid.polyline(list(entry.values())[0])
                            if list(entry.keys())[0] == "circle":
                                p0 = list(entry.values())[0][0]
                                p1 = list(entry.values())[0][1]
                                p2 = list(entry.values())[0][2]
                                solid = solid.moveTo(
                                    p0[0], p0[1]).threePointArc(
                                    p1, p2)
                        solid = solid.close().moveTo(
                            0, 0).moveTo(-point[0], 0).workplane(offset=-point[1] * factor)
                elif self.force_cross_section == False:
                    # draw the cross-section only at the first path point
                    solid = solid.workplane(
                        offset=self.path_points[0][1] *
                        factor).moveTo(
                        self.path_points[0][0],
                        0).workplane()
                    for entry in instructions:
                        if list(entry.keys())[0] == "spline":
                            solid = solid.spline(
                                listOfXYTuple=list(entry.values())[0])
                        if list(entry.keys())[0] == "straight":
                            solid = solid.polyline(list(entry.values())[0])
                        if list(entry.keys())[0] == "circle":
                            p0 = list(entry.values())[0][0]
                            p1 = list(entry.values())[0][1]
                            p2 = list(entry.values())[0][2]
                            solid = solid.moveTo(
                                p0[0], p0[1]).threePointArc(
                                p1, p2)
                    solid = solid.close().moveTo(0,
                                                 0).moveTo(-self.path_points[0][0],
                                                           0).workplane(offset=-self.path_points[0][1] * factor)
                # move to the final path point ready for the last profile
                solid = solid.workplane(
                    offset=self.path_points[-1][1] * factor).moveTo(self.path_points[-1][0], 0).workplane()
            else:
                # for rotate and extrude shapes
                solid = cq.Workplane(self.workplane)
                # for extrude shapes
                if hasattr(self, "extrusion_start_offset"):
                    extrusion_offset = -self.extrusion_start_offset
                    solid = solid.workplane(offset=extrusion_offset)
            # draw the profile on the current workplane (always runs,
            # including after the path_points handling above)
            for entry in instructions:
                if list(entry.keys())[0] == "spline":
                    solid = solid.spline(listOfXYTuple=list(entry.values())[0])
                if list(entry.keys())[0] == "straight":
                    solid = solid.polyline(list(entry.values())[0])
                if list(entry.keys())[0] == "circle":
                    p0 = list(entry.values())[0][0]
                    p1 = list(entry.values())[0][1]
                    p2 = list(entry.values())[0][2]
                    solid = solid.moveTo(p0[0], p0[1]).threePointArc(p1, p2)
        return solid
    def rotate_solid(self, solid):
        """Rotate the given solid to each azimuth_placement_angle and union
        the resulting copies into a single workplane.

        Args:
            solid: the CadQuery solid to duplicate/rotate.

        Returns:
            a cq.Workplane containing the union of all rotated copies.
        """
        # Checks if the azimuth_placement_angle is a list of angles
        if isinstance(self.azimuth_placement_angle, Iterable):
            azimuth_placement_angles = self.azimuth_placement_angle
        else:
            azimuth_placement_angles = [self.azimuth_placement_angle]
        rotated_solids = []
        # Perform separate rotations for each angle about the rotation axis
        for angle in azimuth_placement_angles:
            rotated_solids.append(
                solid.rotate(
                    *self.get_rotation_axis()[0], angle))
        solid = cq.Workplane(self.workplane)
        # Joins the separate solids together
        for i in rotated_solids:
            solid = solid.union(i)
        return solid
def get_rotation_axis(self):
"""Returns the rotation axis for a given shape. If self.rotation_axis
is None, the rotation axis will be computed from self.workplane (or
from self.path_workplane if applicable). If self.rotation_axis is an
acceptable string (eg. "X", "+Y", "-Z"...) then this axis will be used.
If self.rotation_axis is a list of two points, then these two points
will be used to form an axis.
Returns:
list, str: list of two XYZ points and the string of the axis (eg.
"X", "Y"..)
"""
rotation_axis = {
"X": [(-1, 0, 0), (1, 0, 0)],
"-X": [(1, 0, 0), (-1, 0, 0)],
"Y": [(0, -1, 0), (0, 1, 0)],
"-Y": [(0, 1, 0), (0, -1, 0)],
"Z": [(0, 0, -1), (0, 0, 1)],
"-Z": [(0, 0, 1), (0, 0, -1)],
}
if isinstance(self.rotation_axis, str):
# X, Y or Z axis
return (
rotation_axis[self.rotation_axis.replace("+", "")],
self.rotation_axis
)
elif isinstance(self.rotation_axis, Iterable):
# Custom axis
return self.rotation_axis, "custom_axis"
elif self.rotation_axis is None:
# Axis from workplane or path_workplane
if hasattr(self, "path_workplane"):
# compute from path_workplane instead
workplane = self.path_workplane
else:
workplane = self.workplane
return rotation_axis[workplane[1]], workplane[1]
def create_limits(self):
"""Finds the x,y,z limits (min and max) of the points that make up the
face of the shape. Note the Shape may extend beyond this boundary if
splines are used to connect points.
Raises:
ValueError: if no points are defined
Returns:
float, float, float, float, float, float: x_minimum, x_maximum,
y_minimum, y_maximum, z_minimum, z_maximum
"""
if hasattr(self, "find_points"):
self.find_points()
if self.points is None:
raise ValueError("No points defined for", self)
self.x_min = float(min([row[0] for row in self.points]))
self.x_max = float(max([row[0] for row in self.points]))
self.z_min = float(min([row[1] for row in self.points]))
self.z_max = float(max([row[1] for row in self.points]))
return self.x_min, self.x_max, self.z_min, self.z_max
def export_stl(self, filename, tolerance=0.001):
"""Exports an stl file for the Shape.solid. If the provided filename
doesn't end with .stl it will be added
Args:
filename (str): the filename of the stl file to be exported
tolerance (float): the precision of the faceting
"""
path_filename = Path(filename)
if path_filename.suffix != ".stl":
path_filename = path_filename.with_suffix(".stl")
path_filename.parents[0].mkdir(parents=True, exist_ok=True)
with open(path_filename, "w") as f:
exporters.exportShape(self.solid, "STL", f, tolerance)
print("Saved file as ", path_filename)
return str(path_filename)
def export_stp(self, filename=None, units='mm'):
"""Exports an stp file for the Shape.solid. If the filename provided
doesn't end with .stp or .step then .stp will be added. If a
filename is not provided and the shape's stp_filename property is
not None the stp_filename will be used as the export filename.
Args:
filename (str): the filename of the stp
units (str): the units of the stp file, options are 'cm' or 'mm'.
Default is mm.
"""
if filename is not None:
path_filename = Path(filename)
if path_filename.suffix == ".stp" or path_filename.suffix == ".step":
pass
else:
path_filename = path_filename.with_suffix(".stp")
path_filename.parents[0].mkdir(parents=True, exist_ok=True)
elif self.stp_filename is not None:
path_filename = Path(self.stp_filename)
with open(path_filename, "w") as f:
exporters.exportShape(self.solid, "STEP", f)
if units == 'cm':
self._replace(
path_filename,
'SI_UNIT(.MILLI.,.METRE.)',
'SI_UNIT(.CENTI.,.METRE.)')
print("Saved file as ", path_filename)
return str(path_filename)
def export_physical_groups(self, filename):
    """Exports a JSON file containing a look up table which is useful for
    identifying faces and volumes. If filename provided doesn't end with
    .json then .json will be added.

    Args:
        filename (str): the filename used to save the json file

    Returns:
        str: the path the json file was (or would have been) written to
    """
    path_filename = Path(filename)
    if path_filename.suffix != ".json":
        path_filename = path_filename.with_suffix(".json")
    path_filename.parents[0].mkdir(parents=True, exist_ok=True)
    if self.physical_groups is not None:
        # Fix: previously this opened the original `filename`, so the
        # suffix normalisation above was silently ignored.
        with open(path_filename, "w") as outfile:
            json.dump(self.physical_groups, outfile, indent=4)
        print("Saved physical_groups description to ", path_filename)
    else:
        print(
            "Warning: physical_groups attribute is None for {}".format(
                self.name
            )
        )
    # Fix: return the normalised path actually written, consistent with
    # the other export_* methods.
    return str(path_filename)
def export_svg(self, filename):
    """Write Shape.solid to an SVG file, appending a .svg suffix if missing.

    Args:
        filename (str): the filename of the svg file to be exported

    Returns:
        str: the path of the written svg file
    """
    svg_path = Path(filename)
    if svg_path.suffix != ".svg":
        svg_path = svg_path.with_suffix(".svg")
    # make sure the target directory exists before opening the file
    svg_path.parents[0].mkdir(parents=True, exist_ok=True)
    with open(svg_path, "w") as svg_file:
        exporters.exportShape(self.solid, "SVG", svg_file)
    print("Saved file as ", svg_path)
    return str(svg_path)
def export_html(self, filename):
    """Create an html graph of the Shape points and connections.

    Shapes are colored by their .color property and labelled by their
    .name. A .html suffix is appended to the filename when missing.

    Args:
        filename (str): the filename used to save the html graph

    Returns:
        plotly.Figure(): figure object

    Raises:
        ValueError: if the Shape has no points defined
    """
    if self.__class__.__name__ == "SweepCircleShape":
        print('WARNING: export_html will plot path_points for '
              'the SweepCircleShape class')
    if self.points is None:
        raise ValueError("No points defined for", self)
    Path(filename).parents[0].mkdir(parents=True, exist_ok=True)
    html_path = Path(filename)
    if html_path.suffix != ".html":
        html_path = html_path.with_suffix(".html")
    figure = go.Figure()
    figure.update_layout(
        {"title": "coordinates of components", "hovermode": "closest"}
    )
    figure.add_trace(self._trace())
    figure.write_html(str(html_path))
    print("Exported html graph to ", html_path)
    return figure
def _trace(self):
    """Build the plotly scatter trace of the Shape points used internally
    by Shape.export_html.

    Returns:
        plotly trace: trace object
    """
    # scale the 0..1 color channels to 0..255 for plotly rgb()/rgba()
    channel_values = [c * 255 for c in self.color]
    if len(channel_values) == 3:
        color = "rgb(" + str(channel_values).strip("[]") + ")"
    elif len(channel_values) == 4:
        color = "rgba(" + str(channel_values).strip("[]") + ")"
    name = "Shape not named" if self.name is None else self.name
    # hover text for all but the final point, matching the original layout
    text_values = []
    for index, point in enumerate(self.points[:-1]):
        if len(point) == 3:
            text_values.append(
                f"point number={index}<br>"
                f"connection to next point={point[2]}<br>"
                f"x={point[0]}<br>"
                f"z={point[1]}<br>"
            )
        else:
            text_values.append(
                f"point number={index}<br>"
                f"x={point[0]}<br>"
                f"z={point[1]}<br>"
            )
    return go.Scatter(
        {
            "x": [row[0] for row in self.points],
            "y": [row[1] for row in self.points],
            "hoverinfo": "text",
            "text": text_values,
            "mode": "markers+lines",
            "marker": {"size": 5, "color": color},
            "name": name,
        }
    )
def export_2d_image(
        self, filename, xmin=0., xmax=900., ymin=-600., ymax=600.):
    """Save a 2d png image of the reactor, coloring components by their
    Shape.color property. Matplotlib appends .png when missing.

    Args:
        filename (str): the filename of the saved png image.
        xmin (float, optional): minimum x value of the x axis. Defaults to 0..
        xmax (float, optional): maximum x value of the x axis. Defaults to 900..
        ymin (float, optional): minimum y value of the y axis. Defaults to -600..
        ymax (float, optional): maximum y value of the y axis. Defaults to 600..

    Returns:
        matplotlib.plt(): a plt object
    """
    figure, axes = plt.subplots()
    axes.add_collection(self._create_patch())
    axes.axis("equal")
    axes.set(xlim=(xmin, xmax), ylim=(ymin, ymax))
    axes.set_aspect("equal", "box")
    plt.savefig(filename, dpi=100)
    plt.close()
    print("\n saved 2d image to ", filename)
    return plt
def _create_patch(self):
    """Creates a matplotlib polygon patch from the Shape points. This is
    used when making 2d images of the Shape object.

    Raises:
        ValueError: No points defined for the Shape

    Returns:
        Matplotlib object patch: a plotable polygon shape
    """
    if self.points is None:
        raise ValueError("No points defined for", self)
    patches = []
    xylist = []
    for point in self.points:
        xylist.append([point[0], point[1]])
    polygon = Polygon(xylist, closed=True)
    patches.append(polygon)
    patch = PatchCollection(patches)
    if self.color is not None:
        patch.set_facecolor(self.color)
        patch.set_color(self.color)
        patch.color = self.color
        patch.edgecolor = self.color
        # checks to see if an alpha value is provided in the color
        if len(self.color) == 4:
            # Fix: the original did `patch.set_alpha = self.color[-1]`,
            # rebinding the method instead of calling it, so the alpha
            # channel was silently ignored.
            patch.set_alpha(self.color[-1])
    self.patch = patch
    return patch
def neutronics_description(self):
    """Returns a neutronics description of the Shape object, needed by the
    automated neutronics model methods to link stp files with materials.
    Optional Trelis tet meshing commands are included when set.

    Returns:
        dictionary: a dictionary of the step filename and material name
    """
    description = {"material": self.material_tag}
    if self.stp_filename is not None:
        description["stp_filename"] = self.stp_filename
        # ppp looks for the 'filename' key, so duplicate it there too
        description["filename"] = self.stp_filename
    if self.tet_mesh is not None:
        description["tet_mesh"] = self.tet_mesh
    if self.stl_filename is not None:
        description["stl_filename"] = self.stl_filename
    return description
def perform_boolean_operations(self, solid, **kwargs):
    """Performs boolean cut, intersect and union operations if shapes are
    provided"""
    # cut solid, when one is configured on the shape
    if self.cut is not None:
        solid = cut_solid(solid, self.cut)
    # wedge cut is handled independently of self.cut so 'outdated' wedge
    # cuts are not repeated
    wedge_cut = kwargs.get('wedge_cut')
    if wedge_cut is not None:
        solid = cut_solid(solid, wedge_cut)
    # optional boolean intersect
    if self.intersect is not None:
        solid = intersect_solid(solid, self.intersect)
    # optional boolean union
    if self.union is not None:
        solid = union_solid(solid, self.union)
    return solid
def _replace(self, filename, pattern, subst):
"""Opens a file and replaces occurances of a particular string
(pattern)with a new string (subst) and overwrites the file.
Used internally within the paramak to ensure .STP files are
in units of cm not the default mm.
Args:
filename (str): the filename of the file to edit
pattern (str): the string that should be removed
subst (str): the string that should be used in the place of the
pattern string
"""
# Create temp file
fh, abs_path = mkstemp()
with fdopen(fh, 'w') as new_file:
with open(filename) as old_file:
for line in old_file:
new_file.write(line.replace(pattern, subst))
# Copy the file permissions from the old file to the new file
copymode(filename, abs_path)
# Remove original file
remove(filename)
# Move new file
move(abs_path, filename)
| 36.030622 | 112 | 0.546266 |
acdf2902ec17853ba6f0d79e02e50dde4a0912f1 | 2,613 | py | Python | tad/cmds/tad.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | null | null | null | tad/cmds/tad.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | null | null | null | tad/cmds/tad.py | randywessels/tad-blockchain | 08a5f9565aa27f211350717d5e8cda14b46359e4 | [
"Apache-2.0"
] | null | null | null | import click
from tad import __version__
from tad.cmds.configure import configure_cmd
from tad.cmds.farm import farm_cmd
from tad.cmds.init import init_cmd
from tad.cmds.keys import keys_cmd
from tad.cmds.netspace import netspace_cmd
from tad.cmds.plots import plots_cmd
from tad.cmds.show import show_cmd
from tad.cmds.start import start_cmd
from tad.cmds.stop import stop_cmd
from tad.cmds.wallet import wallet_cmd
from tad.cmds.plotnft import plotnft_cmd
from tad.util.default_root import DEFAULT_ROOT_PATH
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def monkey_patch_click() -> None:
    """Neutralise click's start-up encoding check so the pyinstaller-bundled
    Python can run the CLI."""
    # this hacks around what seems to be an incompatibility between the python from `pyinstaller`
    # and `click`
    #
    # Not 100% sure on the details, but it seems that `click` performs a check on start-up
    # that `codecs.lookup(locale.getpreferredencoding()).name != 'ascii'`, and refuses to start
    # if it's not. The python that comes with `pyinstaller` fails this check.
    #
    # This will probably cause problems with the command-line tools that use parameters that
    # are not strict ascii. The real fix is likely with the `pyinstaller` python.
    import click.core
    # replace the private check with a no-op returning 0
    click.core._verify_python3_env = lambda *args, **kwargs: 0  # type: ignore
@click.group(
    help=f"\n Manage tad blockchain infrastructure ({__version__})\n",
    epilog="Try 'tad start node', 'tad netspace -d 192', or 'tad show -s'",
    context_settings=CONTEXT_SETTINGS,
)
@click.option("--root-path", default=DEFAULT_ROOT_PATH, help="Config file root", type=click.Path(), show_default=True)
@click.pass_context
def cli(ctx: click.Context, root_path: str) -> None:
    # Root command group: resolve --root-path and stash it on the click
    # context so every sub-command can read ctx.obj["root_path"].
    # (comment, not a docstring, so click's --help output is unchanged)
    from pathlib import Path

    ctx.ensure_object(dict)
    ctx.obj["root_path"] = Path(root_path)
@cli.command("version", short_help="Show tad version")
def version_cmd() -> None:
    # Print the installed tad version string. (comment, not a docstring,
    # so click's generated help text is unchanged)
    print(__version__)
@cli.command("run_daemon", short_help="Runs tad daemon")
@click.pass_context
def run_daemon_cmd(ctx: click.Context) -> None:
    # Start the tad daemon in the foreground, blocking until it exits.
    # Imports are local so the daemon machinery is only loaded on demand.
    from tad.daemon.server import async_run_daemon
    import asyncio

    asyncio.get_event_loop().run_until_complete(async_run_daemon(ctx.obj["root_path"]))
# Register every sub-command group on the root CLI.
cli.add_command(keys_cmd)
cli.add_command(plots_cmd)
cli.add_command(wallet_cmd)
cli.add_command(plotnft_cmd)
cli.add_command(configure_cmd)
cli.add_command(init_cmd)
cli.add_command(show_cmd)
cli.add_command(start_cmd)
cli.add_command(stop_cmd)
cli.add_command(netspace_cmd)
cli.add_command(farm_cmd)


def main() -> None:
    """Console-script entry point: patch click, then dispatch the CLI."""
    monkey_patch_click()
    cli()  # pylint: disable=no-value-for-parameter


if __name__ == "__main__":
    main()
| 31.107143 | 118 | 0.751244 |
acdf2a4a58c03803f7c05a8e3620d17da02e3764 | 963 | py | Python | JupyterNotebook/PythonScripts/Buy_Hold.py | Jzbonner/dataanalytics-py | 86814ba67beaa0a722d43e67f59053167f07e02a | [
"MIT"
] | null | null | null | JupyterNotebook/PythonScripts/Buy_Hold.py | Jzbonner/dataanalytics-py | 86814ba67beaa0a722d43e67f59053167f07e02a | [
"MIT"
] | 1 | 2021-09-02T17:44:02.000Z | 2021-09-02T17:44:02.000Z | JupyterNotebook/PythonScripts/Buy_Hold.py | Jzbonner/dataanalytics-py | 86814ba67beaa0a722d43e67f59053167f07e02a | [
"MIT"
] | 1 | 2021-10-05T10:26:45.000Z | 2021-10-05T10:26:45.000Z | from zipline.api import order, symbol
from zipline.finance import commission, slippage
stocks = ['AAPL', 'MSFT']
def initialize(context):
    """Zipline setup hook: record the universe and flag that no order has
    been placed yet, then pin commission/slippage to legacy values."""
    context.has_ordered = False
    context.stocks = stocks
    # Explicitly set the commission/slippage to the "old" value until we can
    # rebuild example data.
    # github.com/quantopian/zipline/blob/master/tests/resources/
    # rebuild_example_data#L105
    context.set_commission(commission.PerShare(cost=.0075, min_trade_cost=1.0))
    context.set_slippage(slippage.VolumeShareSlippage())
def handle_data(context, data):
    """Zipline per-bar hook: on the first bar only, buy 100 shares of each
    stock in the universe, then hold for the rest of the backtest."""
    if not context.has_ordered:
        for stock in context.stocks:
            order(symbol(stock), 100)
        # flag set after ordering so subsequent bars are no-ops
        context.has_ordered = True
def _test_args():
"""Extra arguments to use when zipline's automated tests run this example.
"""
import pandas as pd
return {
'start': pd.Timestamp('2008', tz='utc'),
'end': pd.Timestamp('2013', tz='utc'),
} | 28.323529 | 79 | 0.686397 |
acdf2b3445be7278894010c3db2e20e8296053d3 | 18,346 | py | Python | myems-api/excelexporters/storeload.py | lijinchang3/myems | ede74ec2982e610d218095904817268951d3bc06 | [
"MIT"
] | 2 | 2021-02-19T10:22:36.000Z | 2021-02-19T10:23:22.000Z | myems-api/excelexporters/storeload.py | tianlinzhong/myems | 07dd1eb8060f4145be66c8d1a20b5e064a68281b | [
"MIT"
] | null | null | null | myems-api/excelexporters/storeload.py | tianlinzhong/myems | 07dd1eb8060f4145be66c8d1a20b5e064a68281b | [
"MIT"
] | 1 | 2022-01-29T14:18:47.000Z | 2022-01-29T14:18:47.000Z | import base64
import uuid
import os
from openpyxl.chart import (
LineChart,
Reference,
)
from openpyxl.chart.label import DataLabelList
from openpyxl.styles import PatternFill, Border, Side, Alignment, Font
from openpyxl.drawing.image import Image
from openpyxl import Workbook
####################################################################################################################
# PROCEDURES
# Step 1: Validate the report data
# Step 2: Generate excel file
# Step 3: Encode the excel file bytes to Base64
####################################################################################################################
def export(report,
           name,
           reporting_start_datetime_local,
           reporting_end_datetime_local,
           period_type):
    """Render *report* to a temporary Excel workbook and return the file
    contents Base64-encoded.

    Args:
        report (dict): store-load report data; ``None`` yields ``None``.
        name (str): display name placed in the sheet header.
        reporting_start_datetime_local (str): start of the reporting period.
        reporting_end_datetime_local (str): end of the reporting period.
        period_type (str): aggregation period label shown in the header.

    Returns:
        str or None: Base64 string of the .xlsx bytes, or ``None`` when
        there is no report or the generated file could not be read back.
    """
    # Step 1: Validate the report data
    if report is None:
        return None
    # Step 2: Generate excel file from the report data
    filename = generate_excel(report,
                              name,
                              reporting_start_datetime_local,
                              reporting_end_datetime_local,
                              period_type)
    # Step 3: Encode the excel file to Base64
    try:
        with open(filename, 'rb') as binary_file:
            binary_file_data = binary_file.read()
    except IOError:
        # Fix: the original swallowed the error with `pass` and then
        # crashed with a NameError on the unread buffer; fail explicitly.
        return None
    # Base64 encode the bytes and decode to human-readable characters.
    base64_message = base64.b64encode(binary_file_data).decode('utf-8')
    # delete the temporary file from the server (best-effort cleanup)
    try:
        os.remove(filename)
    except OSError:
        # Fix: os.remove raises OSError on failure, not NotImplementedError.
        pass
    return base64_message
def generate_excel(report,
                   name,
                   reporting_start_datetime_local,
                   reporting_end_datetime_local,
                   period_type):
    """Write the store-load report into a new .xlsx workbook on disk and
    return its (uuid-based) filename.

    The sheet contains, in order: a header row, a statistical-analysis
    table, a per-unit-area table, a detailed timestamped data table and
    one line chart per energy category.

    NOTE(review): the expected schema of `report` (keys such as
    'reporting_period', 'names', 'units', 'averages', 'sub_maximums',
    and report['store']['area']) is assumed from usage here — confirm
    against the API endpoint that builds the report dict.
    """
    wb = Workbook()
    ws = wb.active
    # Row height
    ws.row_dimensions[1].height = 102
    for i in range(2, 2000 + 1):
        ws.row_dimensions[i].height = 42
    # Col width
    ws.column_dimensions['A'].width = 1.5
    ws.column_dimensions['B'].width = 25.0
    for i in range(ord('C'), ord('L')):
        ws.column_dimensions[chr(i)].width = 15.0
    # Font
    name_font = Font(name='Constantia', size=15, bold=True)
    title_font = Font(name='宋体', size=15, bold=True)
    # data_font = Font(name='Franklin Gothic Book', size=11)
    table_fill = PatternFill(fill_type='solid', fgColor='1F497D')
    f_border = Border(left=Side(border_style='medium', color='00000000'),
                      right=Side(border_style='medium', color='00000000'),
                      bottom=Side(border_style='medium', color='00000000'),
                      top=Side(border_style='medium', color='00000000')
                      )
    b_border = Border(
        bottom=Side(border_style='medium', color='00000000'),
    )
    b_c_alignment = Alignment(vertical='bottom',
                              horizontal='center',
                              text_rotation=0,
                              wrap_text=False,
                              shrink_to_fit=False,
                              indent=0)
    c_c_alignment = Alignment(vertical='center',
                              horizontal='center',
                              text_rotation=0,
                              wrap_text=True,
                              shrink_to_fit=False,
                              indent=0)
    b_r_alignment = Alignment(vertical='bottom',
                              horizontal='right',
                              text_rotation=0,
                              wrap_text=False,
                              shrink_to_fit=False,
                              indent=0)
    # Img: company logo in the top-left corner
    img = Image("excelexporters/myems.png")
    # img = Image("myems.png")
    img.width = img.width * 0.85
    img.height = img.height * 0.85
    ws.add_image(img, 'B1')
    # Title row: name / period type / reporting date range
    ws['B3'].font = name_font
    ws['B3'].alignment = b_r_alignment
    ws['B3'] = 'Name:'
    ws['C3'].border = b_border
    ws['C3'].alignment = b_c_alignment
    ws['C3'].font = name_font
    ws['C3'] = name
    ws['D3'].font = name_font
    ws['D3'].alignment = b_r_alignment
    ws['D3'] = 'Period:'
    ws['E3'].border = b_border
    ws['E3'].alignment = b_c_alignment
    ws['E3'].font = name_font
    ws['E3'] = period_type
    ws['F3'].font = name_font
    ws['F3'].alignment = b_r_alignment
    ws['F3'] = 'Date:'
    ws['G3'].border = b_border
    ws['G3'].alignment = b_c_alignment
    ws['G3'].font = name_font
    ws['G3'] = reporting_start_datetime_local + "__" + reporting_end_datetime_local
    ws.merge_cells("G3:H3")
    # nothing to report: save the header-only workbook and return early
    if "reporting_period" not in report.keys() or \
            "names" not in report['reporting_period'].keys() or len(report['reporting_period']['names']) == 0:
        filename = str(uuid.uuid4()) + '.xlsx'
        wb.save(filename)
        return filename
    #################################################
    # First: statistical analysis table
    # 6: title
    # 7: table title
    # 8~ca_len table_data
    #################################################
    reporting_period_data = report['reporting_period']
    has_energy_data_flag = True
    if "names" not in reporting_period_data.keys() or \
            reporting_period_data['names'] is None or \
            len(reporting_period_data['names']) == 0:
        has_energy_data_flag = False
        filename = str(uuid.uuid4()) + '.xlsx'
        wb.save(filename)
        return filename
    if has_energy_data_flag:
        ws['B6'].font = title_font
        ws['B6'] = name + ' 统计分析'
        category = reporting_period_data['names']
        # table_title (columns: period / average load / max load / load factor)
        ws['B7'].fill = table_fill
        ws['B7'].font = title_font
        ws['B7'].alignment = c_c_alignment
        ws['B7'] = '报告期'
        ws['B7'].border = f_border
        ws['C7'].font = title_font
        ws['C7'].alignment = c_c_alignment
        ws['C7'] = '平均负荷'
        ws['C7'].border = f_border
        ws['D7'].font = title_font
        ws['D7'].alignment = c_c_alignment
        ws['D7'] = '最大负荷'
        ws['D7'].border = f_border
        ws['E7'].font = title_font
        ws['E7'].alignment = c_c_alignment
        ws['E7'] = '负荷系数'
        ws['E7'].border = f_border
        # table_data: two rows per category (value row + increment-rate row)
        for i, value in enumerate(category):
            row = i * 2 + 8
            ws['B' + str(row)].font = name_font
            ws['B' + str(row)].alignment = c_c_alignment
            ws['B' + str(row)] = reporting_period_data['names'][i] + " (" + reporting_period_data['units'][i] + "/H )"
            ws['B' + str(row)].border = f_border
            ws['B' + str(row + 1)].font = name_font
            ws['B' + str(row + 1)].alignment = c_c_alignment
            ws['B' + str(row + 1)] = "环比"
            ws['B' + str(row + 1)].border = f_border
            ws['C' + str(row)].font = name_font
            ws['C' + str(row)].alignment = c_c_alignment
            ws['C' + str(row)] = round(reporting_period_data['averages'][i], 2) \
                if reporting_period_data['averages'][i] is not None else ''
            ws['C' + str(row)].border = f_border
            ws['C' + str(row)].number_format = '0.00'
            ws['C' + str(row + 1)].font = name_font
            ws['C' + str(row + 1)].alignment = c_c_alignment
            ws['C' + str(row + 1)] = str(round(reporting_period_data['averages_increment_rate'][i] * 100, 2)) + "%" \
                if reporting_period_data['averages_increment_rate'][i] is not None else '0.00%'
            ws['C' + str(row + 1)].border = f_border
            ws['D' + str(row)].font = name_font
            ws['D' + str(row)].alignment = c_c_alignment
            ws['D' + str(row)] = round(reporting_period_data['maximums'][i], 2) \
                if reporting_period_data['maximums'][i] is not None else ''
            ws['D' + str(row)].border = f_border
            ws['D' + str(row)].number_format = '0.00'
            ws['D' + str(row + 1)].font = name_font
            ws['D' + str(row + 1)].alignment = c_c_alignment
            ws['D' + str(row + 1)] = str(round(reporting_period_data['maximums_increment_rate'][i] * 100, 2)) + "%" \
                if reporting_period_data['maximums_increment_rate'][i] is not None else '0.00%'
            ws['D' + str(row + 1)].border = f_border
            ws['E' + str(row)].font = name_font
            ws['E' + str(row)].alignment = c_c_alignment
            ws['E' + str(row)] = round(reporting_period_data['factors'][i], 2) \
                if reporting_period_data['factors'][i] is not None else ''
            ws['E' + str(row)].border = f_border
            ws['E' + str(row)].number_format = '0.00'
            ws['E' + str(row + 1)].font = name_font
            ws['E' + str(row + 1)].alignment = c_c_alignment
            ws['E' + str(row + 1)] = str(round(reporting_period_data['factors_increment_rate'][i] * 100, 2)) + "%" \
                if reporting_period_data['factors_increment_rate'][i] is not None else '0.00%'
            ws['E' + str(row + 1)].border = f_border
    #################################################
    # Second: per-unit-area values for the reporting period
    # 9 + ca_len * 2: title
    # 10 + ca_len * 2: table title
    # per_unit_area_start_row_number + 2 ~ per_unit_area_start_row_number + 2 + ca_len : table_data
    #################################################
    if has_energy_data_flag:
        names = reporting_period_data['names']
        ca_len = len(names)
        per_unit_area_start_row_number = 9 + ca_len * 2
        ws['B' + str(per_unit_area_start_row_number)].font = title_font
        ws['B' + str(per_unit_area_start_row_number)] = name + ' 单位面积值' + str(report['store']['area']) + 'M²'
        category = reporting_period_data['names']
        # table_title
        ws['B' + str(per_unit_area_start_row_number + 1)].fill = table_fill
        ws['B' + str(per_unit_area_start_row_number + 1)].font = title_font
        ws['B' + str(per_unit_area_start_row_number + 1)].alignment = c_c_alignment
        ws['B' + str(per_unit_area_start_row_number + 1)] = '报告期'
        ws['B' + str(per_unit_area_start_row_number + 1)].border = f_border
        ws['C' + str(per_unit_area_start_row_number + 1)].font = title_font
        ws['C' + str(per_unit_area_start_row_number + 1)].alignment = c_c_alignment
        ws['C' + str(per_unit_area_start_row_number + 1)] = '平均负荷'
        ws['C' + str(per_unit_area_start_row_number + 1)].border = f_border
        ws['D' + str(per_unit_area_start_row_number + 1)].font = title_font
        ws['D' + str(per_unit_area_start_row_number + 1)].alignment = c_c_alignment
        ws['D' + str(per_unit_area_start_row_number + 1)] = '最大负荷'
        ws['D' + str(per_unit_area_start_row_number + 1)].border = f_border
        # table_data: one row per category
        for i, value in enumerate(category):
            row_data = per_unit_area_start_row_number + 2 + i
            ws['B' + str(row_data)].font = name_font
            ws['B' + str(row_data)].alignment = c_c_alignment
            ws['B' + str(row_data)] = reporting_period_data['names'][i] + " (" + reporting_period_data['units'][
                i] + "/H/M²)"
            ws['B' + str(row_data)].border = f_border
            ws['C' + str(row_data)].font = name_font
            ws['C' + str(row_data)].alignment = c_c_alignment
            # cell stays blank when the value is falsy-and-not-zero (e.g. None)
            if reporting_period_data['averages_per_unit_area'][i] \
                    or reporting_period_data['averages_per_unit_area'][i] == 0:
                ws['C' + str(row_data)] = round(reporting_period_data['averages_per_unit_area'][i], 2)
            ws['C' + str(row_data)].border = f_border
            ws['C' + str(row_data)].number_format = '0.00'
            ws['D' + str(row_data)].font = name_font
            ws['D' + str(row_data)].alignment = c_c_alignment
            if reporting_period_data['maximums_per_unit_area'][i] \
                    or reporting_period_data['maximums_per_unit_area'][i] == 0:
                ws['D' + str(row_data)] = round(reporting_period_data['maximums_per_unit_area'][i], 2)
            ws['D' + str(row_data)].border = f_border
            ws['D' + str(row_data)].number_format = '0.00'
    ########################################################
    # Third: detailed data table
    # analysis_end_row_number~ analysis_end_row_number + 6*cal_len: line
    # detailed_start_row_number: table title
    # detailed_start_row_number + 1~: table_data
    ########################################################
    has_timestamps_flag = True
    if "timestamps" not in reporting_period_data.keys() or \
            reporting_period_data['timestamps'] is None or \
            len(reporting_period_data['timestamps']) == 0:
        has_timestamps_flag = False
    if has_timestamps_flag:
        timestamps = reporting_period_data['timestamps'][0]
        names = reporting_period_data['names']
        ca_len = len(names)
        time_len = len(timestamps)
        # the detailed table is placed below the space reserved for charts
        line_charts_row_number = 6 * ca_len
        analysis_end_row_number = 12 + 3 * ca_len
        detailed_start_row_number = analysis_end_row_number + line_charts_row_number + 1
        ws['B' + str(detailed_start_row_number)].font = title_font
        ws['B' + str(detailed_start_row_number)] = name + ' 详细数据'
        # table_title: datetime column plus an average/max pair per category
        ws['B' + str(detailed_start_row_number + 1)].fill = table_fill
        ws['B' + str(detailed_start_row_number + 1)].font = name_font
        ws['B' + str(detailed_start_row_number + 1)].alignment = c_c_alignment
        ws['B' + str(detailed_start_row_number + 1)] = "日期时间"
        ws['B' + str(detailed_start_row_number + 1)].border = f_border
        for i in range(0, ca_len):
            col_average = chr(ord('C') + 2 * i)
            col_maximum = chr(ord('D') + 2 * i)
            ws[col_average + str(detailed_start_row_number + 1)].font = name_font
            ws[col_average + str(detailed_start_row_number + 1)].alignment = c_c_alignment
            ws[col_average + str(detailed_start_row_number + 1)] = names[i] + " 平均负荷(" + reporting_period_data['units'][
                i] + "/H)"
            ws[col_average + str(detailed_start_row_number + 1)].border = f_border
            ws[col_maximum + str(detailed_start_row_number + 1)].font = name_font
            ws[col_maximum + str(detailed_start_row_number + 1)].alignment = c_c_alignment
            ws[col_maximum + str(detailed_start_row_number + 1)] = names[i] + " 最大负荷(" + reporting_period_data['units'][
                i] + "/H)"
            ws[col_maximum + str(detailed_start_row_number + 1)].border = f_border
        # table_date: one row per timestamp
        for i in range(0, time_len):
            rows = i + detailed_start_row_number + 2
            ws['B' + str(rows)].font = name_font
            ws['B' + str(rows)].alignment = c_c_alignment
            ws['B' + str(rows)] = timestamps[i]
            ws['B' + str(rows)].border = f_border
            for index in range(0, ca_len):
                col_average = chr(ord('C') + 2 * index)
                col_maximum = chr(ord('D') + 2 * index)
                ws[col_average + str(rows)].font = name_font
                ws[col_average + str(rows)].alignment = c_c_alignment
                ws[col_average + str(rows)] = reporting_period_data['sub_averages'][index][i] \
                    if reporting_period_data['sub_maximums'][index] is not None else ''
                ws[col_average + str(rows)].number_format = '0.00'
                ws[col_average + str(rows)].border = f_border
                ws[col_maximum + str(rows)].font = name_font
                ws[col_maximum + str(rows)].alignment = c_c_alignment
                ws[col_maximum + str(rows)] = reporting_period_data['sub_maximums'][index][i] \
                    if reporting_period_data['sub_maximums'][index] is not None else ''
                ws[col_maximum + str(rows)].number_format = '0.00'
                ws[col_maximum + str(rows)].border = f_border
        ########################################################
        # third: LineChart
        # LineChart requires data from the detailed data table in the Excel file
        # so print the detailed data table first and then print LineChart
        ########################################################
        for i in range(0, ca_len):
            line = LineChart()
            line.title = "报告期 最大负荷 - " + names[i] + "(" + reporting_period_data['units'][i] + ")"
            line.style = 10
            line.x_axis.majorTickMark = 'in'
            line.y_axis.majorTickMark = 'in'
            line.smooth = True
            line.x_axis.crosses = 'min'
            line.height = 8.25
            line.width = 24
            line.dLbls = DataLabelList()
            line.dLbls.dLblPos = 't'
            line.dLbls.showVal = True
            # x categories: the timestamp column of the detailed table
            times = Reference(ws, min_col=2, min_row=detailed_start_row_number + 2,
                              max_row=detailed_start_row_number + 2 + time_len)
            # y series: the category's maximum-load column
            line_data = Reference(ws, min_col=2 + 2 * (i+1), min_row=detailed_start_row_number + 1,
                                  max_row=detailed_start_row_number + 1 + time_len)
            line.add_data(line_data, titles_from_data=True)
            line.set_categories(times)
            ser = line.series[0]
            ser.marker.symbol = "diamond"
            ser.marker.size = 5
            chart_col = 'B'
            chart_cell = str(analysis_end_row_number + 6 * i)
            ws.add_chart(line, chart_col + chart_cell)
    filename = str(uuid.uuid4()) + '.xlsx'
    wb.save(filename)
    return filename
| 43.268868 | 120 | 0.53047 |
acdf2b3a1aa6b682e0774f9f45ee735486b5dce8 | 2,325 | py | Python | locations/spiders/juiceland.py | nbeecher/alltheplaces | f28b75ffbd7a6b09aaf80bf3a46cb563527632de | [
"MIT"
] | 297 | 2017-12-07T01:29:14.000Z | 2022-03-29T06:58:01.000Z | locations/spiders/juiceland.py | nbeecher/alltheplaces | f28b75ffbd7a6b09aaf80bf3a46cb563527632de | [
"MIT"
] | 2,770 | 2017-11-28T04:20:21.000Z | 2022-03-31T11:29:16.000Z | locations/spiders/juiceland.py | nbeecher/alltheplaces | f28b75ffbd7a6b09aaf80bf3a46cb563527632de | [
"MIT"
] | 111 | 2017-11-27T21:40:02.000Z | 2022-01-22T01:21:52.000Z | # -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
class JuicelandSpider(scrapy.Spider):
    """Scrapy spider that collects JuiceLand store locations from the
    chain's all-locations page and yields one GeojsonPointItem per store."""
    name = "juiceland"
    item_attributes = {'brand': 'JuiceLand'}
    allowed_domains = ['juiceland.com']
    start_urls = [
        'https://www.juiceland.com/all-locations/',
    ]

    def parse_hours(self, hours):
        """Convert strings like 'Mo 07:00-22:00' into an opening-hours spec."""
        opening_hours = OpeningHours()
        for hour in hours:
            # Fix: the original wrapped this in a bare `except:`, which also
            # swallowed KeyboardInterrupt/SystemExit; test the match instead.
            match = re.search(r'([A-Za-z]{2})\s([\d:]+)-([\d:]+)', hour)
            if match is None:
                continue
            day, open_time, close_time = match.groups()
            opening_hours.add_range(day=day, open_time=open_time, close_time=close_time, time_format='%H:%M')
        return opening_hours.as_opening_hours()

    def parse_store(self, response):
        """Extract one store's details from its page and yield the item."""
        # coordinates are embedded in an inline script blob
        long = response.xpath('//script[contains(text(), "lng")]/text()').re_first(r'lng":"([\d\.\-]+)"')
        lat = response.xpath('//script[contains(text(), "lng")]/text()').re_first(r'lat":"([\d\.\-]+)"')

        properties = {
            'ref': response.url,
            'name': response.xpath('normalize-space(//*[@itemprop="name"]//text())').extract_first(),
            'addr_full': response.xpath('normalize-space(//span[@itemprop="StreetAddress"]//text())').extract_first(),
            'city': response.xpath('normalize-space(//span[@itemprop="addressLocality"]//text())').extract_first(),
            'state': response.xpath('normalize-space(//span[@itemprop="addressRegion"]//text())').extract_first(),
            'postcode': response.xpath('normalize-space(//span[@itemprop="postalCode"]//text())').extract_first(),
            'country': 'US',
            'phone': response.xpath('normalize-space(//span[@itemprop="telephone"]//text())').extract_first(),
            'website': response.url,
            'lat': lat,
            'lon': long
        }

        hours = self.parse_hours(response.xpath('//*[@itemprop="openingHours"]/@content').extract())
        if hours:
            properties['opening_hours'] = hours

        yield GeojsonPointItem(**properties)

    def parse(self, response):
        """Follow every store link on the all-locations index page."""
        for url in response.xpath('//span[@class="store-info"]/a/@href').extract():
            yield scrapy.Request(response.urljoin(url), callback=self.parse_store)
| 39.40678 | 118 | 0.59957 |
acdf2b4577c507f743d0fbac365d43e9f5fc08a6 | 15,282 | py | Python | rsvis/tools/topwindow/twhfilter.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | null | null | null | rsvis/tools/topwindow/twhfilter.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | 4 | 2020-04-28T11:53:24.000Z | 2022-03-12T00:15:30.000Z | rsvis/tools/topwindow/twhfilter.py | Tom-Hirschberger/DataVisualization | 1aec6a85e2af7ba62ba47e6ee93dc9a7d99c6221 | [
"MIT"
] | 2 | 2020-07-01T15:35:29.000Z | 2021-03-11T17:53:23.000Z | # ===========================================================================
# twhfilter.py ------------------------------------------------------------
# ===========================================================================
# import ------------------------------------------------------------------
# ---------------------------------------------------------------------------
from rsvis.utils.height import Height
from rsvis.utils import imgtools
import rsvis.utils.imgcontainer
from rsvis.tools.widgets import csbox, buttonbox, scalebox
from rsvis.tools.topwindow import tw, twhist
import cv2
import numpy as np
from tkinter import *
from tkinter import ttk
# class -------------------------------------------------------------------
# ---------------------------------------------------------------------------
class TWHFilter(twhist.TWHist):
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def __init__(
        self,
        parent,
        **kwargs
    ):
    """Initialize the filter top window.

    Args:
        parent: the Tk parent widget handed to the TWHist base window.
        **kwargs: forwarded unchanged to twhist.TWHist.__init__.
    """
    # settings --------------------------------------------------------
    super(TWHFilter, self).__init__(parent, **kwargs)
    # start with an empty difference-image list (see set_dimage/compute_dimage)
    self.reset_dimage()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_canvas(self, img, **kwargs):
"""Set the main image canvas with the image to be displayed and the corresponding histogram
"""
super(TWHFilter, self).set_canvas(img, **kwargs)
# set combobox and settingsbox for blurring parameters
self._csbox_blur = csbox.CSBox(self, cbox=[["Model"], [["Average", "Gaussian", "Median", "Bilateral Filtering"]], ["Bilateral Filtering"], ["str"]], sbox=[["Kernel Size", "Sigma", "d", "sigmaColor", "sigmaSpace"], [5, 2.3, 7, 100, 500], ["int", "float", "int", "int", "int"]], bbox=[["Blur Image", "Gradient Image"], [self.blur_image, self.gradient_image]])
self._csbox_blur.grid(row=2, column=0, rowspan=7, sticky=N+W+E+S)
# set combobox and settingsbox for edge detection parameters
self._csbox_edges = csbox.CSBox(self, bbox=[["Get Edges"], [self.get_edges]], sbox=[["Threshold I", "Threshold II", "Aperture Size"], [50, 150, 3], ["int", "int", "int"]])
self._csbox_edges.grid(row=10, column=0, rowspan=4, sticky=N+W+E+S)
# set combobox and settingsbox for thresholding parameters
self._csbox_bthreshold = csbox.CSBox(self, bbox=[["Simple Thresholding"], [self.set_threshold_img]])
self._csbox_bthreshold.grid(row=2, column=1, rowspan=1, sticky=N+W+S+E)
self._csbox_threshold = scalebox.ScaleBox(self, scbox=[["Thresh"], [[0, 255, 2, 0]], ["int"]], orient=HORIZONTAL, func=self.set_threshold_img_mask)
self._csbox_threshold.grid(row=3, column=1, rowspan=1, sticky=N+W+S+E)
self._csbox_athreshold = csbox.CSBox(self, cbox=[["adaptiveMethod"], [["Mean", "Gaussian"]], ["Gaussian"], ["str"]], sbox=[["blockSize", "C"], [5, 2], ["int", "int"]], bbox=[["Adaptive Thresholding"], [self.set_adaptive_thresholding]])
self._csbox_athreshold.grid(row=4, column=1, rowspan=3, sticky=N+W+S+E)
# set combobox and settingsbox for building difference images
self._csbox_difference = csbox.CSBox(self, bbox=[["Clear Image List", "Add Image to Image List", "Compute Difference (Image)", "Show Image List"], [self.reset_dimage, self.set_dimage, self.compute_dimage, self.show_dimage]])
self._csbox_difference.grid(row=7, column=1, rowspan=4, sticky=N+W+S+E)
# set combobox and settingsbox for hough transformation
self._csbox_hough = csbox.CSBox(self, bbox=[["Hough Transform"], [self.get_hough_transform]], sbox=[["Threshold", "Minimum Line Length","Maximum Line Gap"], [40, 40, 40], ["int", "int", "int"]])
self._csbox_hough.grid(row=14, column=0, rowspan=4, sticky=N+W+S+E)
self._button_quit.grid(row=18, column=0, columnspan=3, sticky=W+E)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def reset_dimage(self, event=None):
"""Reset list of difference images
"""
self._dimage = list()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_dimage(self, event=None):
"""Append list of difference images with the currently displayed image
"""
self._dimage.append(self.get_obj().get_img(show=True))
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def show_dimage(self, event=None):
"""Show list of difference images in a own topwindow
"""
if not len(self._dimage):
raise IndexError("There are no images available.")
# open a topwindow with images used for building the difference
tw.TopWindow(self, title="Difference of images", dtype="img", value=self._dimage)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def compute_dimage(self, event=None):
"""Compute the difference image of the currently images in 'd_image'
"""
# continue if two images are provided
if len(self._dimage)<2:
raise IndexError("There are not enough images available to compute the difference.")
# compute the difference image of the currently images in 'd_image'
img = np.absolute(imgtools.gray_image(self._dimage[-2].astype(np.float32))-imgtools.gray_image(self._dimage[-1].astype(np.float32)))
#check wheter the image is not empty
if np.sum(img) == 0:
raise ValueError("Sum of differences is zero.")
img = imgtools.project_data_to_img(img)
# set image in canvas and update histogram
self.get_obj().set_img(img, clear_mask=False)
self.set_img()
# open a topwindow with images used for building the difference
tw.TopWindow(self, title="Difference of images", dtype="img", value=[img, self._dimage[-1], self._dimage[-2]])
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def blur_image(self):
"""Blur the currently displayed image with an average or median filter
"""
# get settings of combobox and fields
param = self._csbox_blur.get_dict()
kernel_size = (param["Kernel Size"], param["Kernel Size"])
if (kernel_size[0]%2)==0 or kernel_size[0]>32:
raise ValueError("Kernel size must be odd and not larger than 31.")
# set the border mode used to extrapolate pixels outside of the image, see https://docs.opencv.org/master/d2/de8/group__core__array.html#ga209f2f4869e304c82d07739337eae7c5
param["BorderType"] = cv2.BORDER_REFLECT
# get the currently displayed image
img = self.get_obj().get_img(show=True)
# blur the image with selected model
if param["Model"] == "Average":
# https://docs.opencv.org/master/d4/d86/group__imgproc__filter.html#gad533230ebf2d42509547d514f7d3fbc3
img = cv2.boxFilter(img, -1, kernel_size, normalize=True, borderType=param["BorderType"])
elif param["Model"] == "Gaussian":
# https://docs.opencv.org/master/d4/d86/group__imgproc__filter.html#gaabe8c836e97159a9193fb0b11ac52cf1
img = cv2.GaussianBlur(img, kernel_size, param["Sigma"], borderType=param["BorderType"])
elif param["Model"] == "Median":
# https://docs.opencv.org/master/d4/d86/group__imgproc__filter.html#ga564869aa33e58769b4469101aac458f9
img = cv2.medianBlur(img, kernel_size[0])
elif param["Model"] == "Bilateral Filtering":
# https://docs.opencv.org/master/d4/d86/group__imgproc__filter.html#ga9d7064d478c95d60003cf839430737ed
img = cv2.bilateralFilter(img, param["d"], param["sigmaColor"], param["sigmaSpace"])
# set image in canvas and update histogram
self.get_obj().set_img(img, clear_mask=False)
self.set_img()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def gradient_image(self):
"""Calculate the horizontal and vertical gradients of the currently displayed image
"""
# https://www.learnopencv.com/histogram-of-oriented-gradients/
# get settings of combobox and fields
param = self._csbox_blur.get_dict()
kernel_size = param["Kernel Size"]
if (kernel_size%2)==0 or kernel_size>32:
raise ValueError("Kernel size must be odd and not larger than 31.")
# get the currently displayed image
img = imgtools.project_data_to_img(imgtools.gray_image(self.get_obj().get_img(show=True)))
# calculate gradient
gradient_x = cv2.Sobel(img, cv2.CV_32F, 1, 0, ksize=kernel_size)
gradient_y = cv2.Sobel(img, cv2.CV_32F, 0, 1, ksize=kernel_size)
# calculate gradient magnitude and direction (in degrees)
magnitude, angle = cv2.cartToPolar(gradient_x, gradient_y, angleInDegrees=True)
# set image in canvas and update histogram
# self.get_obj().set_img(magnitude, clear_mask=False)
# self.set_img()
# open a topwindow with gradient images
tw.TopWindow(self, title="Gradient Image", dtype="img", value=[img, magnitude, gradient_x, gradient_y])
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_threshold(self, event=None):
"""Set a threshold via input of windows's slider
"""
# get settings of combobox and fields
param = self._csbox_threshold.get_dict()
thresh = param["Thresh"]
method = cv2.THRESH_BINARY if param["Thresh"] else cv2.THRESH_BINARY + cv2.THRESH_OTSU
# get the currently displayed image
grayimg = imgtools.gray_image(self.get_obj().get_img(show=True))
# implement thresholding and assign the result to the variable dst
ret, dst = cv2.threshold(grayimg, thresh, 255, method)
self._logger("Simple Thresholding with thresh: {}".format(ret))
return ret, dst
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_threshold_img_mask(self, event=None):
"""Set a threshold via input of windows's slider and display as a mask
"""
# set a threshold via input of windows's slider and display as a mask
ret, dst = self.set_threshold()
# visualize the binary mask in the currently displayed image
self.set_threshold_mask(dst)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_threshold_img(self, event=None):
"""Set a threshold via input of windows's slider and display as a mask
"""
# set a threshold via input of windows's slider and display as a mask
ret, dst = self.set_threshold()
# set image in canvas and update histogram
self.get_obj().set_img(imgtools.project_data_to_img(dst, dtype=np.uint8, factor=255), clear_mask=True)
self.set_img()
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def set_adaptive_thresholding(self, event=None):
# get settings of combobox and fields
param = self._csbox_athreshold.get_dict()
if param["adaptiveMethod"] == "Mean":
param_method = cv2.ADAPTIVE_THRESH_MEAN_C
elif param["adaptiveMethod"] == "Gaussian":
param_method = cv2.ADAPTIVE_THRESH_GAUSSIAN_C
dst = cv2.adaptiveThreshold(imgtools.gray_image(self._img), 255, param_method, cv2.THRESH_BINARY, param["blockSize"], param["C"])
self.set_threshold_mask(dst)
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_edges(self):
# get settings of combobox and fields
param = self._csbox_edges.get_dict()
# get the currently displayed image
img = imgtools.project_data_to_img(imgtools.gray_image(self.get_obj().get_img(show=True)), dtype=np.uint8, factor=255)
aperture_size = param["Aperture Size"]
if (aperture_size%2)==0 or aperture_size<3 or aperture_size>7:
raise ValueError("Aperture size should be odd between 3 and 7.")
edges = cv2.Canny(img, param["Threshold I"], param["Threshold II"], apertureSize=param["Aperture Size"])
# set image in canvas and update histogram
# self.get_obj().set_img(edges, clear_mask=False)
# self.set_img()
# open a topwindow with the edges of the currently displayed image computed via canny
tw.TopWindow(self, title="Edges", dtype="img", value=[img, edges])
# method --------------------------------------------------------------
# -----------------------------------------------------------------------
def get_hough_transform(self, event=None):
# get settings of combobox and fields
param_edges = self._csbox_edges.get_dict()
param_hough = self._csbox_hough.get_dict()
# get the currently displayed image
img = self.get_obj().get_img(show=True)
grayimg = imgtools.gray_image(img)
aperture_size = param_edges["Aperture Size"]
if (aperture_size%2)==0 or aperture_size<3 or aperture_size>7:
raise ValueError("Aperture size should be odd between 3 and 7.")
edgeimg = cv2.Canny(grayimg, param_edges["Threshold I"], param_edges["Threshold II"], apertureSize=param_edges["Aperture Size"])
img_list = [img, edgeimg]
lines = cv2.HoughLinesP(edgeimg, 1, np.pi/180, param_hough["Threshold"],minLineLength=param_hough["Minimum Line Length"], maxLineGap=param_hough["Maximum Line Gap"])
if lines is not None:
houghimg = img.copy()
for line in lines:
x1, y1, x2, y2 = line[0]
cv2.line(houghimg, (x1, y1), (x2, y2), (0, 0, 128), 1)
img_list.append(houghimg)
# open a topwindow with the edges of the currently displayed image computed via hough transform
tw.TopWindow(self, title="Hough Transform", dtype="img", value=img_list) | 49.941176 | 365 | 0.539393 |
acdf2b68dd54a35b26d912750c7d3aaa95c97294 | 2,013 | py | Python | bin/pannzer/operators/RemoveRedundantGO.py | nestorzaburannyi/annotate | e175226504efef811d4ac3914f2ab342968edf98 | [
"MIT"
] | 1 | 2021-11-26T17:29:56.000Z | 2021-11-26T17:29:56.000Z | bin/pannzer/operators/RemoveRedundantGO.py | nestorzaburannyi/annotate | e175226504efef811d4ac3914f2ab342968edf98 | [
"MIT"
] | 1 | 2020-03-19T21:12:23.000Z | 2020-03-19T21:12:23.000Z | bin/pannzer/operators/RemoveRedundantGO.py | nestorzaburannyi/annotate | e175226504efef811d4ac3914f2ab342968edf98 | [
"MIT"
] | null | null | null | from myoperator import BlockOperator
import sys
class RemoveRedundantGO(BlockOperator):
    """
    Set status of redundant GO classes to False. Redundant classes are a parent or child of higher ranking class.
    Sets row_status to False if redundant. Redundant GO classes will be hidden from output.

    Inputs: goclass_data column 'goid'. Block is assumed sorted.
    Outputs: row_status attribute of goclass_data spreadsheet
    """

    def __init__(self, glob):
        """Register the goclass sheet and resolve the index of its 'goid' column."""
        sys.stderr.write("# Init RedundantGO\n")
        self.glob = glob
        self.goclass_data = glob.use_sheet("goclass")
        # GO hierarchy (glob.GOparents) is provided by the GOIDELIC dictionary
        glob.use_online_dictionaries(["GOIDELIC"])
        [self.goid_col] = self.goclass_data.use_columns(["goid"])

    def process(self, block):
        """Keep only the highest-ranking non-redundant GO classes visible.

        Rows are assumed sorted by rank. A class is redundant when it is a
        parent or a child of an already accepted (higher ranking) class;
        redundant rows keep row_status == False and are hidden from output.
        """
        # use sets instead of dicts-with-True-values for membership tests
        accepted = set()
        rejected = set()
        data = self.goclass_data
        parents = self.glob.GOparents

        # hide every row by default; accepted rows are re-enabled below
        for i in range(data.nrows):
            data.row_status[i] = False

        for i in range(data.nrows):
            goid = data.block[i][self.goid_col]
            # already rejected as parent of a previously accepted class
            if goid in rejected:
                continue
            # no parent entry: must be a root term
            if goid not in parents:
                continue
            # reject if child of a previously accepted class
            if any(p in accepted for p in parents[goid]):
                rejected.add(goid)
                continue
            # accept goid, reject all of its parents
            accepted.add(goid)
            data.row_status[i] = True
            rejected.update(parents[goid])
| 45.75 | 117 | 0.517139 |
acdf2b97a317dd2878ad4bf942c097908786bb59 | 11,862 | py | Python | selfdrive/test/test_routes.py | timoxd7/openpilot | 94b3d4d26745bb2b982da064230155698d61cf44 | [
"MIT"
] | 2 | 2021-10-17T15:55:05.000Z | 2022-03-29T13:17:02.000Z | selfdrive/test/test_routes.py | timoxd7/openpilot | 94b3d4d26745bb2b982da064230155698d61cf44 | [
"MIT"
] | null | null | null | selfdrive/test/test_routes.py | timoxd7/openpilot | 94b3d4d26745bb2b982da064230155698d61cf44 | [
"MIT"
] | 1 | 2022-01-27T11:10:29.000Z | 2022-01-27T11:10:29.000Z | #!/usr/bin/env python3
from collections import namedtuple
from selfdrive.car.chrysler.values import CAR as CHRYSLER
from selfdrive.car.ford.values import CAR as FORD
from selfdrive.car.gm.values import CAR as GM
from selfdrive.car.honda.values import CAR as HONDA
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.nissan.values import CAR as NISSAN
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.car.subaru.values import CAR as SUBARU
from selfdrive.car.toyota.values import CAR as TOYOTA
from selfdrive.car.volkswagen.values import CAR as VOLKSWAGEN
from selfdrive.car.tesla.values import CAR as TESLA
# TODO: add routes for these cars
# Supported platforms that currently have no test route; kept in a list so
# they are not reported as missing coverage.
non_tested_cars = [
  GM.CADILLAC_ATS,
  GM.HOLDEN_ASTRA,
  GM.MALIBU,
  HYUNDAI.ELANTRA_GT_I30,
  HYUNDAI.GENESIS_G90,
  HYUNDAI.KIA_OPTIMA_H,
]

# One test case: `route` is a "<dongle_id>|<timestamp>" drive segment name,
# `car_fingerprint` the platform constant the route belongs to.
TestRoute = namedtuple('TestRoute', ['route', 'car_fingerprint'])
# Test routes, grouped by make; most platforms have exactly one route,
# a few (e.g. TOYOTA.RAV4, TOYOTA.LEXUS_RX) have several.
routes = [
  TestRoute("0c94aa1e1296d7c6|2021-05-05--19-48-37", CHRYSLER.JEEP_CHEROKEE),
  TestRoute("91dfedae61d7bd75|2021-05-22--20-07-52", CHRYSLER.JEEP_CHEROKEE_2019),
  TestRoute("420a8e183f1aed48|2020-03-05--07-15-29", CHRYSLER.PACIFICA_2017_HYBRID),
  TestRoute("43a685a66291579b|2021-05-27--19-47-29", CHRYSLER.PACIFICA_2018),
  TestRoute("378472f830ee7395|2021-05-28--07-38-43", CHRYSLER.PACIFICA_2018_HYBRID),
  TestRoute("8190c7275a24557b|2020-01-29--08-33-58", CHRYSLER.PACIFICA_2019_HYBRID),
  TestRoute("3d84727705fecd04|2021-05-25--08-38-56", CHRYSLER.PACIFICA_2020),
  TestRoute("f1b4c567731f4a1b|2018-04-30--10-15-35", FORD.FUSION),
  TestRoute("c950e28c26b5b168|2018-05-30--22-03-41", GM.VOLT),
  # TODO: use another route that has radar data at start
  TestRoute("7cc2a8365b4dd8a9|2018-12-02--12-10-44", GM.ACADIA),
  TestRoute("aa20e335f61ba898|2019-02-05--16-59-04", GM.BUICK_REGAL),
  TestRoute("0e7a2ba168465df5|2020-10-18--14-14-22", HONDA.ACURA_RDX_3G),
  TestRoute("a74b011b32b51b56|2020-07-26--17-09-36", HONDA.CIVIC),
  TestRoute("a859a044a447c2b0|2020-03-03--18-42-45", HONDA.CRV_EU),
  TestRoute("68aac44ad69f838e|2021-05-18--20-40-52", HONDA.CRV),
  TestRoute("14fed2e5fa0aa1a5|2021-05-25--14-59-42", HONDA.CRV_HYBRID),
  TestRoute("52f3e9ae60c0d886|2021-05-23--15-59-43", HONDA.FIT),
  TestRoute("2c4292a5cd10536c|2021-08-19--21-32-15", HONDA.FREED),
  TestRoute("03be5f2fd5c508d1|2020-04-19--18-44-15", HONDA.HRV),
  TestRoute("917b074700869333|2021-05-24--20-40-20", HONDA.ACURA_ILX),
  TestRoute("81722949a62ea724|2019-04-06--15-19-25", HONDA.ODYSSEY_CHN),
  TestRoute("08a3deb07573f157|2020-03-06--16-11-19", HONDA.ACCORD), # 1.5T
  TestRoute("1da5847ac2488106|2021-05-24--19-31-50", HONDA.ACCORD), # 2.0T
  TestRoute("07585b0da3c88459|2021-05-26--18-52-04", HONDA.ACCORDH),
  TestRoute("1ad763dd22ef1a0e|2020-02-29--18-37-03", HONDA.CRV_5G),
  TestRoute("0a96f86fcfe35964|2020-02-05--07-25-51", HONDA.ODYSSEY),
  TestRoute("d83f36766f8012a5|2020-02-05--18-42-21", HONDA.CIVIC_BOSCH_DIESEL),
  TestRoute("f0890d16a07a236b|2021-05-25--17-27-22", HONDA.INSIGHT),
  TestRoute("07d37d27996096b6|2020-03-04--21-57-27", HONDA.PILOT),
  TestRoute("fa1cd231131ca137|2021-05-22--07-59-57", HONDA.PILOT_2019),
  TestRoute("0a78dfbacc8504ef|2020-03-04--13-29-55", HONDA.CIVIC_BOSCH),
  TestRoute("f34a60d68d83b1e5|2020-10-06--14-35-55", HONDA.ACURA_RDX),
  TestRoute("54fd8451b3974762|2021-04-01--14-50-10", HONDA.RIDGELINE),
  TestRoute("2d5808fae0b38ac6|2021-09-01--17-14-11", HONDA.HONDA_E),
  TestRoute("6fe86b4e410e4c37|2020-07-22--16-27-13", HYUNDAI.HYUNDAI_GENESIS),
  TestRoute("70c5bec28ec8e345|2020-08-08--12-22-23", HYUNDAI.GENESIS_G70),
  TestRoute("6b301bf83f10aa90|2020-11-22--16-45-07", HYUNDAI.GENESIS_G80),
  TestRoute("38bfd238edecbcd7|2018-08-29--22-02-15", HYUNDAI.SANTA_FE),
  TestRoute("bf43d9df2b660eb0|2021-09-23--14-16-37", HYUNDAI.SANTA_FE_2022),
  TestRoute("e0e98335f3ebc58f|2021-03-07--16-38-29", HYUNDAI.KIA_CEED),
  TestRoute("7653b2bce7bcfdaa|2020-03-04--15-34-32", HYUNDAI.KIA_OPTIMA),
  TestRoute("c75a59efa0ecd502|2021-03-11--20-52-55", HYUNDAI.KIA_SELTOS),
  TestRoute("5b7c365c50084530|2020-04-15--16-13-24", HYUNDAI.SONATA),
  TestRoute("b2a38c712dcf90bd|2020-05-18--18-12-48", HYUNDAI.SONATA_LF),
  TestRoute("5875672fc1d4bf57|2020-07-23--21-33-28", HYUNDAI.KIA_SORENTO),
  TestRoute("9c917ba0d42ffe78|2020-04-17--12-43-19", HYUNDAI.PALISADE),
  TestRoute("fa8db5869167f821|2021-06-10--22-50-10", HYUNDAI.IONIQ_PHEV),
  TestRoute("2c5cf2dd6102e5da|2020-12-17--16-06-44", HYUNDAI.IONIQ_EV_2020),
  TestRoute("610ebb9faaad6b43|2020-06-13--15-28-36", HYUNDAI.IONIQ_EV_LTD),
  TestRoute("2c5cf2dd6102e5da|2020-06-26--16-00-08", HYUNDAI.IONIQ),
  TestRoute("22d955b2cd499c22|2020-08-10--19-58-21", HYUNDAI.KONA),
  TestRoute("efc48acf44b1e64d|2021-05-28--21-05-04", HYUNDAI.KONA_EV),
  TestRoute("49f3c13141b6bc87|2021-07-28--08-05-13", HYUNDAI.KONA_HEV),
  TestRoute("5dddcbca6eb66c62|2020-07-26--13-24-19", HYUNDAI.KIA_STINGER),
  TestRoute("d624b3d19adce635|2020-08-01--14-59-12", HYUNDAI.VELOSTER),
  TestRoute("007d5e4ad9f86d13|2021-09-30--15-09-23", HYUNDAI.KIA_K5_2021),
  TestRoute("50c6c9b85fd1ff03|2020-10-26--17-56-06", HYUNDAI.KIA_NIRO_EV),
  TestRoute("173219cf50acdd7b|2021-07-05--10-27-41", HYUNDAI.KIA_NIRO_HEV),
  TestRoute("34a875f29f69841a|2021-07-29--13-02-09", HYUNDAI.KIA_NIRO_HEV_2021),
  TestRoute("50a2212c41f65c7b|2021-05-24--16-22-06", HYUNDAI.KIA_FORTE),
  TestRoute("c5ac319aa9583f83|2021-06-01--18-18-31", HYUNDAI.ELANTRA),
  TestRoute("82e9cdd3f43bf83e|2021-05-15--02-42-51", HYUNDAI.ELANTRA_2021),
  TestRoute("715ac05b594e9c59|2021-06-20--16-21-07", HYUNDAI.ELANTRA_HEV_2021),
  TestRoute("7120aa90bbc3add7|2021-08-02--07-12-31", HYUNDAI.SONATA_HYBRID),
  TestRoute("00c829b1b7613dea|2021-06-24--09-10-10", TOYOTA.ALPHARD_TSS2),
  TestRoute("000cf3730200c71c|2021-05-24--10-42-05", TOYOTA.AVALON),
  TestRoute("0bb588106852abb7|2021-05-26--12-22-01", TOYOTA.AVALON_2019),
  TestRoute("87bef2930af86592|2021-05-30--09-40-54", TOYOTA.AVALONH_2019),
  TestRoute("6cdecc4728d4af37|2020-02-23--15-44-18", TOYOTA.CAMRY),
  TestRoute("3456ad0cd7281b24|2020-12-13--17-45-56", TOYOTA.CAMRY_TSS2),
  TestRoute("ffccc77938ddbc44|2021-01-04--16-55-41", TOYOTA.CAMRYH_TSS2),
  TestRoute("54034823d30962f5|2021-05-24--06-37-34", TOYOTA.CAMRYH),
  TestRoute("4e45c89c38e8ec4d|2021-05-02--02-49-28", TOYOTA.COROLLA),
  TestRoute("5f5afb36036506e4|2019-05-14--02-09-54", TOYOTA.COROLLA_TSS2),
  TestRoute("5ceff72287a5c86c|2019-10-19--10-59-02", TOYOTA.COROLLAH_TSS2),
  TestRoute("d2525c22173da58b|2021-04-25--16-47-04", TOYOTA.PRIUS),
  TestRoute("b0f5a01cf604185c|2017-12-18--20-32-32", TOYOTA.RAV4),
  TestRoute("b0c9d2329ad1606b|2019-04-02--13-24-43", TOYOTA.RAV4),
  TestRoute("b14c5b4742e6fc85|2020-07-28--19-50-11", TOYOTA.RAV4),
  TestRoute("32a7df20486b0f70|2020-02-06--16-06-50", TOYOTA.RAV4H),
  TestRoute("cdf2f7de565d40ae|2019-04-25--03-53-41", TOYOTA.RAV4_TSS2),
  TestRoute("7e34a988419b5307|2019-12-18--19-13-30", TOYOTA.RAV4H_TSS2),
  TestRoute("e6a24be49a6cd46e|2019-10-29--10-52-42", TOYOTA.LEXUS_ES_TSS2),
  TestRoute("25057fa6a5a63dfb|2020-03-04--08-44-23", TOYOTA.LEXUS_CTH),
  TestRoute("f49e8041283f2939|2019-05-30--11-51-51", TOYOTA.LEXUS_ESH_TSS2),
  TestRoute("37041c500fd30100|2020-12-30--12-17-24", TOYOTA.LEXUS_ESH),
  TestRoute("886fcd8408d570e9|2020-01-29--05-11-22", TOYOTA.LEXUS_RX),
  TestRoute("886fcd8408d570e9|2020-01-29--02-18-55", TOYOTA.LEXUS_RX),
  TestRoute("d27ad752e9b08d4f|2021-05-26--19-39-51", TOYOTA.LEXUS_RXH),
  TestRoute("01b22eb2ed121565|2020-02-02--11-25-51", TOYOTA.LEXUS_RX_TSS2),
  TestRoute("b74758c690a49668|2020-05-20--15-58-57", TOYOTA.LEXUS_RXH_TSS2),
  TestRoute("ec429c0f37564e3c|2020-02-01--17-28-12", TOYOTA.LEXUS_NXH),
  TestRoute("964c09eb11ca8089|2020-11-03--22-04-00", TOYOTA.LEXUS_NX),
  TestRoute("3fd5305f8b6ca765|2021-04-28--19-26-49", TOYOTA.LEXUS_NX_TSS2),
  TestRoute("0a302ffddbb3e3d3|2020-02-08--16-19-08", TOYOTA.HIGHLANDER_TSS2),
  TestRoute("437e4d2402abf524|2021-05-25--07-58-50", TOYOTA.HIGHLANDERH_TSS2),
  TestRoute("3183cd9b021e89ce|2021-05-25--10-34-44", TOYOTA.HIGHLANDER),
  TestRoute("80d16a262e33d57f|2021-05-23--20-01-43", TOYOTA.HIGHLANDERH),
  TestRoute("eb6acd681135480d|2019-06-20--20-00-00", TOYOTA.SIENNA),
  TestRoute("2e07163a1ba9a780|2019-08-25--13-15-13", TOYOTA.LEXUS_IS),
  TestRoute("0a0de17a1e6a2d15|2020-09-21--21-24-41", TOYOTA.PRIUS_TSS2),
  TestRoute("9b36accae406390e|2021-03-30--10-41-38", TOYOTA.MIRAI),
  TestRoute("cd9cff4b0b26c435|2021-05-13--15-12-39", TOYOTA.CHR),
  TestRoute("57858ede0369a261|2021-05-18--20-34-20", TOYOTA.CHRH),
  TestRoute("202c40641158a6e5|2021-09-21--09-43-24", VOLKSWAGEN.ARTEON_MK1),
  TestRoute("2c68dda277d887ac|2021-05-11--15-22-20", VOLKSWAGEN.ATLAS_MK1),
  TestRoute("cae14e88932eb364|2021-03-26--14-43-28", VOLKSWAGEN.GOLF_MK7),
  TestRoute("58a7d3b707987d65|2021-03-25--17-26-37", VOLKSWAGEN.JETTA_MK7),
  TestRoute("4d134e099430fba2|2021-03-26--00-26-06", VOLKSWAGEN.PASSAT_MK8),
  TestRoute("2744c89a8dda9a51|2021-07-24--21-28-06", VOLKSWAGEN.TCROSS_MK1),
  TestRoute("2cef8a0b898f331a|2021-03-25--20-13-57", VOLKSWAGEN.TIGUAN_MK2),
  TestRoute("a589dcc642fdb10a|2021-06-14--20-54-26", VOLKSWAGEN.TOURAN_MK2),
  TestRoute("a459f4556782eba1|2021-09-19--09-48-00", VOLKSWAGEN.TRANSPORTER_T61),
  TestRoute("07667b885add75fd|2021-01-23--19-48-42", VOLKSWAGEN.AUDI_A3_MK3),
  TestRoute("6c6b466346192818|2021-06-06--14-17-47", VOLKSWAGEN.AUDI_Q2_MK1),
  TestRoute("8f205bdd11bcbb65|2021-03-26--01-00-17", VOLKSWAGEN.SEAT_ATECA_MK1),
  TestRoute("fc6b6c9a3471c846|2021-05-27--13-39-56", VOLKSWAGEN.SEAT_LEON_MK3),
  TestRoute("12d6ae3057c04b0d|2021-09-15--00-04-07", VOLKSWAGEN.SKODA_KAMIQ_MK1),
  TestRoute("12d6ae3057c04b0d|2021-09-04--21-21-21", VOLKSWAGEN.SKODA_KAROQ_MK1),
  TestRoute("90434ff5d7c8d603|2021-03-15--12-07-31", VOLKSWAGEN.SKODA_KODIAQ_MK1),
  TestRoute("66e5edc3a16459c5|2021-05-25--19-00-29", VOLKSWAGEN.SKODA_OCTAVIA_MK3),
  TestRoute("026b6d18fba6417f|2021-03-26--09-17-04", VOLKSWAGEN.SKODA_SCALA_MK1),
  TestRoute("b2e9858e29db492b|2021-03-26--16-58-42", VOLKSWAGEN.SKODA_SUPERB_MK3),
  TestRoute("3c8f0c502e119c1c|2020-06-30--12-58-02", SUBARU.ASCENT),
  TestRoute("c321c6b697c5a5ff|2020-06-23--11-04-33", SUBARU.FORESTER),
  TestRoute("791340bc01ed993d|2019-03-10--16-28-08", SUBARU.IMPREZA),
  # Dashcam
  TestRoute("95441c38ae8c130e|2020-06-08--12-10-17", SUBARU.FORESTER_PREGLOBAL),
  # Dashcam
  TestRoute("df5ca7660000fba8|2020-06-16--17-37-19", SUBARU.LEGACY_PREGLOBAL),
  # Dashcam
  TestRoute("5ab784f361e19b78|2020-06-08--16-30-41", SUBARU.OUTBACK_PREGLOBAL),
  # Dashcam
  TestRoute("e19eb5d5353b1ac1|2020-08-09--14-37-56", SUBARU.OUTBACK_PREGLOBAL_2018),
  TestRoute("fbbfa6af821552b9|2020-03-03--08-09-43", NISSAN.XTRAIL),
  TestRoute("5b7c365c50084530|2020-03-25--22-10-13", NISSAN.LEAF),
  TestRoute("22c3dcce2dd627eb|2020-12-30--16-38-48", NISSAN.LEAF_IC),
  TestRoute("059ab9162e23198e|2020-05-30--09-41-01", NISSAN.ROGUE),
  TestRoute("b72d3ec617c0a90f|2020-12-11--15-38-17", NISSAN.ALTIMA),
  TestRoute("32a319f057902bb3|2020-04-27--15-18-58", MAZDA.CX5),
  TestRoute("10b5a4b380434151|2020-08-26--17-11-45", MAZDA.CX9),
  TestRoute("74f1038827005090|2020-08-26--20-05-50", MAZDA.MAZDA3),
  TestRoute("fb53c640f499b73d|2021-06-01--04-17-56", MAZDA.MAZDA6),
  TestRoute("f6d5b1a9d7a1c92e|2021-07-08--06-56-59", MAZDA.CX9_2021),
  TestRoute("6c14ee12b74823ce|2021-06-30--11-49-02", TESLA.AP1_MODELS),
  TestRoute("bb50caf5f0945ab1|2021-06-19--17-20-18", TESLA.AP2_MODELS),
]
# Routes that are always treated as dashcam routes (forced, per the name) —
# presumably platforms without actuation support; confirm against the consumer
# of this list.
forced_dashcam_routes = [
  # Ford fusion
  "f1b4c567731f4a1b|2018-04-18--11-29-37",
  "f1b4c567731f4a1b|2018-04-30--10-15-35",
  # Mazda CX5
  "32a319f057902bb3|2020-04-27--15-18-58",
  # Mazda CX9
  "10b5a4b380434151|2020-08-26--17-11-45",
  # Mazda3
  "74f1038827005090|2020-08-26--20-05-50",
  # Mazda6
  "fb53c640f499b73d|2021-06-01--04-17-56",
  # CX-9 2021
  "f6d5b1a9d7a1c92e|2021-07-08--06-56-59",
]
| 58.433498 | 84 | 0.752993 |
acdf2b9e2a82fe4efe2c439a4efc09f889497531 | 994 | py | Python | BENCHMARKS/STELLOPT_TEST/TOK_R0_RHO/compare_TOK_R0_RHO.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
] | 29 | 2020-05-08T01:47:34.000Z | 2022-03-06T10:35:28.000Z | BENCHMARKS/STELLOPT_TEST/TOK_R0_RHO/compare_TOK_R0_RHO.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
] | 77 | 2020-05-08T07:18:09.000Z | 2022-03-30T11:20:33.000Z | BENCHMARKS/STELLOPT_TEST/TOK_R0_RHO/compare_TOK_R0_RHO.py | joseluisvelasco/STELLOPT | e064ebb96414d5afc4e205f43b44766558dca2af | [
"MIT"
] | 3 | 2021-02-10T13:47:08.000Z | 2022-03-21T12:53:43.000Z | #!/usr/bin/env python3
import sys, os
sys.path.insert(0, '../../../pySTEL/')
import numpy as np                          # For Arrays
from math import pi
from libstell.stellopt import read_stellopt

# Abort early when the STELLOPT environment is not configured.
try:
    stellopt_path = os.environ["STELLOPT_PATH"]
except KeyError:
    print("Please set environment variable STELLOPT_PATH")
    sys.exit(1)

# Maximum allowed relative deviation [%] from the reference values.
failtol = 1.0
filename = 'stellopt.TOK_R0_RHO'
data = read_stellopt(filename)
if not data:
    print('ERROR Opening File: ' + filename)
    # BUGFIX: exit nonzero so the benchmark driver registers the failure
    # (previously this path exited with status 0)
    sys.exit(1)
print('EXTENSION: ' + filename)

lfail = 0
print('==== Scalars ====')
# Reference values, compared against the final optimizer iteration in `data`.
varlist = {}
varlist['R0_equil'] = 10.0
varlist['ASPECT_equil'] = 10.0
n = data['R0_equil'].shape
for temp in varlist:
    act = varlist[temp]
    cal = data[temp][n[0] - 1]
    perct = 100 * (abs(act - cal) / act)
    print(' ' + temp + ': ' + str(cal) + ' ' + str(act) + ' ' + str(int(perct)) + '%')
    if perct > failtol:
        lfail = 1
print('=================')

if lfail:
    print(' STATUS: FAIL!!!!!')
    # BUGFIX: propagate the failure through the process exit status
    # (previously the script always fell through to sys.exit(0))
    sys.exit(1)
print(' STATUS: PASS')
sys.exit(0)
| 21.148936 | 75 | 0.620724 |
acdf2ba46fafa0d603aa4a1e178fbf8f610b60e3 | 10,371 | py | Python | api/src/models.py | dojo-modeling/dojo | 8abc71790cbb8639a56c89791535b3df59c725ac | [
"MIT"
] | 3 | 2022-01-28T01:43:30.000Z | 2022-02-24T21:42:13.000Z | api/src/models.py | dojo-modeling/dojo | 8abc71790cbb8639a56c89791535b3df59c725ac | [
"MIT"
] | 8 | 2022-01-12T16:39:57.000Z | 2022-01-31T19:05:45.000Z | api/src/models.py | dojo-modeling/dojo | 8abc71790cbb8639a56c89791535b3df59c725ac | [
"MIT"
] | null | null | null | from __future__ import annotations
import json
import logging
import time
import uuid
from copy import deepcopy
from typing import Any, Dict, Generator, List, Optional, Union

from elasticsearch import Elasticsearch, NotFoundError
from fastapi import APIRouter, Depends, HTTPException, Query, Response, status, Body
from fastapi.logger import logger
from pydantic import BaseModel, Field

from validation import ModelSchema, DojoSchema
from src.settings import settings
from src.dojo import search_and_scroll, copy_configs, copy_outputfiles, copy_directive, copy_accessory_files
from src.utils import plugin_action
# Router collecting the /models endpoints defined below.
router = APIRouter()

# Module-wide Elasticsearch client (host/port taken from application settings).
es = Elasticsearch([settings.ELASTICSEARCH_URL], port=settings.ELASTICSEARCH_PORT)

# Module-level logger; note this rebinds the `logger` name imported from
# fastapi.logger above.
logger = logging.getLogger(__name__)
# For created_at times in epoch milliseconds
def current_milli_time():
    """Return the current wall-clock time as whole epoch milliseconds."""
    millis = time.time() * 1000
    return round(millis)
@router.post("/models")
def create_model(payload: ModelSchema.ModelMetadataSchema):
model_id = payload.id
payload.created_at = current_milli_time()
body = payload.json()
model = json.loads(body)
plugin_action("before_create", data=model, type="model")
es.index(index="models", body=model, id=model_id)
plugin_action("post_create", data=model, type="model")
return Response(
status_code=status.HTTP_201_CREATED,
headers={"location": f"/api/models/{model_id}"},
content=f"Created model with id = {model_id}",
)
@router.get("/models/latest", response_model=DojoSchema.ModelSearchResult)
def get_latest_models(size=100, scroll_id=None) -> DojoSchema.ModelSearchResult:
q = {
'query': {
'bool':{
'must_not': {
'exists': {'field': 'next_version'}
}}
}
}
if not scroll_id:
# we need to kick off the query
results = es.search(index='models', body=q, scroll="2m", size=size)
else:
# otherwise, we can use the scroll
results = es.scroll(scroll_id=scroll_id, scroll="2m")
# get count
count = es.count(index='models', body=q)
# if results are less than the page size (10) don't return a scroll_id
if len(results["hits"]["hits"]) < int(size):
scroll_id = None
else:
scroll_id = results.get("_scroll_id", None)
return {
"hits": count["count"],
"scroll_id": scroll_id,
"results": [i["_source"] for i in results["hits"]["hits"]],
}
@router.put("/models/{model_id}")
def update_model(model_id: str, payload: ModelSchema.ModelMetadataSchema):
payload.created_at = current_milli_time()
model = payload.json()
plugin_action("before_update", data=model, type="model")
es.index(index="models", body=model, id=model_id)
plugin_action("post_update", data=model, type="model")
return Response(
status_code=status.HTTP_201_CREATED,
headers={"location": f"/api/models/{model_id}"},
content=f"Updated model with id = {model_id}",
)
@router.patch("/models/{model_id}")
def modify_model(model_id: str, payload: ModelSchema.ModelMetadataPatchSchema):
    """Partially update a model document and run the update plugin hooks.

    Only the fields explicitly set on `payload` (exclude_unset) are written.
    """
    body = json.loads(payload.json(exclude_unset=True))
    # CONSISTENCY FIX: log through the module-level `logger` configured above
    # instead of the root logger (was `logging.info(body)`)
    logger.info(body)

    plugin_action("before_update", data=body, type="model")
    es.update(index="models", body={"doc": body}, id=model_id)
    plugin_action("post_update", data=body, type="model")

    return Response(
        status_code=status.HTTP_200_OK,
        headers={"location": f"/api/models/{model_id}"},
        content=f"Modified model with id = {model_id}",
    )
@router.get("/models", response_model=DojoSchema.ModelSearchResult)
def search_models(
query: str = None, size: int = 10, scroll_id: str = Query(None)
) -> DojoSchema.ModelSearchResult:
return search_and_scroll(
index="models", size=size, query=query, scroll_id=scroll_id
)
@router.get("/models/{model_id}", response_model=ModelSchema.ModelMetadataSchema)
def get_model(model_id: str) -> ModelSchema.ModelMetadataSchema:
try:
model = es.get(index="models", id=model_id)["_source"]
except:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND)
return model
def delete_model(model_id: str) -> None:
    """Delete a model document; deleting a non-existent model is a no-op."""
    try:
        es.delete(index="models", id=model_id)
    except NotFoundError:
        # previously a bare `except: pass` which also hid connection errors;
        # only the expected "document missing" case is ignored now
        pass
@router.post("/models/register/{model_id}")
def register_model(model_id: str):
    """
    This endpoint finalizes the registration of a model by notifying
    external services via plugins.
    """
    model = es.get(index="models", id=model_id)["_source"]
    # Fire the full plugin life-cycle around registration, in order.
    for hook in ("before_register", "register", "post_register"):
        plugin_action(hook, data=model, type="model")
    return Response(
        status_code=status.HTTP_201_CREATED,
        content=f"Registered model with id = {model_id}"
    )
@router.get("/models/version/{model_id}")
def version_model(model_id : str, exclude_files: bool = False):
    """
    This endpoint creates a new version of a model. It is primarily used as part of the model
    editing workflow. When a modeler wishes to edit their model, a new version is created
    and the modelers edits are made against this new (cloned) model.

    The new model is linked to the original via prev_version/next_version.
    On any failure the partially created clone is deleted and the exception
    re-raised. Returns a plain-text response whose body is the new model id.
    """
    def get_updated_outputs(
        outputs: List[Union[ModelSchema.Output, ModelSchema.QualifierOutput]],
        uuid_mapping: Dict[str, str]
    ):
        """
        Helper function to remap Outputs to their new uuids
        Each output or qualifier output has a uuid corresponding to the outputfile idx
        this function changes the uuids in the models outputs and qualifiers to the new model version
        outputfiles uuid. This is the uuid used by annotate.
        """
        updated_outputs = []
        # deepcopy so the caller's output objects are never mutated in place.
        for output in deepcopy(outputs):
            original_uuid = output.uuid
            new_uuid = uuid_mapping.get(original_uuid)
            # Outputs without an entry in the mapping keep their original uuid.
            if new_uuid:
                output.uuid = new_uuid
            updated_outputs.append(output)
        return updated_outputs
    original_model_definition = get_model(model_id)
    new_id = str(uuid.uuid4())
    # Update required fields from the original definition
    original_model_definition['id'] = new_id
    original_model_definition['prev_version'] = model_id
    # The clone is the newest version, so it must not inherit next_version.
    if original_model_definition.get('next_version', False):
        del original_model_definition['next_version']
    # Create a new pydantic model for processing
    new_model = ModelSchema.ModelMetadataSchema(**original_model_definition)
    # Reset variables related to publishing since they don't apply to the new model
    new_model.is_published = False
    new_model.commit_message = None
    try:
        if exclude_files:
            # Update the created model setting the mappings to be empty/blank
            new_model.parameters = []
            new_model.outputs = []
            new_model.qualifier_outputs = []
        else:
            # Make copies of related items
            outputfile_uuid_mapping = copy_outputfiles(model_id, new_id)
            copy_configs(model_id, new_id)
            copy_directive(model_id, new_id)
            copy_accessory_files(model_id, new_id)
            # Update the created model with the changes related to copying
            if new_model.outputs:
                new_model.outputs = get_updated_outputs(new_model.outputs, outputfile_uuid_mapping)
            if new_model.qualifier_outputs:
                new_model.qualifier_outputs = get_updated_outputs(new_model.qualifier_outputs, outputfile_uuid_mapping)
        # Save model
        create_model(new_model)
        # Assign next_version id to original model after save
        modify_model(model_id=model_id, payload=ModelSchema.ModelMetadataPatchSchema(next_version=new_id))
    except Exception as e:
        # Delete partially created model
        # TODO: Clean up copies configs, directives, accessories, and output file data which may exist even if the
        # TODO: model was never actually created due to error
        delete_model(new_id)
        raise
    return Response(
        status_code=status.HTTP_200_OK,
        headers={"location": f"/api/models/{model_id}", "Content-Type": "text/plain"},
        content=new_id
    )
@router.get("/models/{model_id}/versions", response_model=ModelSchema.VersionSchema)
def model_versions(model_id : str) -> ModelSchema.VersionSchema:
    """
    This endpoint returns the model ids for all versions of the model, both any previous version or any later versions.
    """
    def walk_chain(start_id, link_field):
        # Follow the prev_version/next_version linked list starting at
        # start_id, collecting every id encountered.
        chain = []
        cursor = start_id
        while cursor:
            chain.append(cursor)
            cursor = get_model(cursor).get(link_field, None)
        return chain

    current = get_model(model_id)
    prev_versions = walk_chain(current.get("prev_version", None), "prev_version")
    later_versions = walk_chain(current.get("next_version", None), "next_version")
    # Oldest first for the previous-version chain.
    prev_versions.reverse()
    return {
        "current_version": model_id,
        "prev_versions": prev_versions,
        "later_versions": later_versions,
    }
@router.post("/models/{model_id}/publish")
def publish_model(model_id: str, publish_data: ModelSchema.PublishSchema):
    """
    This endpoint finalizes the model, setting the state to published and saving a commit message.
    A model should only be able to be edited while is_published is set to false.
    Once a model is published, any changes should be done via a new version.
    """
    # Update the model, setting is_published to True and saving the commit message.
    model = get_model(model_id)
    # Publishing is one-way: refuse to republish an already-published model.
    if model.get("is_published", False):
        return Response(
            status_code=status.HTTP_403_FORBIDDEN,
            content="Model has already been published and cannot be republished.",
        )
    plugin_action("before_publish", data=model, type="model")
    # NOTE(review): ``model`` passed to the publish hooks below is the
    # pre-update document, i.e. it does not yet reflect is_published=True.
    body = json.loads(publish_data.json(exclude_unset=False))
    body["is_published"] = True
    es.update(index="models", body={"doc": body}, id=model_id)
    plugin_action("publish", data=model, type="model")
    plugin_action("post_publish", data=model, type="model")
    return Response(
        status_code=status.HTTP_200_OK,
        content="Model published",
    )
| 34.685619 | 119 | 0.685083 |
acdf2c65229bf3baf92b7372c653f1defecb15db | 5,792 | py | Python | contrib/seeds/makeseeds.py | okoto-xyz/jagaricoin | 44f0606fbaf2ef0d1ee55cb599b142f5e819f8af | [
"MIT"
] | 1 | 2018-11-08T13:03:28.000Z | 2018-11-08T13:03:28.000Z | contrib/seeds/makeseeds.py | jagaricoin-project/jagaricoin | bc65110d5d685cc5eacf6feab820c15556ede23f | [
"MIT"
] | null | null | null | contrib/seeds/makeseeds.py | jagaricoin-project/jagaricoin | bc65110d5d685cc5eacf6feab820c15556ede23f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 337600
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
"130.211.129.106", "178.63.107.226",
"83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
"54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
"54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
"54.94.195.96", "54.94.200.247"
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
# NOTE: the dots in the version numbers below are unescaped regex dots; they
# match any character, which is harmless here but worth knowing when editing.
PATTERN_AGENT = re.compile(r"^(/Satoshi:0.13.(0|1|2|99)/|/JagaricoinCore:0.13.(0|1|2|99)/|/JagaricoinCore:0.14.(0|1|2|99)/|/JagaricoinCore:0.15.(0|1|2|99)/)$")
def parseline(line):
    """Parse one line of seeder output into a dict of node properties.

    Returns None for malformed lines, unparsable or uninteresting addresses
    (e.g. IPv6 localhost), and entries the seeder flagged as bad.
    """
    sline = line.split()
    if len(sline) < 11:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                return None
            else:
                net = 'onion'
                ipstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']: # Not interested in localhost
                return None
            ipstr = m.group(1)
            sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check
        ip = 0
        for i in range(0,4):
            if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
                return None
            ip = ip + (int(m.group(i+2)) << (8*(3-i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ipstr = m.group(1)
        port = int(m.group(6))
    # Skip bad results.
    # BUGFIX: sline[1] is a string, so the original ``sline[1] == 0`` was
    # always False and bad entries were never skipped.
    if int(sline[1]) == 0:
        return None
    # Extract uptime %.
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent (strip surrounding quotes).
    agent = sline[11][1:-1]
    # Extract service flags (hex bitmask).
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ipstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
def filtermultiport(ips):
    '''Filter out hosts with more nodes per IP'''
    by_key = collections.defaultdict(list)
    for entry in ips:
        by_key[entry['sortkey']].append(entry)
    # Keep only addresses that appear exactly once (one port per host).
    return [group[0] for group in by_key.values() if len(group) == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
    """Limit IPv4 results to ``max_per_asn`` entries per origin ASN and
    ``max_total`` overall; IPv6 and onion entries pass through unfiltered.
    """
    # Sift out ips by type
    ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
    ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
    ips_onion = [ip for ip in ips if ip['net'] == 'onion']
    # Filter IPv4 by ASN
    result = []
    asn_count = {}
    for ip in ips_ipv4:
        if len(result) == max_total:
            break
        try:
            # Look up the origin ASN via Team Cymru's reverse-DNS service.
            asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        # Narrowed from a bare ``except:`` so Ctrl-C still aborts the run;
        # any resolution or parse failure just skips this host.
        except Exception:
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
    # TODO: filter IPv6 by ASN
    # Add back non-IPv4
    result.extend(ips_ipv6)
    result.extend(ips_onion)
    return result
def main():
    """Read seeder output from stdin, apply quality filters, and print the
    resulting seed addresses (one ``host:port`` per line) to stdout."""
    lines = sys.stdin.readlines()
    ips = [parseline(line) for line in lines]
    # Skip entries without a valid address (parseline returned None).
    ips = [ip for ip in ips if ip is not None]
    # Skip entries from suspicious hosts.
    ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
    # Enforce minimal number of blocks.
    ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
    # Require service bit 1 (NODE_NETWORK).
    ips = [ip for ip in ips if (ip['service'] & 1) == 1]
    # Require at least 50% 30-day uptime.
    ips = [ip for ip in ips if ip['uptime'] > 50]
    # Require a known and recent user agent.
    ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
    # Sort by availability (and use last success as tie breaker)
    ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports, these are likely abusive
    ips = filtermultiport(ips)
    # Look up ASNs and limit results, both per ASN and globally.
    ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
    # Sort the results by IP address (for deterministic output).
    ips.sort(key=lambda x: (x['net'], x['sortkey']))
    for ip in ips:
        if ip['net'] == 'ipv6':
            print('[%s]:%i' % (ip['ip'], ip['port']))
        else:
            print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
    main()
| 33.479769 | 186 | 0.57096 |
acdf2d3b2da93a2883617e83d5799bd38e7290d4 | 606 | py | Python | setup.py | mediapills/console | a4ec98a0205e199649297eedd445186faad1ee27 | [
"MIT"
] | null | null | null | setup.py | mediapills/console | a4ec98a0205e199649297eedd445186faad1ee27 | [
"MIT"
] | null | null | null | setup.py | mediapills/console | a4ec98a0205e199649297eedd445186faad1ee27 | [
"MIT"
] | null | null | null | import setuptools
# for pip >= 10
try:
    from pip._internal.req import parse_requirements
    from pip._internal.network.session import PipSession
# for pip <= 9.0.3
except ImportError:
    from pip.req import parse_requirements  # type: ignore
    from pip.network.session import PipSession  # type: ignore
version = "0.0.1"
# NOTE(review): pip's internal API is not stable; parse_requirements and
# PipSession have moved between pip releases, so this import dance may
# still break on pip versions other than the two handled above.
requirements = parse_requirements("requirements.txt", session=PipSession())
# Newer pip exposes ``.requirement`` on parsed entries, older pip ``.req``;
# handle both shapes.
install_requires = [
    str(i.requirement if hasattr(i, "requirement") else i.req)  # type: ignore
    for i in requirements
]
setuptools.setup(version=version, install_requires=install_requires)
acdf2d7b5b3ff94e3eb0532c45c48a8ab5d76c8d | 4,246 | py | Python | hotel_main/settings.py | Hotel-online/hotel-serveAntes | 725615fd823206e8abcbcef926d149003593b5cc | [
"MIT"
] | null | null | null | hotel_main/settings.py | Hotel-online/hotel-serveAntes | 725615fd823206e8abcbcef926d149003593b5cc | [
"MIT"
] | null | null | null | hotel_main/settings.py | Hotel-online/hotel-serveAntes | 725615fd823206e8abcbcef926d149003593b5cc | [
"MIT"
] | null | null | null | """
Django settings for hotel_main project on Heroku. For more info, see:
https://github.com/heroku/heroku-django-template
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from an environment variable before any production deployment.
SECRET_KEY = "3(ws245$vjewylqsk_-b(5xw7^1eg4u@@=a&=q@r1n7@3(9h74"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
# Disable Django's own staticfiles handling in favour of WhiteNoise, for
# greater consistency between gunicorn and `./manage.py runserver`. See:
# http://whitenoise.evans.io/en/stable/django.html#using-whitenoise-in-development
'whitenoise.runserver_nostatic',
'django.contrib.staticfiles',
'django.contrib.admindocs',
'rest_framework',
'core',
'hotel',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'hotel_main.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': DEBUG,
},
},
]
WSGI_APPLICATION = 'hotel_main.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# Local SQLite database for development. Note: this 'default' entry is
# overridden further down by $DATABASE_URL via dj_database_url.config().
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# NOTE(review): 'DATABASESx' is never read by Django (only DATABASES is),
# and it embeds Postgres credentials in plain text -- these should be
# rotated and moved out of source control.
DATABASESx = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'dfpbi6etuppp4n',
        'USER': 'zdsfnxsnixpepm',
        'PASSWORD': '240158d9c5b8a0401e9c426c7c9b75c18797e1281e9437d3f8d13a4264b144b2',
        'HOST': 'ec2-50-17-203-195.compute-1.amazonaws.com',
        'PORT': '5432',
    }
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Change 'default' database configuration with $DATABASE_URL.
DATABASES['default'].update(dj_database_url.config(conn_max_age=500))
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = [
os.path.join(PROJECT_ROOT, 'static'),
]
# Simplified static file serving.
# https://warehouse.python.org/project/whitenoise/
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
AUTH_USER_MODEL = 'core.User'
| 29.486111 | 91 | 0.709609 |
acdf306c1e8687d148449467a3efcf93ad62bc51 | 10,507 | py | Python | core/storage/auth/gae_models.py | oswalgopal/oppia | 7513e8eca5adc278974ad266b0ea3f59a646983d | [
"Apache-2.0"
] | 1 | 2021-03-16T12:04:07.000Z | 2021-03-16T12:04:07.000Z | core/storage/auth/gae_models.py | oswalgopal/oppia | 7513e8eca5adc278974ad266b0ea3f59a646983d | [
"Apache-2.0"
] | null | null | null | core/storage/auth/gae_models.py | oswalgopal/oppia | 7513e8eca5adc278974ad266b0ea3f59a646983d | [
"Apache-2.0"
] | 1 | 2022-02-14T22:03:53.000Z | 2022-02-14T22:03:53.000Z | # coding: utf-8
#
# Copyright 2020 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Models for managing user authentication."""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
from core.platform import models
import feconf
base_models, user_models = models.Registry.import_models(
[models.NAMES.base_model, models.NAMES.user])
datastore_services = models.Registry.import_datastore_services()
class UserAuthDetailsModel(base_models.BaseModel):
    """Stores the authentication details for a particular user.
    Instances of this class are keyed by user id.
    """
    # Authentication identifier from Google AppEngine (GAE). Exists only for
    # full users. None for profile users.
    gae_id = datastore_services.StringProperty(indexed=True)
    # Authentication identifier from the Firebase authentication server.
    # TODO(#11462): This will exist for all users after the Firebase migration,
    # so update this description once it has succeeded.
    firebase_auth_id = datastore_services.StringProperty(indexed=True)
    # For profile users, the user ID of the full user associated with them.
    # None for full users. Required for profiles because gae_id/firebase_auth_id
    # attribute is None for them, hence this attribute stores their association
    # with a full user who do have a gae_id/firebase_auth_id.
    parent_user_id = (
        datastore_services.StringProperty(indexed=True, default=None))
    @staticmethod
    def get_lowest_supported_role():
        """The lowest supported role here should be Learner."""
        return feconf.ROLE_ID_LEARNER
    @staticmethod
    def get_deletion_policy():
        """Model contains data to delete corresponding to a user: id, gae_id,
        firebase_auth_id, and parent_user_id fields.
        """
        return base_models.DELETION_POLICY.DELETE_AT_END
    @staticmethod
    def get_model_association_to_user():
        """Currently, the model holds authentication details relevant only for
        backend. Currently the only relevant user data is the username of the
        parent.
        """
        return base_models.MODEL_ASSOCIATION_TO_USER.ONE_INSTANCE_PER_USER
    @staticmethod
    def get_field_names_for_takeout():
        """We do not want to export the internal user id for the parent, so we
        export the username instead.
        """
        return {
            'parent_user_id': 'parent_username'
        }
    @classmethod
    def get_export_policy(cls):
        """Model doesn't contain any data directly corresponding to a user.
        Currently, the model holds authentication details relevant only for
        backend, and no exportable user data. It may contain user data in the
        future.
        """
        return dict(super(cls, cls).get_export_policy(), **{
            'gae_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
            'firebase_auth_id': base_models.EXPORT_POLICY.NOT_APPLICABLE,
            'parent_user_id': base_models.EXPORT_POLICY.EXPORTED
        })
    @classmethod
    def export_data(cls, user_id):
        """Exports the username of the parent."""
        user_auth_model = cls.get(user_id, strict=False)
        # Only profile users have a parent_user_id; full users (and missing
        # models) export nothing here.
        if user_auth_model and user_auth_model.parent_user_id:
            parent_data = user_models.UserSettingsModel.get(
                user_auth_model.parent_user_id)
            parent_username = parent_data.username
            return {'parent_username': parent_username}
        else:
            return {}
    @classmethod
    def apply_deletion_policy(cls, user_id):
        """Delete instances of UserAuthDetailsModel for the user.
        Args:
            user_id: str. The ID of the user whose data should be deleted.
        """
        cls.delete_by_id(user_id)
    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether UserAuthDetailsModel exists for the given user.
        Args:
            user_id: str. The ID of the user whose data should be checked.
        Returns:
            bool. Whether any UserAuthDetailsModel refers to the given user ID.
        """
        return cls.get_by_id(user_id) is not None
    @classmethod
    def get_by_auth_id(cls, provider_id, auth_id):
        """Fetch a user entry by auth_id of a particular auth service.
        Args:
            provider_id: str. Name of the provider of the auth ID.
            auth_id: str. Authentication detail corresponding to the
                authentication provider.
        Returns:
            UserAuthDetailsModel. The UserAuthDetailsModel instance having a
            particular user mapped to the given auth_id and the auth provider
            if there exists one, else None.
        """
        if provider_id == feconf.GAE_AUTH_PROVIDER_ID:
            return cls.query(cls.gae_id == auth_id).get()
        elif provider_id == feconf.FIREBASE_AUTH_PROVIDER_ID:
            return cls.query(cls.firebase_auth_id == auth_id).get()
        # Unknown provider IDs are not an error; they simply yield None.
        return None
class UserIdentifiersModel(base_models.BaseModel):
    """Stores the relationship between user ID and GAE ID.
    Instances of this class are keyed by GAE ID.
    """
    user_id = datastore_services.StringProperty(required=True, indexed=True)
    @staticmethod
    def get_deletion_policy():
        """Model contains data to delete corresponding to a user: id, and
        user_id fields.
        """
        return base_models.DELETION_POLICY.DELETE_AT_END
    @staticmethod
    def get_model_association_to_user():
        """Currently, the model holds identifiers relevant only for backend that
        should not be exported.
        """
        return base_models.MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER
    @classmethod
    def get_export_policy(cls):
        """Model doesn't contain any data directly corresponding to a user.
        Currently, the model holds authentication details relevant only for
        backend, and no exportable user data. It may contain user data in the
        future.
        """
        return dict(super(cls, cls).get_export_policy(), **{
            'user_id': base_models.EXPORT_POLICY.NOT_APPLICABLE
        })
    @classmethod
    def apply_deletion_policy(cls, user_id):
        """Delete instances of UserIdentifiersModel for the user.
        Args:
            user_id: str. The ID of the user whose data should be deleted.
        """
        # keys_only fetch avoids deserializing entities we are about to delete.
        datastore_services.delete_multi(
            cls.query(cls.user_id == user_id).fetch(keys_only=True))
    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether UserIdentifiersModel exists for the given user.
        Args:
            user_id: str. The ID of the user whose data should be checked.
        Returns:
            bool. Whether any UserIdentifiersModel refers to the given user ID.
        """
        return cls.query(cls.user_id == user_id).get(keys_only=True) is not None
    @classmethod
    def get_by_gae_id(cls, gae_id):
        """Fetch an entry by GAE ID.
        Args:
            gae_id: str. The GAE ID.
        Returns:
            UserIdentifiersModel. The model with user_id field equal to user_id
            argument.
        """
        # Instances are keyed by GAE ID, so this is a direct key lookup.
        return cls.get_by_id(gae_id)
    @classmethod
    def get_by_user_id(cls, user_id):
        """Fetch an entry by user ID.
        Args:
            user_id: str. The user ID.
        Returns:
            UserIdentifiersModel. The model with user_id field equal to user_id
            argument.
        """
        return cls.query(cls.user_id == user_id).get()
class UserIdByFirebaseAuthIdModel(base_models.BaseModel):
    """Stores the relationship between user ID and Firebase auth ID.
    Instances of this class are keyed by Firebase auth ID.
    """
    user_id = datastore_services.StringProperty(required=True, indexed=True)
    @staticmethod
    def get_deletion_policy():
        """Model has data to delete corresponding to users: id and user_id."""
        return base_models.DELETION_POLICY.DELETE_AT_END
    @staticmethod
    def get_model_association_to_user():
        """Currently, the model holds IDs relevant only for backend that should
        not be exported.
        """
        return base_models.MODEL_ASSOCIATION_TO_USER.NOT_CORRESPONDING_TO_USER
    @classmethod
    def get_export_policy(cls):
        """Model doesn't contain any data directly corresponding to a user.
        Currently, the model holds authentication details relevant only for
        backend, and no exportable user data. It may contain user data in the
        future.
        """
        return dict(
            super(UserIdByFirebaseAuthIdModel, cls).get_export_policy(),
            **{'user_id': base_models.EXPORT_POLICY.NOT_APPLICABLE})
    @classmethod
    def apply_deletion_policy(cls, user_id):
        """Delete instances of UserIdByFirebaseAuthIdModel for the user.
        Args:
            user_id: str. The ID of the user whose data should be deleted.
        """
        # keys_only fetch avoids deserializing entities we are about to delete.
        datastore_services.delete_multi(
            cls.query(cls.user_id == user_id).fetch(keys_only=True))
    @classmethod
    def has_reference_to_user_id(cls, user_id):
        """Check whether UserIdByFirebaseAuthIdModel exists for given user.
        Args:
            user_id: str. The ID of the user whose data should be checked.
        Returns:
            bool. Whether any UserIdByFirebaseAuthIdModel refers to the given
            user ID.
        """
        return cls.query(cls.user_id == user_id).get(keys_only=True) is not None
    @classmethod
    def get_by_user_id(cls, user_id):
        """Fetch an entry by user ID.
        Args:
            user_id: str. The user ID.
        Returns:
            UserIdByFirebaseAuthIdModel. The model with user_id field equal
            to user_id argument.
        """
        return cls.query(cls.user_id == user_id).get()
acdf30b89f5d2e93aafd8860b519d87d625c1d1c | 101,517 | py | Python | magnetovis/objects.py | rweigel/magnetovis | 2f466df4e29c2bb71c3d92294efea7f9eb036cec | [
"BSD-2-Clause"
] | null | null | null | magnetovis/objects.py | rweigel/magnetovis | 2f466df4e29c2bb71c3d92294efea7f9eb036cec | [
"BSD-2-Clause"
] | 2 | 2020-11-06T10:20:08.000Z | 2021-01-25T17:44:48.000Z | magnetovis/objects.py | rweigel/magnetovis | 2f466df4e29c2bb71c3d92294efea7f9eb036cec | [
"BSD-2-Clause"
] | 1 | 2021-05-22T11:35:18.000Z | 2021-05-22T11:35:18.000Z | import os
import sys
import tempfile
import numpy as np
from magnetovis import util
#def trajectory([L, theta, phi], [phase_angle, pitch_angle, E], dt=..., e_over_m=...)
# Plots a tube showing trajectory.
# L in R_E
# angles in degrees
# Energy in keV
# e_over_m = positive or negative
# t = run time in seconds
# dt = time step in seconds
def rotation_matrix(axis, theta):
    """
    Return the 3x3 matrix for a CCW rotation about ``axis`` by ``theta``.

    Parameters
    ----------
    axis : array_like
        3-vector giving the rotation axis (need not be normalized).
    theta : float
        Rotation angle in **degrees**. (The previous docstring said radians,
        but the value is explicitly converted with ``np.deg2rad`` below.)

    Returns
    -------
    numpy.ndarray
        3x3 rotation matrix; apply to a column vector as ``R @ v``.
    """
    # from: https://stackoverflow.com/questions/6802577/rotation-of-3d-vector/6802723#6802723
    # Normalize the axis; force float dtype so integer inputs are safe.
    axis = np.asarray(axis, dtype=float)
    axis = axis / np.sqrt(np.dot(axis, axis))
    theta = np.deg2rad(theta)
    # Quaternion components for the rotation (Euler-Rodrigues formula).
    a = np.cos(theta / 2.0)
    b, c, d = -axis * np.sin(theta / 2.0)
    aa, bb, cc, dd = a * a, b * b, c * c, d * d
    bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
    return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],
                     [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],
                     [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])
def cutplane(run='DIPTSUR2', time=(2019,9,2,4,10,0,0), plane='xz', var='p',
        renderView=None, render=True, show=True, debug=True):
    """Download a demo simulation cut-plane VTK file and display it in ParaView.

    Only the 'xz' plane is supported; other values raise ValueError.
    NOTE(review): ``run``, ``time`` and ``debug`` are accepted but unused in
    this body, and ``extend`` below is assigned but never used.
    """
    from hapiclient.util import urlretrieve
    if plane == 'xz':
        extend=[[-55,25],[-55,55]]
    else:
        raise ValueError ('only xz plane currently supported')
    vtk_fname = '%s_GSM_plane_%s_demo.vtk'%(plane,var)
    # Download demo vtk from online server and save to /tmp/
    vtk_url = 'http://mag.gmu.edu/git-data/magnetovis/simulation/' + vtk_fname
    retd = urlretrieve(vtk_url, '/tmp/'+vtk_fname, check_last_modified=False)
    import paraview.simple as pvs
    if not renderView:
        renderView = pvs.GetActiveViewOrCreate('RenderView')
    # load vtk from /tmp/ into reader object
    cutplane_vtk = pvs.LegacyVTKReader(FileNames=['/tmp/'+vtk_fname])
    # show data in view
    cutplane_vtkDisplay = pvs.Show(cutplane_vtk, renderView)
    if not show:
        pvs.Hide(cutplane_vtk, renderView)
    if render:
        # Render all display objects in renderView
        pvs.Render()
def _dipole_field(self, output, time, extend, NxNyNz, coord_sys):
    """
    Populate a ParaView programmable-source output with a dipole B field.

    extend [[x0,y0,z0],[x1,y1,z1]] points of the corner across the diagonal of the grid
    NxNyNz: number of points along each axis

    NOTE(review): ``M`` (the dipole moment) is referenced below but is not a
    parameter or local of this function -- presumably it is injected into the
    script's global namespace by the programmable-source wrapper; verify.
    """
    import numpy as np
    from hxform import hxform as hx
    def structured_grid(output, points, F):
        """Write ``points`` and the point-data arrays in ``F`` into the
        structured-grid ``output``."""
        import vtk
        from vtk.numpy_interface import dataset_adapter as dsa
        if False:
            # this is never meant to run. it is only to get rid of error message
            # that output is not defined. output is defined when running
            # this script in the programmable source text box.
            output = ''
        # communication between "script" and "script (RequestInformation)"
        executive = self.GetExecutive()
        outInfo = executive.GetOutputInformation(0)
        exts = [executive.UPDATE_EXTENT().Get(outInfo, i) for i in range(6)]
        dims = [exts[1]+1, exts[3]+1, exts[5]+1]
        # setting the sgrid extent
        output.SetExtent(exts)
        # setting up the points and allocate the number points
        pvtk = dsa.numpyTovtkDataArray(points)
        pts = vtk.vtkPoints()
        pts.Allocate(dims[0] * dims[1] * dims[2])
        pts.SetData(pvtk)
        output.SetPoints(pts)
        for name, data in F.items():
            fvtk = dsa.numpyTovtkDataArray(data)
            fvtk.SetName(name)
            output.GetPointData().AddArray(fvtk)
    extend = np.array(extend)
    xax = np.linspace(extend[0,0],extend[1,0], NxNyNz[0])
    yax = np.linspace(extend[0,1],extend[1,1], NxNyNz[1])
    zax = np.linspace(extend[0,2],extend[1,2], NxNyNz[2])
    # meshgrid ordering chosen so x varies fastest in the flattened arrays.
    Y, Z, X = np.meshgrid(yax, zax, xax)
    points = np.column_stack([X.flatten(), Y.flatten(), Z.flatten()])
    r = np.linalg.norm(points,axis=1)
    data_arrays = {}
    if coord_sys != 'GSM':
        points = hx.transform(points, time, 'GSM', coord_sys)
    B = np.zeros(points.shape)
    B[:,0] = 3*M*points[:,0]*points[:,2]/r**5 # Bx = 3*M*x*z/r^5
    B[:,1] = 3*M*points[:,1]*points[:,2]/r**5 # By = 3*M*y*z/r^5
    B[:,2] = M*(3*points[:,2]**2-r**2)/r**5 # Bz = M(3*z^2 - r^2)/r^5
    data_arrays['B_field'] = B
    data_arrays['distance'] = r
    structured_grid(output, points, data_arrays) # S is programmable source
def dipole_field(time, M=7.788E22, coord_sys='GSM', extend=None, NxNyNz=None):
    """Add a dipole-field structured-grid object to the ParaView pipeline.

    Parameters
    ----------
    time : tuple
        Time tuple forwarded to the wrapper (used for coordinate transforms).
    M : float
        Dipole moment (default 7.788e22).
    coord_sys : str
        Coordinate system of the grid (default 'GSM').
    extend : list, optional
        [[x0, y0, z0], [x1, y1, z1]] opposite corners of the grid;
        defaults to [[-21, -21, -21], [21, 21, 21]].
    NxNyNz : list, optional
        Number of grid points along each axis; defaults to [22, 22, 22].
    """
    # Use None sentinels instead of mutable list defaults, which would be
    # shared (and potentially mutated) across calls.
    if extend is None:
        extend = [[-21, -21, -21], [21, 21, 21]]
    if NxNyNz is None:
        NxNyNz = [22, 22, 22]
    return objs_wrapper(time=time, extend=extend, NxNyNz=NxNyNz,
                        coord_sys=coord_sys, M=M, representation='Surface',
                        obj='dipole field')
def trajectory():
    """Placeholder: plot a particle trajectory from a demo VTK file."""
    # TODO: read http://mag.gmu.edu/git-data/magnetovis/trajectory/demo.vtk and plot it
    pass
#$ cd ~/.local/progs/ParaView-5.9.1-MPI-Linux-Python3.8-64bit/lib/python3.8/
#$ ln -s ./__pycache__/_sysconfigdata__linux_x86_64-linux-gnu.cpython-38.pyc _sysconfigdata__linux_x86_64-linux-gnu.pyc
def earth(time,
          coord_sys='GSM',
          renderView=None,
          render=True,
          show=True,
          out_dir=tempfile.gettempdir(),
          topo_url='http://mag.gmu.edu/git-data/magnetovis/topography/world.topo.2004{0:02d}.3x5400x2700.png',
          debug=False):
    """Show Earth sphere in a given coordinate system with a topographic overlay.

    The topography PNG for the given month (time[1]) is downloaded into
    ``out_dir`` if not present, a sphere VTK file is written (or reused if it
    already exists), and both are loaded into the given/active renderView.
    Returns (sphereDisplay, renderView, sphereVTK).
    Note: the ``out_dir`` default is evaluated once at import time.
    """
    def writevtk(time, coord_sys=coord_sys,
                 Nt=100, Np=100,
                 out_dir=out_dir, debug=debug, ftype='BINARY'):
        """Write VTK file for a sphere rotated into a given coordinate system"""
        import numpy as np
        from hxform import hxform as hx
        from magnetovis.vtk.vtk_export import vtk_export
        fnameVTK = os.path.join(out_dir, 'earth-' + coord_sys + '-' + util.tstr(time, length=5) +'.vtk')
        # Cached: reuse a previously written file for this time/coord_sys.
        if os.path.exists(fnameVTK):
            return fnameVTK
        R = 1.
        theta = np.linspace(0., np.pi, Nt)
        phi = np.linspace(0., 2.*np.pi, Np)
        B1, B2 = np.meshgrid(phi, theta)
        B1 = B1.flatten(order='C')
        B2 = B2.flatten(order='C')
        # Normalized texture coordinates mapping the PNG onto the sphere.
        normPhi = np.linspace(0., 1., Np)
        normTheta = np.flipud(np.linspace(0., 1., Nt))
        u, v = np.meshgrid(normPhi, normTheta)
        u = u.flatten(order='C')
        v = v.flatten(order='C')
        UV = np.column_stack((u, v))
        PI = np.pi*np.ones((B1.size, ))
        x = R*np.cos(B1+PI)*np.sin(B2)
        y = R*np.sin(B1+PI)*np.sin(B2)
        z = R*np.cos(B2)
        XYZr = np.column_stack((x, y, z))
        # Sphere is built in GEO; rotate into the requested system.
        if coord_sys != 'GEO':
            XYZr = hx.transform(XYZr, time, 'GEO', coord_sys)
        # NOTE(review): the next line is a no-op leftover; the same dict is
        # passed to vtk_export below.
        {"name":'Angular_Coords_for_PNG', "array":UV, "texture":'TEXTURE_COORDINATES'}
        vtk_export(fnameVTK, XYZr,
                   dataset = 'STRUCTURED_GRID',
                   connectivity = {'DIMENSIONS':(Nt, Np, 1)},
                   point_data = {"name":'Angular_Coords_for_PNG', "array":UV, "texture":'TEXTURE_COORDINATES'},
                   title='Earth',
                   ftype=ftype,
                   debug=debug)
        return fnameVTK
    urlPNG = topo_url.format(time[1])
    filePNG = os.path.join(out_dir, os.path.split(topo_url)[1].format(time[1]))
    # Download topographic overlay file if not found.
    from hapiclient.util import urlretrieve
    if not os.path.exists(filePNG):
        if debug:
            print("Downloading " + urlPNG)
        urlretrieve(urlPNG, filePNG)
        if debug:
            print("Downloaded " + urlPNG + "\nto\n" + filePNG)
    # Save VTK file
    fileVTK = writevtk(time)
    # Import statement down here so we can test above code w/o paraview.
    import paraview.simple as pvs
    # Create VTK object
    # TODO: It should be possible to not need to write a file. See
    # https://stackoverflow.com/questions/59273490/python-read-vtk-file-add-data-set-then-write-vtk
    # https://blog.kitware.com/improved-vtk-numpy-integration/
    sphereVTK = pvs.LegacyVTKReader(FileNames=[fileVTK])
    if not renderView:
        renderView = pvs.GetActiveViewOrCreate('RenderView')
    # Create a display object in the renderView
    sphereDisplay = pvs.Show(sphereVTK, renderView)
    # Defaults shown by Python trace for the display properties of a sphere
    sphereDisplay.Representation = 'Surface'
    sphereDisplay.ColorArrayName = [None, '']
    sphereDisplay.OSPRayScaleFunction = 'PiecewiseFunction'
    sphereDisplay.SelectOrientationVectors = 'None'
    sphereDisplay.ScaleFactor = 0.4
    sphereDisplay.SelectScaleArray = 'None'
    sphereDisplay.GlyphType = 'Arrow'
    sphereDisplay.GlyphTableIndexArray = 'None'
    sphereDisplay.DataAxesGrid = 'GridAxesRepresentation'
    sphereDisplay.PolarAxes = 'PolarAxesRepresentation'
    sphereDisplay.ScalarOpacityUnitDistance = 0.15493986305312726
    # Apply overlay
    textureProxy = pvs.servermanager.CreateProxy("textures", "ImageTexture")
    textureProxy.GetProperty("FileName").SetElement(0, filePNG)
    textureProxy.UpdateVTKObjects()
    sphereDisplay.Texture = textureProxy
    if not show:
        pvs.Hide(sphereVTK, renderView)
    if render:
        # Render all display objects in renderView
        pvs.Render()
    pvs.RenameSource('Earth - {} {}'.format(coord_sys, util.tstr(time,6)))
    return sphereDisplay, renderView, sphereVTK
def field_data(time, Xgrid, values, dims, texture, # dims = [Nx,Ny,Nz]
               var = 'dummy_variable',
               out_filename = os.path.join(tempfile.gettempdir(), 'structured_grid_dummy'),
               renderView=None,
               render=True,
               show=True,
               debug=True, sum_total=False):
    """Export point data on a structured grid to a legacy VTK file and
    display it in ParaView as points.

    The VTK file is only written when it does not already exist; when
    sum_total is True the component-wise sum of the data is appended to the
    dataset title. Returns the LegacyVTKReader source proxy.
    """
    from magnetovis.vtk.vtk_export import vtk_export

    if os.path.exists(out_filename):
        if debug: print(out_filename + ' ALREADY EXISTS')
    else:
        # Optionally append the summed data values to the dataset title.
        title_suffix = ' ' + str(np.sum(values, axis=0)) if sum_total else ''
        vtk_export(out_filename, Xgrid,
                   dataset='STRUCTURED_GRID',
                   connectivity=dims,
                   point_data=values,
                   texture=texture,
                   point_data_name=var,
                   title=var + 'field' + title_suffix,
                   ftype='BINARY',
                   debug=debug)

    import paraview.simple as pvs

    if not renderView:
        renderView = pvs.GetActiveViewOrCreate('RenderView')

    # Read the file back and show it with a point representation.
    structured_gridvtk = pvs.LegacyVTKReader(FileNames=[out_filename])
    grid_display = pvs.Show(structured_gridvtk, renderView)
    grid_display.Representation = 'Points'
    grid_display.ScaleFactor = 21.0
    grid_display.ScalarOpacityUnitDistance = 5.766431907

    if not show:
        pvs.Hide(structured_gridvtk, renderView)

    if render:
        # print title of structured grid vtk (including total if summed)
        banner = '\n\n########################\n########################'
        print(banner)
        print('\n\n' + 'structured_gridvtk.' + '\n\n')
        print(banner)
        # Render all display objects in renderView
        pvs.Render()

    return structured_gridvtk
def plane_data(time, Ugrid, values, dims, texture,
               var = 'dummy_variable',
               out_filename = os.path.join(tempfile.gettempdir(), 'plane_grid_dummy'),
               renderView=None,
               render=True,
               show=True,
               debug=True, sum_total=False):
    """Placeholder for exporting/showing data on a 2-D plane grid.

    Not implemented: the body is a no-op. The signature mirrors field_data()
    so callers can be written against the eventual implementation.
    """
    pass
if False:
    # FIX: removed an unreachable block of dead code. Earlier versions of
    # slice(), get_color_transfer_function(), and location_on_earth() were
    # preserved here as a commented-out string inside this never-executed
    # `if False:` guard. Recover them from version control if needed.
    pass
def magnetic_dipole(time,
                    renderView=None,
                    render=True,
                    show=True,
                    out_dir=tempfile.gettempdir(),
                    debug=False):
    """Draw the magnetic dipole axis by rendering the +z axis of the MAG
    coordinate system (15 Re outward, no negative branch, no label)."""
    # The dipole axis is exactly the MAG z-axis, so delegate to axis().
    axis_options = {
        'coord_sys': 'MAG',
        'length_positive': 15.,
        'length_negative': 0.,
        'tick_spacing': 1,
        'label': False,
        'renderView': renderView,
        'render': render,
        'show': show,
        'out_dir': out_dir,
        'debug': debug,
    }
    axis(time, 'z', **axis_options)
def trace_lines(points, connectivity, out_fname=os.path.join(tempfile.gettempdir(),'line_tmp.vtk'),
                color=[1,0,0], ftype='BINARY',
                renderView=None,
                render=True,
                show=True,
                debug=False):
    """Render a set of polylines (e.g. field lines) as colored tubes.

    Parameters
    ----------
    points : (N, 3) array
        Vertices of all polylines, concatenated.
    connectivity : dict
        Passed through to vtk_export, e.g. {'LINES': conn} where conn gives
        the number of points in each line.
    out_fname : str
        Path of the VTK file to write; reused if it already exists.
    color : list
        RGB diffuse color applied to the tubes.
    ftype : str
        'BINARY' or 'ASCII' VTK encoding.
    renderView, render, show, debug :
        ParaView display / verbosity controls.
    """
    # CONSISTENCY FIX: import vtk_export the same way as earth()/field_data()
    # instead of mutating sys.path and importing a bare top-level module.
    from magnetovis.vtk.vtk_export import vtk_export

    if os.path.exists(out_fname):
        # CONSISTENCY FIX: gate the cache message on debug like field_data().
        if debug:
            print(out_fname + ' ALREADY EXISTS')
    else:
        vtk_export(out_fname, points,
                   dataset='POLYDATA',
                   connectivity=connectivity,
                   ftype=ftype)

    # Import statement down here so we can test above code w/o paraview.
    import paraview.simple as pvs

    if renderView is None:
        renderView = pvs.GetActiveViewOrCreate('RenderView')

    fileVTK = out_fname

    # Read the polylines back and show them.
    field_linevtk = pvs.LegacyVTKReader(FileNames=[fileVTK])
    field_linevtkDisplay = pvs.Show(field_linevtk, renderView)

    # Defaults shown by Python trace for the display properties.
    field_linevtkDisplay.Representation = 'Surface'
    field_linevtkDisplay.ColorArrayName = [None, '']
    field_linevtkDisplay.OSPRayScaleFunction = 'PiecewiseFunction'
    field_linevtkDisplay.SelectOrientationVectors = 'None'
    field_linevtkDisplay.ScaleFactor = 0.20896326303482057
    field_linevtkDisplay.SelectScaleArray = 'None'
    field_linevtkDisplay.GlyphType = 'Arrow'
    field_linevtkDisplay.GlyphTableIndexArray = 'None'
    field_linevtkDisplay.DataAxesGrid = 'GridAxesRepresentation'
    field_linevtkDisplay.PolarAxes = 'PolarAxesRepresentation'

    # Wrap the thin polylines in tubes so they are visible.
    tube1 = pvs.Tube(Input=field_linevtk, guiName=fileVTK)
    tube1.Scalars = [None, '']
    tube1.Vectors = [None, '1']
    tube1.Radius = 0.05
    # Properties modified on tube1
    tube1.Vectors = [None, '']

    tube1Display = pvs.Show(tube1, renderView)
    tube1Display.Representation = 'Surface'
    tube1Display.ColorArrayName = [None, '']
    tube1Display.OSPRayScaleArray = 'TubeNormals'
    tube1Display.OSPRayScaleFunction = 'PiecewiseFunction'
    tube1Display.SelectOrientationVectors = 'None'
    tube1Display.ScaleFactor = 0.2129082262516022
    tube1Display.SelectScaleArray = 'None'
    tube1Display.GlyphType = 'Arrow'
    tube1Display.GlyphTableIndexArray = 'None'
    tube1Display.DataAxesGrid = 'GridAxesRepresentation'
    tube1Display.PolarAxes = 'PolarAxesRepresentation'

    # Hide the raw lines; only the tubes are shown.
    pvs.Hide(field_linevtk, renderView)

    # change solid color
    tube1Display.DiffuseColor = color

    # FIX: honor the render/show parameters, which were previously accepted
    # but ignored (every other display function in this module uses them).
    if not show:
        pvs.Hide(tube1, renderView)
    if render:
        pvs.Render()
def _latitude_lines(self, time, coord_sys='GEO', increment=15, color=[1,0,0]):
    """Programmable-source script body: build polylines of constant latitude
    on the unit sphere, transformed from GEO into `coord_sys`.

    Writes into self.GetPolyDataOutput() and adds a point-data array named
    'lat_lon' filled with 0 to tag these points as latitude lines (longitude
    lines use 1).

    NOTE(review): `color` is accepted but never used in this body — presumably
    applied by the caller via the 'lat_lon' array; confirm. The mutable
    default is harmless since it is never mutated here.
    """
    import numpy as np
    import numpy.matlib
    import vtk
    from hxform import hxform as hx
    #from magnetovis import cxtransform as cx

    # 5-degree sampling along each circle; `increment` degrees between circles.
    lon = np.arange(0, 360 + 5, 5)
    lat = np.arange(-90, 90 + increment, increment)
    # Despite the names: lon_repeat counts latitude circles (how many times
    # the lon array is tiled) and lat_repeat counts points per circle.
    lon_repeat = len(lat)
    lat_repeat = len(lon)
    lon = np.matlib.repmat(lon, 1, lon_repeat).flatten()
    lat = np.repeat(lat,lat_repeat)
    r = np.ones(lon_repeat*lat_repeat)
    sph_coords = np.column_stack((r,lat,lon))
    # Spherical (r, lat, lon) in GEO -> Cartesian in the requested system.
    points = hx.transform(sph_coords, time, 'GEO', coord_sys, ctype_in='sph', ctype_out='car')

    pdo = self.GetPolyDataOutput()
    pdo.Allocate(len(r), 1)
    pts = vtk.vtkPoints()
    lon_size = np.unique(lon).size
    lat_size = np.unique(lat).size
    # One vtkPolyLine per latitude circle; points are laid out circle-major.
    for i in range(lat_size):
        polyline = vtk.vtkPolyLine()
        polyline.GetPointIds().SetNumberOfIds(lon_size)
        for j in range(lon_size):
            pts_index = j+i*lon_size
            pts.InsertPoint(pts_index, points[pts_index,0], points[pts_index,1], points[pts_index,2] )
            polyline.GetPointIds().SetId(j,pts_index)
        pdo.InsertNextCell(polyline.GetCellType(), polyline.GetPointIds())
    pdo.SetPoints(pts)

    # Scalar tag so latitude (0) and longitude (1) lines can be colored apart.
    colors = vtk.vtkUnsignedCharArray()
    colors.SetNumberOfComponents(1)
    colors.SetName('lat_lon')
    for n in range(r.size):
        colors.InsertNextTuple([0]) # 0 for lat, # 1 for lon
    pdo.GetPointData().AddArray(colors)
def _longitude_lines(self, time, coord_sys='GSM', increment=15, color=[1,0,0]):
    """Programmable-source script body: build polylines of constant longitude
    (meridians) on the unit sphere, transformed into `coord_sys`.

    Writes into self.GetPolyDataOutput() and adds a point-data array named
    'lat_lon' filled with 1 to tag these points as longitude lines (latitude
    lines use 0).

    NOTE(review): the transform source frame here is 'GSM', while
    _latitude_lines uses 'GEO' — verify this asymmetry is intentional.
    NOTE(review): `color` is accepted but never used in this body.
    """
    import numpy as np
    import numpy.matlib
    import vtk
    from hxform import hxform as hx
    #from magnetovis import cxtransform as cx

    # `increment` degrees between meridians; 5-degree sampling along each.
    lon = np.arange(0,360 + increment, increment) #360/npts) # [0, 90, 180, 270]
    lat = np.arange(-90,90 + 5, 5) # [-90, -45, 0, 45, 90]
    # Despite the names: lon_repeat counts points per meridian and lat_repeat
    # counts meridians (mirror image of _latitude_lines).
    lon_repeat = len(lat) # 5
    lat_repeat = len(lon) # 4
    lat = np.matlib.repmat(lat, 1, lat_repeat).flatten()
    lon = np.repeat(lon,lon_repeat)
    r = np.ones(lon_repeat*lat_repeat)
    sph_coords = np.column_stack((r,lat,lon))
    points = hx.transform(sph_coords, time, 'GSM', coord_sys, ctype_in='sph', ctype_out='car')

    ### start of vtk
    pdo = self.GetPolyDataOutput()
    pdo.Allocate(len(r), 1)
    pts = vtk.vtkPoints()
    lon_size = np.unique(lon).size
    lat_size = np.unique(lat).size
    # One vtkPolyLine per meridian; points are laid out meridian-major.
    for i in range(lon_size): # 4
        polyline = vtk.vtkPolyLine()
        polyline.GetPointIds().SetNumberOfIds(lat_size)
        for j in range(lat_size): # 5
            pts_index = j+i*lat_size
            pts.InsertPoint(pts_index, points[pts_index,0], points[pts_index,1], points[pts_index,2] )
            polyline.GetPointIds().SetId(j,pts_index)
        pdo.InsertNextCell(polyline.GetCellType(), polyline.GetPointIds())
    pdo.SetPoints(pts)

    # Scalar tag so latitude (0) and longitude (1) lines can be colored apart.
    colors = vtk.vtkUnsignedCharArray()
    colors.SetNumberOfComponents(1)
    colors.SetName('lat_lon')
    for n in range(r.size):
        colors.InsertNextTuple([1]) # 0 for lat, # 1 for lon
    pdo.GetPointData().AddArray(colors)
def latitude_lines(time, coord_sys='GEO', increment=15, color=[0,0,1],
                   representation='Surface', renderView=None,
                   render=True, show=True, show_annotations=False):
    """Create latitude circles on the unit sphere via the generic objs_wrapper."""
    options = dict(time=time, coord_sys=coord_sys, increment=increment,
                   color=color, representation=representation,
                   renderView=renderView, render=render, show=show,
                   show_annotations=show_annotations)
    options['obj'] = 'latitude'
    return objs_wrapper(**options)
def longitude_lines(time, coord_sys='GEO', increment=15, color=[0,.5,1],
                    representation='Surface', renderView=None,
                    render=True, show=True, show_annotations=False):
    """Create longitude meridians on the unit sphere via the generic objs_wrapper."""
    options = dict(time=time, coord_sys=coord_sys, increment=increment,
                   color=color, representation=representation,
                   renderView=renderView, render=render, show=show,
                   show_annotations=show_annotations)
    options['obj'] = 'longitude'
    return objs_wrapper(**options)
def plane(time, val, extend=[[-40,40],[-40,40]], coord_sys='GSM', labels=True,
          opacity=.25, renderView=None, render=True, show=True,):
    """Show the 'XY', 'XZ', or 'YZ' GSM plane as a colored, semi-transparent surface.

    Parameters
    ----------
    time :
        Time specification; required when coord_sys != 'GSM' (used for the
        GSM -> coord_sys rotation) and for the source name.
    val : str
        Which plane to draw: 'XY', 'XZ', or 'YZ'.
    extend : 2x2 list/tuple/ndarray
        [[min1, max1], [min2, max2]] extent along the plane's two axes (Re).
    coord_sys : str
        Coordinate system the plane is displayed in.
    labels : bool
        Unused in this body; kept for interface compatibility.
    opacity : float
        Surface opacity.
    renderView, render, show :
        ParaView display controls.

    Returns
    -------
    (planeDisplay, renderView, plane)
    """
    import numpy as np
    import paraview.simple as pvs
    from hxform import hxform as hx

    # BUG FIX: `isinstance(extend, list or tuple or np.ndarray)` evaluated to
    # `isinstance(extend, list)` and wrongly rejected tuples and arrays; use
    # a tuple of types instead.
    assert isinstance(extend, (list, tuple, np.ndarray)), \
        'extend has to be either a list, tuple, or numpy.ndarray'
    extend = np.array(extend)

    # BUG FIX: .format() previously applied only to the second string
    # fragment, so the axis names/values were never substituted.
    assert extend[0,0] < extend[0,1], \
        ('lower bound for {}-axis is higher than upper bound for {}-axis'
         '\n extend[0]={}').format(val[0], val[0], extend[0])
    assert extend[1,0] < extend[1,1], \
        ('lower bound for {}-axis is higher than upper bound for {}-axis'
         '\n extend[1]={}').format(val[1], val[1], extend[1])

    # Three corner points (origin, point1, point2) spanning the rectangle.
    col1 = np.array((extend[0,0], extend[0,1], extend[0,0]))
    col2 = np.array((extend[1,0], extend[1,0], extend[1,1]))

    if val == 'XY':
        c1 = 0
        c2 = 1
        color = [1, 0, 0]
    elif val == 'XZ':
        c1 = 0
        c2 = 2
        color = [1, 1, 0.5]
    elif val == 'YZ':
        c1 = 1
        c2 = 2
        color = [0, 1, 0.1]
    else:
        assert False, 'val should be "XY", "XZ", or "YZ"'

    exarray = np.zeros((3,3))
    exarray[:,c1] = col1
    exarray[:,c2] = col2

    if coord_sys != 'GSM':
        # BUG FIX: identity comparison (`is not None`) instead of `!= None`.
        assert time is not None, 'If coord_sys is not GSM then time cannot be None'
        exarray = hx.transform(exarray, time, 'GSM', coord_sys, 'car', 'car')

    plane = pvs.Plane()
    if not renderView:
        renderView = pvs.GetActiveViewOrCreate('RenderView')
    plane.Origin = exarray[0]
    plane.Point1 = exarray[1]
    plane.Point2 = exarray[2]

    planeDisplay = pvs.Show(plane, renderView)
    planeDisplay.Representation = 'Surface'
    planeDisplay.Opacity = opacity

    # Categorical lookup table so the plane gets a fixed color + annotation.
    scalar_data = '{} axes'.format(val)
    LUT = pvs.GetColorTransferFunction('{} plane'.format(val))
    LUT.IndexedColors = color
    LUT.Annotations = ['0', val]
    LUT.InterpretValuesAsCategories = 1
    LUT.AnnotationsInitialized = 1
    planeDisplay.LookupTable = LUT
    planeDisplay.OpacityArray = ['POINTS', scalar_data]
    planeDisplay.ColorArrayName = ['POINTS', scalar_data]
    #planeDisplay.SetScalarBarVisibility(renderView, True)

    pvs.RenameSource('{}-plane {} {}'.format(val, coord_sys, util.tstr(time, length=5)))

    if not show:
        # BUG FIX: Hide() with no arguments hides the active source, which may
        # not be this plane; hide the plane explicitly.
        pvs.Hide(plane, renderView)
    if render:
        pvs.Render()

    return planeDisplay, renderView, plane
if False:
    # FIX: removed an unreachable block of dead code. A draft fieldlines()
    # implementation (odeint-based field-line tracing with stop conditions)
    # was preserved here as a commented-out string inside this never-executed
    # `if False:` guard. Recover it from version control if needed.
    pass
def _plasmapause(self, output, N, coord_sys, time):
    """Programmable-source script body: build an unstructured hexahedral grid
    of H+ log10 density for the plasmasphere.

    coordinate system is in Spherical SM coordinates with angle in radians

    log(n) = a1 * F(L) * G(L) * H(L) = 1.5
    where,
        F(L) = a2 - e ** (a3 * (1 - a4 * e ** (-h(L,Lambda) / a5)))
        G(L) = a6 * L + a7
        H(L) = (1 + (L / a8) ** (2 * (a9 - 1))) ** (-a9 / (a9 - 1))
        L = R/cos**2(Lambda)  # used by SSCWEB
        L is the McIlwain L-Shell parameter.
        h(L, Lambda) is the height above the Earth's surface
            h = 6371.2*(1.-R)  # according to SSCWEB
        and Lambda is the geomagnetic latitude

    constants:
        a1 = 1.4
        a2 = 1.53
        a3 = -0.036
        a4 = 30.76
        a5 = 159.9
        a7 = 6.27
    also,
        a6 = -0.87 + 0.12 * e ** (-x**2 / 9)
        a8 = 0.7 * cos(2 * pi * ((MLT - 21) / 24)) + 4.4
        a9 = 15.3 * cos(2 * pi * MLT / 24) + 19.7
    also, also
        MLT = (PHI*RAD/15.) - 12.
        x = MLT
        MLT is the magnetic local time measured in HH; MLT=0=24 is midnight
        and MLT=12 is noon. MLT domain is [0,24); x domain is [-12,12].
        PHI is the longitude; THETA is the latitude.
    """
    import numpy as np
    import numpy.matlib
    import vtk
    from copy import deepcopy
    from hxform import hxform as hx
    #from magnetovis import cxtransform as cx

    def logDen(r, theta, phi):
        # H+ log10 density at spherical SM point (r [Re], theta = latitude
        # [rad], phi = longitude [rad]) using the constants from the docstring.
        a1 = 1.4
        a2 = 1.53
        a3 = -0.036
        a4 = 30.76
        a5 = 159.9
        a7 = 6.27
        # Magnetic local time from longitude; x is MLT folded into [-12, 12].
        MLT = (phi*180/np.pi/15.) - 12.
        x = deepcopy(MLT)
        if MLT >= 24: MLT = MLT - 24
        if MLT < 0: MLT = MLT + 24
        if x > 12: x = x - 24
        if x< - 12: x = x + 24
        a6 = -0.87 + 0.12 * np.exp(-x*x/9.)
        a8 = 0.7 * np.cos(2*np.pi* (MLT-21.)/24.) + 4.4
        a9 = 15.3 * np.cos(2*np.pi*MLT/24.) + 19.7
        F = a2 - np.exp(a3 * (1.-a4 * np.exp(6371.2*(1.-r)/a5)))
        # L = r / cos^2(latitude)
        C2LAM = np.cos(theta)*np.cos(theta)
        G = (a6*r/C2LAM) + a7
        H = (1. + (r /(C2LAM*a8)) ** (2. * (a9 - 1.))) ** (-a9 / (a9 - 1.))
        n_log = a1 * F * G * H
        return n_log

    # Spherical grid: N radii in [rmin, 6) Re, N latitudes between +/-62 deg,
    # N longitudes covering the full circle.
    rmin = 1.05
    dphi = 2.*np.pi/N
    r_ax = np.arange(rmin,6,(6-rmin)/N) # make radius out to 6.
    theta_i = 28*np.pi/180
    theta_f = 152 * np.pi/180
    theta_step = (theta_f-theta_i)/N
    theta_ax = np.arange(theta_i,theta_f,theta_step)
    theta_ax = np.pi/2. - theta_ax # converting from colatitude to latitude
    phi_ax = dphi*np.arange(N)

    # Kronecker products expand the three 1-D axes into N^3 points ordered
    # r-major, then theta, then phi.
    phi = np.kron(np.ones(N),phi_ax)
    theta = np.kron(theta_ax,np.ones(N))
    r = np.kron(r_ax, np.ones(N**2))
    phi = np.kron(np.ones(N), phi)
    theta = np.kron(np.ones(N), theta)
    P = np.column_stack([r,theta,phi])

    # Spherical (r, lat, lon) -> Cartesian, in SM.
    P_cartesian = np.nan*np.empty(P.shape)
    P_cartesian[:,0] = P[:,0]*np.cos(P[:,2])*np.cos(P[:,1]) # x = r cos(phi) cos(theta)
    P_cartesian[:,1] = P[:,0]*np.sin(P[:,2])*np.cos(P[:,1]) # y = r sin(phi) cos(theta)
    P_cartesian[:,2] = P[:,0]*np.sin(P[:,1]) # z = r sin(theta)
    if coord_sys != 'SM':
        P_cartesian = hx.transform(P_cartesian, time, 'SM', coord_sys, 'car', 'car')

    ind = np.arange(N**3).reshape((N,N,N)) # ind is (50,50,50) going from 0-124999

    #PERIODIC IN PHI DIRECTION (indexed by k)
    indPeriodic = np.zeros((N,N,N+1), dtype=int) # shape: (50,50,51)
    indPeriodic[:,:,:-1] = ind # the same as ind except with an extra column of zeros
    indPeriodic[:,:,-1] = ind[:,:,0] # the last row which was all zeros is now a copy of the first row

    # Hexahedral cells between neighboring grid points; the extra phi column
    # closes the grid around the full circle.
    V_Periodic = []
    for i in range(N-1):
        for j in range(N-1):
            for k in range(N):
                V_Periodic.append( (indPeriodic[i,j,k], indPeriodic[i+1,j,k], indPeriodic[i+1,j+1,k], indPeriodic[i,j+1,k],
                                    indPeriodic[i,j,k+1], indPeriodic[i+1,j,k+1], indPeriodic[i+1,j+1,k+1], indPeriodic[i,j+1,k+1])
                                 )
    V_Periodic = np.array(V_Periodic, dtype=int) # size = (N-1)(N-1)*N

    # Point scalars: model density at every grid point.
    scalars = vtk.vtkDoubleArray()
    scalars.SetName("H+ log density (cm^-3)")
    for i in range(N**3):
        scalars.InsertNextValue(logDen(P[i,0],P[i,1],P[i,2]))

    nV_Periodic = V_Periodic.shape[0]
    ppc = V_Periodic.shape[1]

    # Creating vtk points
    vtkpts = vtk.vtkPoints()
    vtkpts.SetNumberOfPoints(N**3)
    for i in range(N**3):
        vtkpts.InsertPoint(i, P_cartesian[i,0], P_cartesian[i,1], P_cartesian[i,2])

    # Assemble the unstructured grid from the hexahedra.
    ugo = self.GetUnstructuredGridOutput()
    ugo.Allocate(nV_Periodic,1)
    ugo.SetPoints(vtkpts)
    for row in range(nV_Periodic):
        aHexahedron = vtk.vtkHexahedron()
        for cell_indx in range(ppc):
            aHexahedron.GetPointIds().SetId(cell_indx, V_Periodic[row, cell_indx])
        ugo.InsertNextCell(aHexahedron.GetCellType(), aHexahedron.GetPointIds())

    output.GetPointData().AddArray(scalars)
def plasmapause(N, representation='Surface', model='Gallagher_Craven_Comfort88',
                coord_sys='GSM', log_den=[1.5], time=None,
                renderView=None, render=True, show=True):
    """Create a plasmapause density grid/surface via the generic objs_wrapper."""
    options = dict(N=N, representation=representation, model=model,
                   coord_sys=coord_sys, log_den=log_den, time=time,
                   renderView=renderView, render=render, show=show)
    options['obj'] = 'Plasmapause'
    return objs_wrapper(**options)
def _neutralsheet(self, output, time, psi,
                  Rh, G, Lw, d,
                  extend, NxNy,
                  coord_sys,
                  model,
                  return_sheet, array_scalar_value=1):
    """
    Show neutral sheet surface.

    Creates the position of the Current Sheet from model outlined in Tsyganenko 1995
    [https://doi.org/10.1029/94JA03193]

    Uses the parameters Rh = 8, d = 4, G = 10, Lw = 10 used by
    https://sscweb.gsfc.nasa.gov/users_guide/ssc_reg_doc.shtml

    Z = z1 + z2
    z1 = 0.5 * np.tan(psi) \
         * (np.sqrt((X - Rh * np.cos(psi))**2 + (d * np.cos(psi))**2)
            - np.sqrt((X + Rh * np.cos(psi))**2 + (d * np.cos(psi))**2))
    z2 = -G*np.sin(psi) * Y**4/(Y**4 + Lw**4)

    Parameters:
    ----------
    psi (float):
        Angle of the dipole moment and z-axis in GSM in degrees.
    Rh (float):
        "hinging distance"
    G (float):
        Amplitude of the current sheet warping.
    Lw (float):
        Defines the extension in the dawn-dusk direction.

    Returns:
    -------
    (points, psi) when return_sheet is True; otherwise writes the structured
    grid into `output` (programmable-source convention) and returns None.
    """
    import numpy as np
    import numpy.matlib
    from hxform import hxform as hx
    import paraview.simple as pvs

    # retrieving psi value based on time.
    if psi == None:
        assert time != None, \
            'if psi is None then time cannot be None.'
        # Dipole direction in GSM spherical coordinates; psi is the tilt from z.
        dipole = hx.MAGtoGSM(np.array([0., 0., 1.]), time, 'car', 'sph') # [radius, latitude,longitude]
        psi = 90 - dipole[1]
    # NOTE(review): deg2rad is applied unconditionally, so a caller passing
    # psi already in radians (see _plasmasheet) gets converted twice — verify.
    psi = np.deg2rad(psi)

    extend = np.array(extend)
    # NOTE(review): xax spans extend[0,0]..extend[1,0]; extend[0,0]..extend[0,1]
    # looks intended — confirm against callers.
    xax = np.linspace(extend[0,0],extend[1,0], NxNy[0])
    yax = np.linspace(extend[1,0],extend[1,1], NxNy[1])
    if return_sheet:
        # Three z-layers so the caller can build a slab of finite thickness.
        zax = np.linspace(-3,3,3)
    else:
        zax = np.zeros(yax.size)
    Y, _, X = np.meshgrid(yax, zax, xax)

    # Tsyganenko 1995 eq.
    z1 = 0.5 * np.tan(psi) \
        * (np.sqrt((X - Rh * np.cos(psi))**2 + (d * np.cos(psi))**2)
           - np.sqrt((X + Rh * np.cos(psi))**2 + (d * np.cos(psi))**2))
    z2 = - G * np.sin(psi) * Y**4/(Y**4 + Lw**4)
    Z = z1 + z2

    if return_sheet:
        # Offset the outer layers +/-3 Re around the sheet surface.
        Z[0,:] = Z[1,:]-3
        Z[2,:] = Z[1,:]+3
    points = np.column_stack([X.flatten(), Y.flatten(), Z.flatten()])
    if return_sheet:
        return points, psi
    print('created Tsyganenko 1995 currentsheet model')

    if coord_sys != 'GSM':
        points = hx.transform(points, time, 'GSM',
                              coord_sys, 'car', 'car')

    ############################################################
    ####### start of the code to use programmable source #######
    ############################################################
    import vtk

    if False:
        # this is never meant to run. it is only to get rid of error message
        # that output is not defined. output is defined when running
        # this script in the programmable source text box.
        output = ''

    # communication between "script" and "script (RequestInformation)"
    executive = self.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    exts = [executive.UPDATE_EXTENT().Get(outInfo, i) for i in range(6)]
    dims = [exts[1]+1, exts[3]+1, exts[5]+1]

    # setting the sgrid exent
    output.SetExtent(exts)

    # setting up the points and allocate the number of points
    # NOTE(review): `dsa` (ParaView numpy_interface dataset_adapter) is assumed
    # to be imported at module level — confirm.
    pvtk = dsa.numpyTovtkDataArray(points)
    pts = vtk.vtkPoints()
    pts.Allocate(dims[0] * dims[1] * dims[2])
    pts.SetData(pvtk)
    output.SetPoints(pts)

    # color sections: assign this surface a categorical value in the shared
    # 'Magnetosphere Surface' color map, reusing it if already annotated.
    annotations_list = list(pvs.GetColorTransferFunction('Magnetosphere Surface').Annotations)
    if 'Neutralsheet' in annotations_list:
        value = int(annotations_list[annotations_list.index('Neutralsheet')-1])
    else:
        value = int(1+len(annotations_list)/2)
    color_values = np.zeros(X.size) + value
    cvtk = dsa.numpyTovtkDataArray(color_values)
    cvtk.SetName("Magnetosphere Surface")
    output.GetPointData().AddArray(cvtk)
def plasmasheet(time, psi=None,
                Rh=8, G=10, Lw=10, d=4,
                extend=[[-40,-5],[-15,15]], NxNy=[40,40],
                coord_sys='GSM',
                model='tsyganenko95',
                color = [.6,.3,.2,0.5],
                representation='Surface',
                out_dir=tempfile.gettempdir(), png_fn=None,
                return_sheet=False,
                renderView=None,
                render=True,
                show=True):
    """Create the Tsyganenko-1995 plasma sheet slab via the generic objs_wrapper."""
    options = dict(time=time, psi=psi, Rh=Rh, G=G, Lw=Lw, d=d, extend=extend,
                   NxNy=NxNy, coord_sys=coord_sys, model=model, color=color,
                   representation=representation,
                   out_dir=out_dir, png_fn=png_fn,
                   return_sheet=return_sheet,
                   renderView=renderView, render=render, show=show)
    options['obj'] = 'Plasmasheet'
    return objs_wrapper(**options)
def _plasmasheet(self, output, time, psi,
                 Rh, G, Lw, d,
                 extend, NxNy,
                 coord_sys,
                 model,
                 return_sheet):
    """Show plasma sheet volume.

    Programmable-source script body: builds a slab of points 3 Re above and
    below the Tsyganenko-1995 neutral sheet (via _neutralsheet with
    return_sheet=True) and writes it into `output` as a structured grid with
    a categorical 'Magnetosphere Surface' color array.
    """
    import numpy as np
    from hxform import hxform as hx
    import paraview.simple as pvs
    from magnetovis.objects import _neutralsheet

    if psi == None:
        assert time != None, \
            'if psi is None then time cannot be None.'
        # Dipole tilt angle from the MAG z-axis expressed in GSM.
        dipole = hx.MAGtoGSM(np.array([0., 0., 1.]), time, 'car', 'sph') # [radius, latitude,longitude]
        psi = 90 - dipole[1]
    psi = np.deg2rad(psi)
    psi_deg = np.copy(np.rad2deg(psi))
    psi_deg = np.around(psi_deg, decimals=3)

    # NOTE(review): psi is in radians here, but _neutralsheet applies
    # np.deg2rad() to its psi argument again — possible double conversion;
    # verify. self/output are passed as False since only the returned points
    # are used in return_sheet mode.
    points, psi = _neutralsheet(self=False, output=False, time=time, psi=psi, Rh=Rh, G=G,
                                Lw=Lw, d=d, extend=extend, NxNy=NxNy,
                                coord_sys=coord_sys, model=model,
                                return_sheet=True)
    print('created Tsyganenko 1995 current sheet model with 3 Re width'
          +' above and below')

    if coord_sys != 'GSM':
        points = hx.transform(points, time, 'GSM',
                              coord_sys, 'car', 'car')

    ############################################################
    ####### start of the code to use programmable source #######
    ############################################################
    import vtk

    if False:
        # this is never meant to run. it is only to get rid of error message
        # that output is not defined. output is defined when running
        # this script in the programmable source text box.
        output = ''

    # communication between "script" and "script (RequestInformation)"
    executive = self.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    exts = [executive.UPDATE_EXTENT().Get(outInfo, i) for i in range(6)]
    dims = [exts[1]+1, exts[3]+1, exts[5]+1]

    # setting the sgrid exent
    output.SetExtent(exts)

    # setting up the points and allocate the number of points
    # NOTE(review): `dsa` is assumed to be imported at module level — confirm.
    pvtk = dsa.numpyTovtkDataArray(points)
    pts = vtk.vtkPoints()
    pts.Allocate(dims[0] * dims[1] * dims[2])
    pts.SetData(pvtk)
    output.SetPoints(pts)

    # color sections: reuse or allocate a categorical value in the shared
    # 'Magnetosphere Surface' color map.
    annotations_list = list(pvs.GetColorTransferFunction('Magnetosphere Surface').Annotations)
    if 'Plasmasheet' in annotations_list:
        value = int(annotations_list[annotations_list.index('Plasmasheet')-1])
    else:
        value = int(1+len(annotations_list)/2)
    color_values = np.zeros(points.shape[0]) + value
    cvtk = dsa.numpyTovtkDataArray(color_values)
    cvtk.SetName("Magnetosphere Surface")
    output.GetPointData().AddArray(cvtk)
def objs_wrapper(**kwargs):
    """Create and display a ParaView ProgrammableSource for a magnetovis object.

    Dispatches on ``kwargs['obj']`` ('axis', 'Magnetopause', 'Bowshock',
    'Neutralsheet', 'Plasmasheet', 'Plasmapause', 'satellite', 'latitude',
    'longitude', 'dipole field').  The source's Script re-executes this very
    file with ``kwargs`` bound, so the matching ``_<obj>`` builder runs inside
    the ProgrammableSource.  Each branch also configures the categorical color
    transfer function and builds a descriptive title used to register the
    source in the pipeline browser.

    Returns
    -------
    (programmableSourceDisplay, renderView, programmableSource)
    """
    import re
    import paraview.simple as pvs
    from magnetovis.util import tstr
    from hxform import hxform as hx

    def script(kwargs):
        # Build the ProgrammableSource script: re-run this file with kwargs
        # bound in its namespace.  `sys` is expected at module scope.
        # https://stackoverflow.com/questions/436198/what-is-an-alternative-to-execfile-in-python-3
        if sys.version_info[0] < 3:
            script_src = "kwargs="+str(kwargs)+";execfile('" + __file__ + "',globals(),locals())"
        else:
            script_src = "kwargs="+str(kwargs)+";exec(open('" + __file__ + "').read())"
        return script_src

    valid_rep = ['Surface', '3D Glyphs', 'Feature Edges',
                 'Outline', 'Point Gaussian', 'Points',
                 'Surface With Edges', 'Wireframe', 'Volume']
    assert kwargs['representation'] in valid_rep, \
        """representation must be one of the following {}""".format(valid_rep)

    programmableSource = pvs.ProgrammableSource()
    mag_surfaces = ['Magnetopause','Bowshock','Neutralsheet', 'Plasmasheet']

    if kwargs['obj'] == 'axis':
        scalar_data = '{} axes'.format(kwargs['coord_sys'])
        programmableSource.Script = script(kwargs)
        if not kwargs['renderView']:
            renderView = pvs.GetActiveViewOrCreate('RenderView')
        else:
            # BUGFIX: renderView was previously left unbound when a view was
            # passed in, raising NameError at the Show() call below.
            renderView = kwargs['renderView']
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        LUT = pvs.GetColorTransferFunction(scalar_data)
        LUT.IndexedColors = [1,0,0, 1,1,0.5, 0,1,0.1, 0,0,0]
        LUT.Annotations = ['0','X','1','Y','2','Z', '-1','ticks']
        LUT.InterpretValuesAsCategories = 1
        LUT.AnnotationsInitialized = 1
        programmableSourceDisplay.LookupTable = LUT
        programmableSourceDisplay.OpacityArray = ['POINTS', scalar_data]
        programmableSourceDisplay.ColorArrayName = ['POINTS', scalar_data]
        # won't need once text annotations can be put on the axis itself
        programmableSourceDisplay.SetScalarBarVisibility(renderView, False)
        if not kwargs['show']:
            pvs.Hide(programmableSource, renderView)
        if kwargs['render']:
            pvs.Render()
            renderView.Update()
        title = "{}-axis {} {}".format(kwargs['val'],
                                       kwargs['coord_sys'],
                                       tstr(kwargs['time'], 5))
        renderView = pvs.GetActiveViewOrCreate('RenderView')
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        # in-graph billboard label at the positive end of the axis
        text = "{} [R_e] | {}".format(kwargs["val"],kwargs["coord_sys"])
        text_obj = pvs.Text(registrationName="-- {}".format(text))
        text_obj.Text = text
        textDisplay = pvs.Show(text_obj, renderView, 'TextSourceRepresentation')
        textDisplay.TextPropMode = 'Billboard 3D Text'
        textDisplay.FontSize = 12
        txt_location = np.array([kwargs['lims'][1],0,0])
        translate = np.array([0,0,2])
        if kwargs['val'] != "X":
            if kwargs['val'] == "Y":
                rot_axis = (0,0,1)
            else:
                rot_axis = (0,-1,0) # rotation right hand rule on rotation axis
            txt_location = np.dot(rotation_matrix(rot_axis,90), txt_location)
        if kwargs['coord_sys'] != 'GSM':
            txt_location = hx.transform(txt_location, kwargs['time'], 'GSM', kwargs['coord_sys'], 'car', 'car')[0]
            translate = hx.transform(translate, kwargs['time'], 'GSM', kwargs['coord_sys'], 'car', 'car')[0]
        textDisplay.BillboardPosition = txt_location + translate

    if kwargs['obj'] in mag_surfaces:
        # Structured-grid dimensions depend on how each surface is sampled.
        if kwargs['obj'] == 'Magnetopause' or kwargs['obj'] == 'Bowshock':
            x_dim = 1
            y_dim = 101
            z_dim = 101
        elif kwargs['obj'] == 'Neutralsheet':
            x_dim, y_dim = kwargs['NxNy']
            z_dim = 1
        elif kwargs['obj'] == 'Plasmasheet':
            x_dim, y_dim = kwargs['NxNy']
            z_dim = 3
        scalar_data = 'Magnetosphere Surface'
        programmableSource.OutputDataSetType = 'vtkStructuredGrid'
        programmableSource.ScriptRequestInformation = """
executive = self.GetExecutive()
outInfo = executive.GetOutputInformation(0)
dims = [{}, {}, {}] # x-dims, y-dims, z-dims
outInfo.Set(executive.WHOLE_EXTENT(), 0, dims[0]-1 , 0, dims[1]-1 , 0, dims[2]-1)
""".format(x_dim, y_dim, z_dim)
        programmableSource.Script = script(kwargs)
        if not kwargs['renderView']:
            renderView = pvs.GetActiveViewOrCreate('RenderView')
        else:
            renderView = kwargs['renderView']
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        if not kwargs['show']:
            pvs.Hide(programmableSource, renderView)
        if kwargs['render']:
            pvs.Render()
            renderView.Update()
        if not kwargs['time']:
            kwargs['time'] = ''
        if kwargs['obj'] == 'Magnetopause':
            # return_title=True only builds the title pieces; no geometry.
            time_str, Bz_str, Psw_str = \
                _magnetopause(self='', output='', time=kwargs['time'],
                              Bz=kwargs['Bz'], Psw=kwargs['Psw'],
                              model=kwargs['model'],
                              coord_sys=kwargs['coord_sys'],
                              return_x_max=False, return_title=True)
            title = "{} {} {} {} {} {}".format(
                kwargs['obj'], kwargs['model'], kwargs['coord_sys'],
                time_str, Bz_str, Psw_str)
        elif kwargs['obj'] == 'Bowshock':
            time_str, Bz_str, Psw_str = \
                _bowshock(self='', output='', time=kwargs['time'], Bz=kwargs['Bz'],
                          Psw=kwargs['Psw'], model=kwargs['model'],
                          mpause_model=kwargs['mpause_model'],
                          coord_sys=kwargs['coord_sys'], return_title=True)
            title = "{} {} {} {} {} {} mpause_model={}".format(
                kwargs['obj'], kwargs['model'], kwargs['coord_sys'],
                time_str, Bz_str, Psw_str, kwargs['mpause_model'])
        elif kwargs['obj'] == 'Neutralsheet' or kwargs['obj'] == 'Plasmasheet':
            if kwargs['psi'] is None:
                # derive the dipole tilt from the time when psi not given
                dipole = hx.MAGtoGSM(np.array([0., 0., 1.]), kwargs['time'], 'car', 'sph') # [radius, latitude,longitude]
                kwargs['psi'] = 90 - dipole[1]
                time_str = ''
            else:
                time_str = tstr(kwargs['time'], 5)
            # BUGFIX: was .replace(' ', ' ') (a no-op); collapse the double
            # space left behind when time_str is empty.
            title = '{} {} {} {} psi={:.3g} Rh={:.3g} G={:.3g} Lw={:.3g} d={:.3g}'\
                    .format(kwargs['obj'], kwargs['model'], kwargs['coord_sys'],
                            tstr(kwargs['time'],length=5), kwargs['psi'], kwargs['Rh'],
                            kwargs['G'], kwargs['Lw'], kwargs['d'])\
                    .replace('  ', ' ')
        LUT = pvs.GetColorTransferFunction(scalar_data) # [].....[1,2,3].....[1,2,3,4,5,6]
        index_colored_list = kwargs['color'][0:3]
        LUT.IndexedColors = np.concatenate((LUT.IndexedColors,index_colored_list)) # [1.0, 0.0, 0.0]
        # append the annotation for the newly created magnetosphere surface
        annotations = list(LUT.Annotations)
        annotations.append(str(int(1+ len(LUT.Annotations)/2)))
        annotations.append(kwargs['obj'])
        LUT.Annotations = annotations # ['0', 'Neutralsheet']
        LUT.InterpretValuesAsCategories = 1
        LUT.AnnotationsInitialized = 1
        programmableSourceDisplay.LookupTable = LUT
        programmableSourceDisplay.OpacityArray = ['POINTS', scalar_data]
        programmableSourceDisplay.ColorArrayName = ['POINTS', scalar_data]
        programmableSourceDisplay.SetScalarBarVisibility(renderView, True)

    if kwargs['obj'] == 'Plasmapause':
        programmableSource.OutputDataSetType = 'vtkUnstructuredGrid'
        programmableSource.Script = script(kwargs)
        if not kwargs['renderView']:
            renderView = pvs.GetActiveViewOrCreate('RenderView')
        else:
            renderView = kwargs['renderView']   # BUGFIX: was left unbound
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        if not kwargs['show']:
            pvs.Hide(programmableSource, renderView)
        if kwargs['render']:
            pvs.Render()
            renderView.Update()
        title = "Plasmapause {} {}".format(kwargs['model'], kwargs['coord_sys'])
        pvs.ColorBy(programmableSourceDisplay, ('POINTS', 'H+ log density (cm^-3)'))
        programmableSourceDisplay.SetScalarBarVisibility(renderView, True)

    if kwargs['obj'] == 'satellite':
        from hapiclient import hapi
        scalar_data = kwargs['satellite_id'] + ' Spacecraft Region'
        programmableSource.Script = script(kwargs)
        if not kwargs['renderView']:
            renderView = pvs.GetActiveViewOrCreate('RenderView')
        else:
            renderView = kwargs['renderView']   # BUGFIX: was left unbound
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        if not kwargs['show']:
            pvs.Hide(programmableSource, renderView)
        if kwargs['render']:
            pvs.Render()
            renderView.Update()
        # fetch the trajectory (cached) again to learn the unique regions
        server = 'http://hapi-server.org/servers/SSCWeb/hapi'
        opts = {'logging': False, 'usecache': True}
        parameters = "X_{},Y_{},Z_{},Spacecraft_Region"\
            .format(kwargs["coord_sys"], kwargs["coord_sys"], kwargs["coord_sys"])
        data, meta = hapi(server, kwargs["satellite_id"], parameters,
                          kwargs["time_o"], kwargs["time_f"], **opts)
        # trim trailing :00.000Z / .000Z from the times shown in the title
        if re.search('.*(?=:00.000Z)|.*(?=.000Z)', kwargs['time_o']):
            kwargs['time_o'] = re.search\
                ('.*(?=:00.000Z)|.*(?=.000Z)', kwargs['time_o']).group()+'Z'
        if re.search('.*(?=:00.000Z)|.*(?=.000Z)', kwargs['time_f']):
            kwargs['time_f'] = re.search\
                ('.*(?=:00.000Z)|.*(?=.000Z)', kwargs['time_f']).group()+'Z'
        title = '{} line {} {} to {}'.format(kwargs['satellite_id'],
                                             kwargs['coord_sys'],
                                             kwargs['time_o'],
                                             kwargs['time_f'])
        unique_regions = np.unique(data['Spacecraft_Region'])
        LUT = pvs.GetColorTransferFunction(scalar_data)
        LUT.InterpretValuesAsCategories = 1
        LUT.AnnotationsInitialized = 1
        annotations = []
        index_colored_list = []
        for i in range(len(unique_regions)):
            annotations.append(str(i))
            annotations.append(unique_regions[i])
            if kwargs['region_colors'] is not None:
                # region names come back as bytes from hapiclient
                tmp = unique_regions[i].decode("utf-8")
                index_colored_list.append(kwargs['region_colors'][tmp][0:3])
            else:
                index_colored_list.append(kwargs['color'][0:3])
        LUT.Annotations = annotations
        index_colored_list = np.array(index_colored_list).flatten()
        LUT.IndexedColors = index_colored_list
        programmableSourceDisplay.LookupTable = LUT
        programmableSourceDisplay.OpacityArray = ['POINTS', scalar_data]
        programmableSourceDisplay.ColorArrayName = ['POINTS', scalar_data]
        programmableSourceDisplay.SetScalarBarVisibility(renderView, True)

    if kwargs['obj'] == "latitude" or kwargs['obj'] == 'longitude':
        scalar_data = 'lat_lon'
        programmableSource.Script = script(kwargs)
        if not kwargs['renderView']:
            renderView = pvs.GetActiveViewOrCreate('RenderView')
        else:
            renderView = kwargs['renderView']   # BUGFIX: was left unbound
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        if not kwargs['show']:
            pvs.Hide(programmableSource, renderView)
        if kwargs['render']:
            pvs.Render()
            renderView.Update()
        title = "{} line {} {}".format(kwargs['obj'], kwargs['coord_sys'], tstr(kwargs['time'],5))
        lat_lonLUT = pvs.GetColorTransferFunction(scalar_data)
        lat_lonLUT.InterpretValuesAsCategories = 1
        lat_lonLUT.AnnotationsInitialized = 1
        lat_lonLUT.Annotations = ['0', 'latitude', '1', 'longitude']
        # RGB slots 0-2 color latitude, 3-5 longitude; preserve whichever
        # slot the other line type may already be using.
        if list(lat_lonLUT.IndexedColors) != []:
            if kwargs['obj'] == 'latitude':
                lat_lonLUT.IndexedColors = np.concatenate((kwargs['color'], lat_lonLUT.IndexedColors[3:]))
            else:
                lat_lonLUT.IndexedColors = np.concatenate((lat_lonLUT.IndexedColors[0:3], kwargs['color']))
        else:
            if kwargs['obj'] == 'latitude':
                lat_lonLUT.IndexedColors = np.concatenate((kwargs['color'],.5*np.array(kwargs['color'])))
            else:
                lat_lonLUT.IndexedColors = np.concatenate((.5*np.array(kwargs['color']),kwargs['color']))
        programmableSourceDisplay.LookupTable = lat_lonLUT
        programmableSourceDisplay.OpacityArray = ['POINTS', scalar_data]
        programmableSourceDisplay.ColorArrayName = ['POINTS', scalar_data]
        programmableSourceDisplay.SetScalarBarVisibility(renderView, False)

    if kwargs['obj'] == 'dipole field':
        # NOTE: this block appeared twice verbatim in the original; the
        # redundant duplicate was removed (it repeated identical setup).
        Nx,Ny,Nz = kwargs['NxNyNz']
        programmableSource.OutputDataSetType = 'vtkStructuredGrid'
        programmableSource.ScriptRequestInformation = f"""
executive = self.GetExecutive()
outInfo = executive.GetOutputInformation(0)
outInfo.Set(executive.WHOLE_EXTENT(), 0, {Nx-1}, 0, {Ny-1}, 0, {Nz-1})
"""
        programmableSource.Script = script(kwargs)
        renderView = pvs.GetActiveViewOrCreate('RenderView')
        programmableSourceDisplay = pvs.Show(programmableSource, renderView)
        programmableSourceDisplay.Representation = kwargs['representation']
        # temp title
        title = f'dipole field {kwargs["coord_sys"]} M={kwargs["M"]}'

    pvs.RenameSource(title, programmableSource)
    renderView.ResetCamera()
    return programmableSourceDisplay, renderView, programmableSource
def contour(obj, isosurface, display=None, color_by=None, show_legend=True):
    """Apply a Contour filter to a pipeline source at the given isovalue(s).

    Parameters
    ----------
    obj : paraview source proxy to contour.
    isosurface : value or list of values for Contour.Isosurfaces.
    display, color_by : unused; kept for interface compatibility.
    show_legend : bool, whether to show the scalar bar.

    Returns
    -------
    (contour display, renderView, contour filter)
    """
    import paraview.simple as pvs
    # Recover the registration name of obj so the filter can reuse it.
    # BUGFIX: was `obj.__eq__(value)` (truthy NotImplemented risk) and
    # `title` was unbound when obj was not among the registered sources.
    title = None
    for key, value in pvs.GetSources().items():
        if obj == value:
            title = key[0]
            break
    if title is None:
        contourFilter = pvs.Contour(obj)
    else:
        contourFilter = pvs.Contour(obj, guiName=title)
    contourFilter.Isosurfaces = isosurface
    renderView = pvs.GetActiveViewOrCreate("RenderView")
    pvs.Hide(obj, renderView)
    conDis = pvs.Show(contourFilter)
    conDis.SetScalarBarVisibility(renderView, show_legend)
    return conDis, renderView, contourFilter
def tube(obj, tube_radius=.1, vary_radius='Off', radius_factor=4.0,
         show_legend=False, renderView=None):
    """Wrap a line-type source in a ParaView Tube filter.

    Parameters
    ----------
    obj : paraview source proxy (typically a polyline) to tube.
    tube_radius : base tube radius in scene units.
    vary_radius : Tube.VaryRadius mode ('Off', 'By Scalar', ...).
    radius_factor : Tube.RadiusFactor used when the radius varies.
    show_legend : bool, whether to show the scalar bar.
    renderView : optional view; the active view is used when falsy.

    Returns
    -------
    (tube display, renderView, tube filter)
    """
    import paraview.simple as pvs
    # NOTE: the original looped over pvs.GetSources() to compute a `title`
    # that was never used (the filter is always registered as '-- tube');
    # that dead code was removed.
    tubeFilter = pvs.Tube(obj, guiName='-- tube')
    tubeFilter.Radius = tube_radius
    tubeFilter.VaryRadius = vary_radius
    tubeFilter.RadiusFactor = radius_factor
    if not renderView:
        renderView = pvs.GetActiveViewOrCreate("RenderView")
    pvs.Hide(obj, renderView)
    tubeDis = pvs.Show(tubeFilter)
    tubeDis.SetScalarBarVisibility(renderView, show_legend)
    return tubeDis, renderView, tubeFilter
def _satellite(self, time_o, time_f, satellite_id, coord_sys, region_colors):
    """Build a satellite-trajectory polyline inside a ProgrammableSource.

    Fetches X/Y/Z (in *coord_sys*) and Spacecraft_Region for *satellite_id*
    between *time_o* and *time_f* from the SSCWeb HAPI server and writes a
    single vtkPolyLine plus a per-point region index array (used as a
    categorical color) into the filter's poly-data output.

    ``self`` is the ProgrammableSource injected when this file is exec'd in
    its Script box.
    """
    import vtk
    import numpy as np
    from hapiclient import hapi

    server = 'http://hapi-server.org/servers/SSCWeb/hapi'
    opts = {'logging': False, 'usecache': True}
    parameters = "X_{},Y_{},Z_{},Spacecraft_Region"\
        .format(coord_sys, coord_sys, coord_sys)
    data, meta = hapi(server, satellite_id, parameters,
                      time_o, time_f, **opts)

    pdo = self.GetPolyDataOutput()
    pdo.Allocate(len(data), 1)
    pts = vtk.vtkPoints()
    polyline = vtk.vtkPolyLine()
    npts = len(data['Spacecraft_Region'])
    polyline.GetPointIds().SetNumberOfIds(npts)
    for i in range(npts):
        pts.InsertPoint(i, data['X_'+coord_sys][i], data['Y_'+coord_sys][i],
                        data['Z_'+coord_sys][i])
        polyline.GetPointIds().SetId(i, i)
    pdo.InsertNextCell(polyline.GetCellType(), polyline.GetPointIds())
    pdo.SetPoints(pts)

    # One color index per point; all zeros unless a region->color map is
    # supplied (the LUT set up by objs_wrapper maps index -> color).
    colors = vtk.vtkUnsignedCharArray()
    colors.SetNumberOfComponents(1)
    colors.SetName(satellite_id + ' Spacecraft Region')
    region_dict = {}
    unique_regions = np.unique(data['Spacecraft_Region'])
    for i, region in enumerate(unique_regions):
        region_dict[region] = int(i)
    for region in data['Spacecraft_Region']:
        if region_colors is None:   # identity check instead of `== None`
            colors.InsertNextTuple([0])
        else:
            colors.InsertNextTuple([region_dict[region]])
    pdo.GetPointData().AddArray(colors)
def _magnetopause(self, output, time, Bz, Psw, model, coord_sys, return_x_max,
                  return_title=False):
    """Build a magnetopause surface (exec'd inside a ParaView ProgrammableSource).

    Parameters
    ----------
    self, output : the ProgrammableSource and its output, injected when this
        file is exec'd in the Script box (dummies may be passed when only
        return_x_max / return_title is wanted).
    time : [Y, M, D, h, m, (s)] list, or '' when unused.
    Bz : float | None | False
        IMF Bz [nT].  None => interpolate from OMNI (nominal 0 before 1995).
        False => ignore (valid only with model='Sibeck_Lopez_Roelof91').
    Psw : float | None | False
        Solar-wind dynamic pressure [nPa].  None => OMNI (nominal 2.04).
        False => ignore (valid only with model='Sibeck_Lopez_Roelof91').
    model : 'Shue97' | 'Roelof_Sibeck93' | 'Sibeck_Lopez_Roelof91'
    coord_sys : output coordinate system; models are native GSE.
    return_x_max : bool
        If True, return only the subsolar stand-off distance [Re] (used by
        the bow-shock code) and build no geometry.
    return_title : bool
        If True, return (time_str, Bz_str, Psw_str) for building a title.
    """
    import numpy as np
    import numpy.matlib  # np.matlib is NOT auto-imported by `import numpy`
    from datetime import datetime, timedelta
    import pytz
    from magnetovis.util import tstr, time2datetime
    from magnetovis.objects import rotation_matrix
    from hxform import hxform as hx
    import paraview.simple as pvs

    def mpause_Shue97(Bz, Psw, return_x_max = False):
        """Magnetopause of Shue et al. 1997 [https://doi.org/10.1029/98JA01103].

        r = r_0 * (2/(1+cos(theta)))**alpha in GSE.  Returns an (N,3) array of
        surface points, or just the subsolar distance r_0 if return_x_max.
        """
        # Eqn 12 of Shue et al. 1997: r_0 = (11.4 + a*Bz) * Psw**(-1/6.6).
        # BUGFIX: the pressure factor previously multiplied only the Bz term
        # (11.4 + a*Bz*Psw**(-1/6.6)), which disagrees with the paper.
        if Bz >= 0:
            r_0 = (11.4 + 0.013*Bz) * (Psw**(-1/6.6))
        else:
            r_0 = (11.4 + 0.14*Bz) * (Psw**(-1/6.6))
        alpha = (0.58 - 0.010 * Bz) * (1 + 0.010 * Psw) # Eqn 14 of Shue et al. 1997
        if return_x_max:
            # subsolar point: theta = 0 so the shape factor is 1**alpha
            return r_0 * (2./(1+np.cos(0)))**alpha
        # find the polar angle at which the tail reaches x ~ -40 Re
        stopping_constant = 40/(2**alpha * r_0)
        theta_finder_array = np.arange(np.pi/2 , np.pi, 0.01)
        for theta in theta_finder_array:
            stopping_value = np.cos(theta)/((1 + np.cos(theta))**alpha)
            if abs(stopping_value) < stopping_constant:
                last_theta = theta
            else:
                break
        last_theta = np.rad2deg(last_theta)
        # build a square grid of theta values growing outward from theta=0
        theta_array = [[0]]
        all_theta_values = np.flipud(np.linspace(last_theta,0,50))
        for theta in all_theta_values:
            theta_array = np.pad(theta_array,((1,1),(1,1)),'constant',
                                 constant_values=((theta,theta),(theta,theta)))
        theta_array = theta_array.flatten()
        m = np.linspace(1,-1,2*len(all_theta_values)+1,endpoint=True)
        u = np.matlib.repmat(m,1,len(m)).flatten()
        v = np.repeat(m,len(m))
        phi_array = np.arctan2(v,u)
        theta_array = np.radians(theta_array)
        r_array = r_0*( (2/(1+np.cos(theta_array)))**alpha)
        X = r_array * np.cos(theta_array)
        Y = r_array * np.sin(theta_array) * np.sin(phi_array)
        Z = r_array * np.sin(theta_array) * np.cos(phi_array)
        points = np.column_stack([X, Y, Z])
        print('Created Magnetopause model from Shue et al. 1997.')
        return points

    def mpause_Roelof_Sibeck93(Bz, Psw, return_x_max = False):
        """Magnetopause of Roelof and Sibeck 1993 [https://doi.org/10.1029/93JA02362].

        Solves r**2 = -s1*X**2 - s2*X - s3 (r**2 = Y**2 + Z**2), with
        coefficients parameterized by ln(Psw/P0) and Bz; GSE coordinates.
        Returns an (N,3) array of surface points, or the subsolar X (the
        larger quadratic root) if return_x_max.
        """
        # fit constants from Roelof and Sibeck 1993
        P0 = 2.088 # nPa
        sigma_lnp = 0.6312 # unit less
        Bz0 = -0.1635 # nT
        sigma_Bz = 3.489
        a00 = -1.764
        a10 = -0.299
        a01 = -0.151
        a20 = -0.246
        a11 = 0.050
        a02 = 0.476
        b00 = 2.934
        b10 = -0.076
        b01 = -0.129
        b20 = -0.012
        b11 = 0.079
        b02 = 0.0026
        c00 = 5.397
        c10 = -0.183
        c01 = -0.041
        c20 = -0.044
        c11 = 0.040
        c02 = 0.020
        x = np.log(float(Psw)/P0)/sigma_lnp
        y = (Bz - Bz0)/sigma_Bz
        lnA = a00 + a10 * x + a01 * y + a20 * x**2 + a11 * x * y + a02 * y**2
        lnB = b00 + b10 * x + b01 * y + b20 * x**2 + b11 * x * y + b02 * y**2
        ln_negC = c00 + c10 * x + c01 * y + c20 * x**2 + c11 * x * y + c02 * y**2
        s1 = np.exp(lnA)
        s2 = np.exp(lnB)
        s3 = - np.exp(ln_negC)
        # quadratic roots give the subsolar nose and the tail cutoff
        sqrt_descriminate = np.sqrt(s2**2 - 4 * (-s1) * (-s3))
        x_max = (s2 - sqrt_descriminate)/ (-2 * s1)
        x_min = (s2 + sqrt_descriminate)/ (-2 * s1)
        if x_min < -40:
            x_min = -40
        # used to get the max X value of the magnetopause in order to
        # calculate the appropriate sub solar distance of the bow shock
        if return_x_max:
            return x_max
        X = [[x_max]]
        all_x_values = np.flipud(np.linspace(x_min,x_max,50))
        for x in all_x_values:
            X = np.pad(X,((1,1),(1,1)),'constant',constant_values=((x,x),(x,x)))
        X = X.flatten()
        r = -s1 * X **2 - s2 * X - s3
        r[r<0] = 0
        r = np.sqrt(r)
        m = np.linspace(1,-1,2*len(all_x_values)+1,endpoint=True)
        u = np.matlib.repmat(m,1,len(m)).flatten()
        v = np.repeat(m,len(m))
        phi = np.arctan2(v,u)
        Y = r * np.cos(phi)
        Z = r * np.sin(phi)
        points = np.column_stack([X, Y, Z])
        print('Created Magnetopause model from Roelof and Sibeck 1993.')
        return points

    def mpause_Sibeck_Lopez_Roelof1991(Bz=None, Psw=None,
                                       return_x_max = False):
        """Magnetopause of Sibeck, Lopez, and Roelof 1991.

        Exactly one of Bz [nT] or Psw [nPa] should be given (the other None):
        with Psw the nominal surface is pressure-scaled by rho=(p0/Psw)**(1/6);
        with Bz the fit coefficients are chosen from Bz bins (valid range
        [-6, 6] nT).  GSE coordinates.  Returns an (N,3) array of surface
        points, or the subsolar X if return_x_max.
        """
        if Psw is not None:
            s1, s2, s3 = 0.14, 18.2, -217.2
            p_0 = 2.04
            rho = (p_0 / Psw) ** (1./6)
            print('Creating Sibeck Lopez Roelof 1991 mpause model'+
                  ' with Psw = {}'.format(Psw))
        elif Bz is not None:
            rho = 1
            print('Creating Sibeck Lopez Roelof 1991' +
                  ' mpause model with Bz = {}'.format(Bz))
            if Bz <= -4:
                s1, s2, s3 = 0.12, 19.9, -200.6
                if Bz < -6:
                    # BUGFIX: .format() previously bound only to the last
                    # concatenated literal, so '{}' was printed verbatim.
                    print(('WARNING Bz={}nT which is out of range of valid values'
                           ' for Sibeck Lopez Roelof 91 magnetopause model. \n'
                           'valid values are [-6,6] \n'
                           'Using values for Bz in [-6,-4] bin.').format(Bz))
            elif Bz <= -2:
                s1, s2, s3 = 0.22, 18.2, -213.4
            elif Bz <= 0:
                s1, s2, s3 = 0.11, 17.9, -212.8
            elif Bz <= 2:
                s1, s2, s3 = 0.2, 17.1, -211.5
            elif Bz <= 4:
                s1, s2, s3 = 0.09, 15.7, -198.3
            else:
                s1, s2, s3 = 0.13, 13.1, -179.2
                if Bz > 6:
                    # BUGFIX: same .format() binding problem as above.
                    print(('WARNING Bz={}nT which is out of range of valid values'
                           ' for Sibeck Lopez Roelof 91 magnetopause model. \n'
                           'valid values are [-6,6] \n'
                           'Using values for Bz in [4,6] bin.').format(Bz))
        sqrt_descriminate = np.sqrt((s2*rho)**2 - 4 * (s1) * (s3) * rho**2)
        x_max = (-s2*rho + sqrt_descriminate)/ (2 * s1)
        x_min = (-s2*rho - sqrt_descriminate)/ (2 * s1)
        if x_min < -40:
            x_min = -40
        # used for getting the max value for magnetopause to get proper
        # bowshock distance to magnetopause distance ratio
        if return_x_max:
            return x_max
        X = [[x_max]]
        all_x_values = np.flipud(np.linspace(x_min,x_max,50))
        for x in all_x_values:
            X = np.pad(X,((1,1),(1,1)),'constant',constant_values=((x,x),(x,x)))
        X = X.flatten()
        r = -s1 * X **2 - s2 * rho * X - s3 * rho ** 2
        r[r<0] = 0
        r = np.sqrt(r)
        m = np.linspace(1,-1,2*len(all_x_values)+1,endpoint=True)
        u = np.matlib.repmat(m,1,len(m)).flatten()
        v = np.repeat(m,len(m))
        phi = np.arctan2(v,u)
        Y = r * np.cos(phi)
        Z = r * np.sin(phi)
        print('Created Magnetopause model from Sibeck Lopez Roelof 1991.')
        points = np.column_stack([X, Y, Z])
        print('\n\n',np.shape(points),'\n\n')
        return points

    if not return_x_max:
        year_limit = datetime(1995, 1, 1, tzinfo=pytz.utc)
        if model == 'Sibeck_Lopez_Roelof91':
            # NOTE: `== False` is kept on purpose (0 == False is True), to
            # preserve the original acceptance semantics of these asserts.
            assert not (Psw == False and Bz == False),\
                'when model=Siebck_Lopez_Roelof91 Both Psw and Bz cannot be False.'
            assert (Psw == False) or (Bz == False),\
                'When model=Siebeck_Lopez_Roelof91 either Psw of Bz has to be False'
        # case 1: if time is below year limit then automatically set
        # Bz or Psw to nominal values if they are none.
        if time[0] < year_limit.year:
            if Bz is None:
                Bz = 0
                print(f'using nominal value for Bz = 0 because year: {time[0]}')
                print(f'chosen is below the year limit {year_limit.year}')
            if Psw is None:
                Psw = 2.04
                print(f'using nominal value for Bz because year: {time[0]}')
                print(f'chosen is below the year limit {year_limit.year}')
        # case 2: if either Bz or Psw are None then use time and hapiclient
        # to get Bz and Psw from OMNI
        if Bz is None or Psw is None:
            time_str = ""+tstr(time,5)
            from hapiclient import hapi, hapitime2datetime
            server = 'https://cdaweb.gsfc.nasa.gov/hapi'
            dataset = 'OMNI_HRO2_1MIN'
            parameters = 'BZ_GSE,Pressure'
            opts = {'logging': False, 'usecache': True}
            start = time2datetime(time) + timedelta(minutes=-30)
            start = start.isoformat()
            stop = time2datetime(time) + timedelta(minutes= 30)
            stop = stop.isoformat()
            data, meta = hapi(server, dataset, parameters, start, stop, **opts)
            time_arr = hapitime2datetime(data['Time'])
            # OMNI fill values -> NaN before interpolation
            data['Pressure'][data['Pressure'] == 99.99] = np.nan
            data['BZ_GSE'][data['BZ_GSE'] == 9999.99] = np.nan
            unixZero = datetime(1970,1,1,tzinfo = time_arr[0].tzinfo)
            t1 = np.empty(time_arr.shape)
            time_to_interpolate = \
                (time2datetime(time).replace(tzinfo=pytz.UTC) - unixZero).total_seconds()
            for i in range(len(time_arr)):
                t1[i] = (time_arr[i] - unixZero).total_seconds()
        else:
            time_str = ''
        if Bz is None:
            if all(np.isnan(data['BZ_GSE'])):
                Bz = 0 # nominal value
                print('OMNI_HRO2_1MIN has no Bz values for time interval')
                print(f'{start} - {stop}.')
                print(f'Using nominal value Bz = {Bz} [nT].')
            else:
                nans = np.isnan(data['BZ_GSE'])
                BZ_GSE_OMNI= np.interp(t1, t1[~nans], data['BZ_GSE'][~nans])
                Bz = np.interp(time_to_interpolate, t1, BZ_GSE_OMNI)
            Bz_str = 'Bz={:.3g}'.format(Bz)
        elif Bz is False and model == 'Sibeck_Lopez_Roelof91':
            Bz = None
            Bz_str = ''
            print('Ignoring Bz to produce magnetopause becuase Bz=False and '+
                  'model = Sibeck_Lopez_Roelof91')
        else:
            Bz_str = 'Bz={:.3g}'.format(Bz)
        if Psw is None:
            if all(np.isnan(data['Pressure'])):
                Psw = 2.04 # nominal value
                print('OMNI_HRO2_1MIN has no pressure values for time interval')
                print(f'{start} - {stop}.')
                print(f'Using nominal value Psw = {Psw} [nPa].')
            else:
                nans = np.isnan(data['Pressure'])
                pressure_OMNI = np.interp(t1, t1[~nans], data['Pressure'][~nans])
                Psw = np.interp(time_to_interpolate, t1, pressure_OMNI)
            Psw_str = 'Psw={:.3g}'.format(Psw)
        elif Psw is False and model == 'Sibeck_Lopez_Roelof91':
            Psw = None
            Psw_str = ''
            print('Ignoring Psw to produce magnetopause becuase Psw=False and '+
                  'model = Sibeck_Lopez_Roelof91')
        else:
            Psw_str = 'Psw={:.3g}'.format(Psw)
        if return_title:
            return (time_str, Bz_str, Psw_str)

    if model == "Shue97":
        if return_x_max:
            return mpause_Shue97(Bz, Psw, return_x_max)
        points = mpause_Shue97(Bz, Psw)
    elif model == "Roelof_Sibeck93":
        if return_x_max:
            return mpause_Roelof_Sibeck93(Bz,Psw, return_x_max)
        points = mpause_Roelof_Sibeck93(Bz, Psw)
    elif model == 'Sibeck_Lopez_Roelof91':
        if return_x_max:
            return mpause_Sibeck_Lopez_Roelof1991(Bz=Bz, Psw=Psw,
                                                  return_x_max=return_x_max)
        points = mpause_Sibeck_Lopez_Roelof1991(Bz, Psw)
    if coord_sys != 'GSE':
        points = hx.transform(points, time, 'GSE', coord_sys, 'car', 'car')
        # NOTE(review): if model='Sibeck_Lopez_Roelof91' with Bz=False, Bz is
        # None here and this transform would fail — confirm callers never
        # combine Bz=False with a non-GSE coord_sys.
        Bz = hx.transform([0,0,Bz], time, 'GSE', coord_sys, 'car', 'car')[0]
    elif time_str == '':
        print('Time not used for either extracting Psw and Bz values')
        print('or changing coordinate systems.')
    # todo make sure that this rotation for all of them or just one.
    # according to sscweb magnetopause fortran code there is a 4 degree
    # aberration for the magnetopause surface
    points = np.dot(rotation_matrix((0,0,1), -4 ), points.T ).T
    ############################################################
    ####### start of the code to use programmable source #######
    ############################################################
    import vtk
    if False:
        # never runs; only placates linters about `output`, which is injected
        # when this script runs in the programmable source text box.
        output = ''
    # communication between "script" and "script (RequestInformation)"
    executive = self.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    exts = [executive.UPDATE_EXTENT().Get(outInfo, i) for i in range(6)]
    dims = [exts[1]+1, exts[3]+1, exts[5]+1]
    # setting the sgrid extent
    output.SetExtent(exts)
    # setting up the points and allocate the number of points
    # (dsa — paraview's dataset_adapter — is expected at module scope)
    pvtk = dsa.numpyTovtkDataArray(points)
    pts = vtk.vtkPoints()
    pts.Allocate(dims[0] * dims[1] * dims[2])
    pts.SetData(pvtk)
    output.SetPoints(pts)
    # color sections: reuse the existing 'Magnetopause' annotation index if
    # present, otherwise append a new category index
    annotations_list = list(pvs.GetColorTransferFunction('Magnetosphere Surface').Annotations)
    if 'Magnetopause' in annotations_list:
        value = int(annotations_list[annotations_list.index('Magnetopause')-1])
    else:
        value = int(1+len(annotations_list)/2)
    color_values = np.zeros(points.shape[0]) + value
    cvtk = dsa.numpyTovtkDataArray(color_values)
    cvtk.SetName("Magnetosphere Surface")
    output.GetPointData().AddArray(cvtk)
def _bowshock(self, output, time, model, Bz, Psw, mpause_model,
coord_sys, return_title=False):
"""Show bowshock suraface"""
from datetime import datetime, timedelta
import pytz
from magnetovis.util import tstr, time2datetime
from magnetovis.objects import _magnetopause, rotation_matrix
from hxform import hxform as hx
import numpy as np
import paraview.simple as pvs
def bowshock_Fairfield71(Bz, Psw,
mpause_model='Roelof_Sibeck93'):
"""
Bow shock surface model from Fairfield 1971 paper.
https://doi.org/10.1029/JA076i028p06700
The equation of the Bow Shock is given by:
r = ...
where
Parameters
----------
Bz : float
The interplanetary magnetic field in nano Tesla.
Psw : float
The solar wind dynamic pressure in nano Pascals.
Returns
-------
3 nd.array
3 spatial coordiante arrays of the surface of the bow shock in GSE
coordinate system.
"""
A = 0.0296
B = -0.0381
C = -1.280
D = 45.644
E = -652.10
x_max_pause = _magnetopause(self=None, output=None, time=[2000,1,1,0,0], Bz=Bz, Psw=Psw,
model=mpause_model,
coord_sys='GSE', return_x_max=True, return_title=False)
c1 = (A * C - 2 * D)/(A**2 - 4 * B)
c2 = (4 * E - C**2)/(A**2 - 4 * B)
x_min = -40
bowshock_subs_ratio = 1.3 * x_max_pause
x_max = - np.sqrt(c1**2 + c2) - c1
shift = x_max - bowshock_subs_ratio
x_max = x_max - shift
X = [[x_max]]
all_x_values = np.flipud(np.linspace(x_min,x_max,50))
for x in all_x_values:
X = np.pad(X,((1,1),(1,1)),'constant',constant_values=((x,x),(x,x)))
X = X.flatten()
g = (A * (X + shift) + C)/2
s = g**2 - B * (X + shift)**2 - D * (X + shift) - E
s = np.where(s < 0, 0, s) # to account for negatives under the radical
remainder = -(A * (X + shift) + C)/2
r = np.sqrt(s) + remainder # 5,000
r = np.where(r== remainder, 0, r)
m = np.linspace(1,-1,2*len(all_x_values)+1,endpoint=True)
u = np.matlib.repmat(m,1,len(m)).flatten()
v = np.repeat(m,len(m))
phi = np.arctan2(v,u)
Y = r * np.cos(phi)
Z = r * np.sin(phi)
points = np.column_stack([X, Y, Z])
print('Created Magnetopause model from Fairfield 1971.')
return points
year_limit = datetime(1995, 1, 1,tzinfo=pytz.utc)
if mpause_model == 'Sibeck_Lopez_Roelof91':
assert not (Psw == False and Bz == False),\
'when model=Siebck_Lopez_Roelof91 Both Psw and Bz cannot be False.'
assert (Psw == False) or (Bz == False),\
'When model=Siebeck_Lopez_Roelof91 either Psw of Bz has to be False'
# case 1: if time is below year limit then automatically set
# Bz or Psw to nominal values if they are none.
if time[0] < year_limit.year:
if Bz == None:
Bz = 0
print(f'using nominal value for Bz = 0 because year: {time[0]}')
print(f'chosen is below the year limit {year_limit.year}')
if Psw == None:
Psw = 2.04
print(f'using nominal value for Bz because year: {time[0]}')
print(f'chosen is below the year limit {year_limit.year}')
# case 2 if either Bz or Psw are None then use time and hapiclient
# to get Bz and Psw
if Bz == None or Psw == None:
time_str = ""+tstr(time,5)
from hapiclient import hapi, hapitime2datetime
server = 'https://cdaweb.gsfc.nasa.gov/hapi';
dataset = 'OMNI_HRO2_1MIN';
parameters = 'BZ_GSE,Pressure';
opts = {'logging': False, 'usecache': True}
start = time2datetime(time) + timedelta(minutes=-30)
start = start.isoformat()
stop = time2datetime(time) + timedelta(minutes= 30)
stop = stop.isoformat()
data, meta = hapi(server, dataset, parameters, start, stop, **opts)
time_arr = hapitime2datetime(data['Time'])
data['Pressure'][data['Pressure'] == 99.99] = np.nan
data['BZ_GSE'][data['BZ_GSE'] == 9999.99] = np.nan
unixZero = datetime(1970,1,1,tzinfo = time_arr[0].tzinfo)
t1 = np.empty(time_arr.shape)
time_to_interpolate = \
(time2datetime(time).replace(tzinfo=pytz.UTC) - unixZero).total_seconds()
for i in range(len(time_arr)):
t1[i] = (time_arr[i] - unixZero).total_seconds()
else:
time_str = ''
if Bz == None:
if all(np.isnan(data['BZ_GSE'])):
Bz = 0 # Nomnal Value
print('OMNI_HRO2_1MIN has no Bz values for time interval')
print(f'{start} - {stop}.')
print(f'Using nominal value Bz = {Bz} [nT].')
else:
nans = np.isnan(data['BZ_GSE'])
BZ_GSE_OMNI= np.interp(t1, t1[~nans], data['BZ_GSE'][~nans])
Bz = np.interp(time_to_interpolate, t1, BZ_GSE_OMNI)
Bz_str = 'Bz={:.3g}'.format(Bz)
elif Bz is False and mpause_model == 'Sibeck_Lopez_Roelof91':
Bz = None
Bz_str = ''
print('Ignoring Bz to produce magnetopause becuase Bz=False and '+
'model = Sibeck_Lopez_Roelof91')
else:
Bz_str = 'Bz={:.3g}'.format(Bz)
if Psw == None:
if all(np.isnan(data['Pressure'])):
Psw = 2.04
print('OMNI_HRO2_1MIN has no pressure values for time interval')
print(f'{start} - {stop}.')
print(f'Using nominal value Psw = {Psw} [nPa].')
else:
nans = np.isnan(data['Pressure'])
pressure_OMNI = np.interp(t1, t1[~nans], data['Pressure'][~nans])
Psw = np.interp(time_to_interpolate, t1, pressure_OMNI)
Psw_str = 'Psw={:.3g}'.format(Psw)
elif Psw is False and mpause_model == 'Sibeck_Lopez_Roelof91':
Psw = None
Psw_str = ''
print('Ignoring Psw to produce magnetopause becuase Psw=False and '+
'model = Sibeck_Lopez_Roelof91')
else:
Psw_str = 'Psw={:.3g}'.format(Psw)
if return_title:
return (time_str, Bz_str, Psw_str)
# Fairfield 1971 stated that the abberation
# is 4 degrees. Later Fairfield revised the number to be 4.82 degrees
# after the rotation the new axis of symmetry has moved 0.313 from
# the new positive y-axis (post rotation)
# according to Tipsod Fortran code notes.
if model == 'Fairfield71':
rot_deg = -4.82
translate = np.array([[0, 0.313, 0]])
points = bowshock_Fairfield71(Bz, Psw, mpause_model)
points = np.dot(rotation_matrix((0,0,1), rot_deg,), points.T).T + np.dot(rotation_matrix((0,0,1), rot_deg), translate.T).T
if coord_sys != 'GSE':
points = hx.transform(points, time, 'GSE', coord_sys, 'car', 'car')
Bz = hx.transform([0,0,Bz], time, 'GSE', coord_sys, 'car', 'car')[0]
############################################################
####### start of the code to use programmable source #######
############################################################
import vtk
if False:
# this is never meant to run. it is only to get rid of error message
# that output is not defined. output is defined when running
# this script in the programmable source text box.
output = ''
# communication between "script" and "script (RequestInformation)"
executive = self.GetExecutive()
outInfo = executive.GetOutputInformation(0)
exts = [executive.UPDATE_EXTENT().Get(outInfo, i) for i in range(6)]
dims = [exts[1]+1, exts[3]+1, exts[5]+1]
# setting the sgrid exent
output.SetExtent(exts)
# setting up the points and allocate the number of points
pts = vtk.vtkPoints()
pts.Allocate(dims[0] * dims[1] * dims[2])
# color sections
annotations_list = list(pvs.GetColorTransferFunction('Magnetosphere Surface').Annotations)
if 'Bowshock' in annotations_list:
value = int(annotations_list[annotations_list.index('Bowshock')-1])
else:
value = int(1+len(annotations_list)/2)
colors = vtk.vtkUnsignedCharArray()
colors.SetNumberOfComponents(1)
colors.SetName("Magnetosphere Surface")
# insert points into vtkPoints
i = 0
for point in points:
pts.InsertPoint(i, point[0], point[1], point[2])
i += 1
colors.InsertNextTuple([value])
output.SetPoints(pts)
output.GetPointData().AddArray(colors)
def magnetopause(time, Bz=None, Psw=None, model='Shue97', coord_sys='GSM',
                 color=[0,1,0,0.5], representation='Surface',
                 out_dir=tempfile.gettempdir(), png_fn=None,
                 renderView=None, render=True, show=True,
                 fileName=None, camera=None, take_screenshot=False,
                 return_x_max=False):
    """Create/render the magnetopause surface.

    Thin convenience wrapper: forwards every argument unchanged to
    ``objs_wrapper`` with ``obj='Magnetopause'`` and returns its result.
    """
    wrapper_kwargs = dict(
        time=time, Bz=Bz, Psw=Psw, model=model, coord_sys=coord_sys,
        color=color, representation=representation,
        out_dir=out_dir, png_fn=png_fn,
        renderView=renderView, render=render, show=show,
        fileName=fileName, camera=camera,
        take_screenshot=take_screenshot, return_x_max=return_x_max,
        obj='Magnetopause',
    )
    return objs_wrapper(**wrapper_kwargs)
def bowshock(time, model='Fairfield71', Bz=None, Psw=None,
             mpause_model='Roelof_Sibeck93',
             coord_sys='GSM',
             color=[0,.3,.35,1], representation='Surface',
             out_dir=tempfile.gettempdir(), png_fn=None,
             renderView=None, render=True, show=True):
    """Create/render the bow shock surface.

    Thin convenience wrapper: forwards every argument unchanged to
    ``objs_wrapper`` with ``obj='Bowshock'`` and returns its result.
    """
    wrapper_kwargs = dict(
        time=time, Bz=Bz, Psw=Psw, model=model,
        mpause_model=mpause_model, coord_sys=coord_sys,
        color=color, representation=representation,
        out_dir=out_dir, png_fn=png_fn,
        renderView=renderView, render=render,
        show=show, obj='Bowshock',
    )
    return objs_wrapper(**wrapper_kwargs)
def satellite(time_o, time_f, satellite_id,
              coord_sys='GSM',
              color=[1,0,0,1],
              representation='Surface',
              tube_radius=None,
              shader_preset=None,
              region_colors=None,
              out_dir=tempfile.gettempdir(),
              renderView=None,
              render=True,
              show=True):
    """Create/render a satellite trajectory between two times.

    Thin wrapper around ``objs_wrapper`` with ``obj='satellite'``.

    :param time_o: start time of the trajectory.
    :param time_f: end time of the trajectory.
    :param satellite_id: identifier of the spacecraft to plot.
    :param out_dir: directory for generated output files (defaults to the
        system temporary directory).
    """
    return objs_wrapper(time_o=time_o, time_f=time_f, satellite_id=satellite_id,
                        coord_sys=coord_sys,
                        color=color,
                        representation=representation,
                        tube_radius=tube_radius,
                        shader_preset=shader_preset,
                        region_colors=region_colors,
                        # Bug fix: forward the caller's out_dir. The original
                        # passed tempfile.gettempdir() here, silently ignoring
                        # any out_dir argument supplied by the caller.
                        out_dir=out_dir,
                        renderView=renderView,
                        render=render,
                        show=show, obj='satellite')
def axis(time, val, coord_sys='GSM', lims=[-20,20], tick_spacing=1, tick_length=1,
         label=True, representation='Surface',
         renderView=None, render=True, show=True, debug=False):
    """Draw one coordinate axis ('X', 'Y' or 'Z') with tick marks.

    Thin convenience wrapper: forwards every argument unchanged to
    ``objs_wrapper`` with ``obj='axis'`` and returns its result.
    """
    wrapper_kwargs = dict(
        time=time, val=val, coord_sys=coord_sys,
        lims=lims, tick_spacing=tick_spacing, tick_length=tick_length,
        label=label, representation=representation,
        renderView=renderView, render=render, show=show,
        debug=debug, obj='axis',
    )
    return objs_wrapper(**wrapper_kwargs)
def _axis(self, time, val, coord_sys, lims,
          tick_spacing, tick_length, label):
    """Build one labeled coordinate axis (line plus tick marks) as VTK polylines.

    Intended to run inside a ParaView programmable source: ``self`` is the
    source object whose poly-data output receives the geometry.

    :param time: time used for the coordinate transformation.
    :param val: which axis to draw: 'X', 'Y' or 'Z'.
    :param coord_sys: target coordinate system; geometry is built in GSM and
        transformed when this differs.
    :param lims: two-element [min, max] extent of the axis.
    :param tick_spacing: distance between tick marks along the axis.
    :param tick_length: half-length of each tick segment.
    :param label: accepted for interface compatibility; not used in this body.
    """
    import numpy as np
    from numpy.matlib import repmat
    from hxform import hxform as hx
    def rotation_matrix(axis, theta):
        """
        Return the rotation matrix associated with counterclockwise rotation about
        the given axis by theta degrees.
        """
        # Quaternion/Rodrigues formulation of an axis-angle rotation;
        # theta arrives in degrees and is converted here.
        theta = np.deg2rad(theta)
        axis = np.asarray(axis)
        axis = axis / np.sqrt(np.dot(axis, axis))
        a = np.cos(theta / 2.0)
        b, c, d = -axis * np.sin(theta / 2.0)
        aa, bb, cc, dd = a * a, b * b, c * c, d * d
        bc, ad, ac, ab, bd, cd = b * c, a * d, a * c, a * b, b * d, c * d
        return np.array([[aa + bb - cc - dd, 2 * (bc + ad), 2 * (bd - ac)],
                         [2 * (bc - ad), aa + cc - bb - dd, 2 * (cd + ab)],
                         [2 * (bd + ac), 2 * (cd - ab), aa + dd - bb - cc]])
    assert lims[0] < lims[1], 'first element of lims have fewer elements than the second'
    # Tick positions: when the range spans zero, anchor the grid at 0 so a
    # tick always lands on the origin; otherwise march from lims[0].
    if lims[0] > 0 or lims[1] < 0:
        tick_array = np.arange(lims[0], lims[1], tick_spacing)
    else:
        tick_array = np.concatenate((np.arange(0,lims[0]-tick_spacing,-tick_spacing),np.arange(0,lims[1]+tick_spacing,tick_spacing)))
        tick_array = np.sort(np.unique(tick_array))
    # End points of the axis line itself, built along +X first.
    ends = np.array([[lims[0],0,0],[lims[1],0,0]])
    # pos_end = [lims[0],0,0]
    # 0,-1,0 0,1,0
    # Four tick endpoints per tick position: two short segments, one in the
    # Y direction and one in the Z direction, scaled by tick_length.
    tick_ends = np.array([[-1,0],[1,0],[0,-1],[0,1]])* tick_length
    tick_ends = repmat(tick_ends,tick_array.size,1)
    tick_array = np.repeat(tick_array,4)
    points = np.zeros((tick_array.size,3))
    points[:,0] = tick_array
    points[:,1:3] = tick_ends
    # Geometry was built along X; rotate 90 degrees into place for Y or Z.
    if val != "X":
        if val == "Y":
            rot_axis = (0,0,1)
        else:
            rot_axis = (0,-1,0) # rotation right hand rule on rotation axis
        rot_mat = rotation_matrix(rot_axis, 90)
        points = np.dot(rot_mat, points.T).T
        # NOTE(review): `ends` is multiplied on the right (ends @ R), which
        # applies the inverse rotation compared with the `points` line above.
        # For symmetric lims the drawn segment is the same; confirm intent
        # for asymmetric lims.
        ends = np.dot(ends, rot_mat)
    if coord_sys != 'GSM':
        ends = hx.transform(ends, time, 'GSM', coord_sys, 'car', 'car')
        points = hx.transform(points, time, 'GSM', coord_sys, 'car', 'car')
    ############################################################
    ####### start of the code to use programmable source #######
    ############################################################
    import vtk
    import paraview.simple as pvs
    pdo = self.GetPolyDataOutput()
    pdo.Allocate(points.shape[0] + ends.shape[0] , 1)
    pts = vtk.vtkPoints()
    # color sections
    # Scalar array used to color ticks (-1) and each axis (0/1/2) separately.
    colors = vtk.vtkIntArray()
    colors.SetNumberOfComponents(1)
    colors.SetName("{} axes".format(coord_sys))
    if val == 'X':
        scalar_value = 0
    elif val == 'Y':
        scalar_value = 1
    elif val == 'Z':
        scalar_value = 2
    tick_value = -1
    # Consecutive rows of `points` alternate start/end of each 2-point tick
    # segment; `start` toggles between opening a polyline and closing it.
    start = True
    id_counter = 0
    for tick in points:
        pts.InsertPoint(id_counter,tick[0],tick[1],tick[2])
        colors.InsertNextTuple([tick_value])
        if start:
            start = False
            tick_line = vtk.vtkPolyLine()
            tick_line.GetPointIds().SetNumberOfIds(2)
            tick_line.GetPointIds().SetId(0,id_counter)
        else:
            start = True
            tick_line.GetPointIds().SetId(1,id_counter)
            pdo.InsertNextCell(tick_line.GetCellType(), tick_line.GetPointIds())
        id_counter += 1
    # The axis line itself: one 2-point polyline between the two `ends`.
    axis_polyline = vtk.vtkPolyLine()
    axis_polyline.GetPointIds().SetNumberOfIds(2)
    pts.InsertPoint(id_counter,ends[0,0],ends[0,1],ends[0,2])
    pts.InsertPoint(id_counter+1,ends[1,0],ends[1,1],ends[1,2])
    axis_polyline.GetPointIds().SetId(0,id_counter)
    axis_polyline.GetPointIds().SetId(1,id_counter+1)
    colors.InsertNextTuple([scalar_value])
    colors.InsertNextTuple([scalar_value])
    pdo.InsertNextCell(axis_polyline.GetCellType(), axis_polyline.GetPointIds())
    pdo.SetPoints(pts)
    pdo.GetPointData().AddArray(colors)
def neutralsheet(time=None, psi=None,
                 Rh=8, G=10, Lw=10, d=4,
                 extend=[[-40,-5],[-15,15]], NxNy=[40,40],
                 coord_sys='GSM',
                 model='tsyganenko95',
                 color=[1,0,0,0.5],
                 representation='Surface',
                 out_dir=tempfile.gettempdir(), png_fn=None,
                 return_sheet=False,
                 renderView=None,
                 render=True,
                 show=True,
                 debug=False):
    """Create/render the magnetotail neutral sheet surface.

    Thin convenience wrapper: forwards every argument unchanged to
    ``objs_wrapper`` with ``obj='Neutralsheet'`` and returns its result.
    """
    wrapper_kwargs = dict(
        time=time, psi=psi, Rh=Rh, G=G, Lw=Lw, d=d,
        extend=extend, NxNy=NxNy,
        coord_sys=coord_sys, model=model, color=color,
        representation=representation,
        out_dir=out_dir, png_fn=png_fn,
        return_sheet=return_sheet,
        renderView=renderView, render=render, show=show,
        debug=debug, obj='Neutralsheet',
    )
    return objs_wrapper(**wrapper_kwargs)
# used to execute code inside of the programmable source script box.
# Dispatch table: when this module's text is executed inside a ParaView
# programmable source, the host injects `kwargs`, `self` and `output`
# into the namespace; `kwargs['obj']` selects which private builder runs.
if False:
    # the assignment below is to get rid of warning and error messages.
    kwargs = ''
    # the variables below are defined inside of programmable source not here.
    self = ''
    output = ''
if "kwargs" in vars():
    if kwargs['obj'] == 'dipole field':
        _dipole_field(self, output, time=kwargs['time'], extend=kwargs['extend'], NxNyNz=kwargs['NxNyNz'],
                 coord_sys=kwargs['coord_sys'])
    # NOTE(review): this second `if` restarts the if/elif chain, so the
    # 'dipole field' test above and the chain below are evaluated
    # independently; presumably this was meant to be `elif` — confirm.
    if kwargs['obj'] == 'satellite':
        _satellite(self, time_o=kwargs['time_o'], time_f=kwargs['time_f'],
                   satellite_id=kwargs['satellite_id'],
                   coord_sys=kwargs['coord_sys'],
                   region_colors=kwargs['region_colors'])
    elif kwargs['obj'] == 'Magnetopause':
        _magnetopause(self, output, time=kwargs['time'], Bz=kwargs['Bz'],
                      Psw=kwargs['Psw'], model=kwargs['model'],
                      coord_sys=kwargs['coord_sys'],
                      return_x_max=kwargs['return_x_max'])
    elif kwargs['obj'] == 'Bowshock':
        _bowshock(self, output, time=kwargs['time'], Bz=kwargs['Bz'],
                  Psw=kwargs['Psw'], model=kwargs['model'],
                  mpause_model=kwargs['mpause_model'],
                  coord_sys=kwargs['coord_sys'])
    elif kwargs['obj'] == 'Neutralsheet':
        _neutralsheet(self, output, time=kwargs['time'], psi=kwargs['psi'],
                      Rh=kwargs['Rh'], G=kwargs['G'], Lw=kwargs['Lw'],
                      d=kwargs['d'], extend=kwargs['extend'],
                      NxNy=kwargs['NxNy'], coord_sys=kwargs['coord_sys'],
                      model=kwargs['model'],
                      return_sheet=kwargs['return_sheet'])
    elif kwargs['obj'] == 'Plasmasheet':
        _plasmasheet(self, output, time=kwargs['time'], psi=kwargs['psi'],
                     Rh=kwargs['Rh'], G=kwargs['G'], Lw=kwargs['Lw'],
                     d=kwargs['d'], extend=kwargs['extend'],
                     NxNy=kwargs['NxNy'], coord_sys=kwargs['coord_sys'],
                     model=kwargs['model'],
                     return_sheet=kwargs['return_sheet'])
    elif kwargs['obj'] == 'axis':
        _axis(self, time=kwargs['time'], val=kwargs['val'],
              coord_sys=kwargs['coord_sys'], lims=kwargs['lims'],
              tick_spacing=kwargs['tick_spacing'], tick_length=kwargs['tick_length'],label=kwargs['label'])
    elif kwargs['obj'] == 'Plasmapause':
        _plasmapause(self, output, N=kwargs['N'] , time=kwargs['time'],
                     coord_sys=kwargs['coord_sys'])
    elif kwargs['obj'] == 'latitude':
        _latitude_lines(self, time=kwargs['time'], coord_sys=kwargs['coord_sys'],
                        increment=kwargs['increment'], color=kwargs['color'])
    elif kwargs['obj'] == 'longitude':
        _longitude_lines(self, time=kwargs['time'], coord_sys=kwargs['coord_sys'],
                         increment=kwargs['increment'], color=kwargs['color'])
acdf30bb7beee40e1fdf0254cb3ecb1e92558e34 | 2,607 | py | Python | detector/YOLOX/yolox/utils/ema.py | collector-m/UniTrack | e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb | [
"MIT"
] | 240 | 2021-06-20T13:50:42.000Z | 2022-03-31T05:08:29.000Z | detector/YOLOX/yolox/utils/ema.py | collector-m/UniTrack | e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb | [
"MIT"
] | 27 | 2021-07-12T01:19:39.000Z | 2021-12-27T08:05:08.000Z | detector/YOLOX/yolox/utils/ema.py | collector-m/UniTrack | e8e56e164f2dd40ba590a19ed7a4a75d8da7e2eb | [
"MIT"
] | 24 | 2021-07-01T09:48:24.000Z | 2022-03-14T06:39:46.000Z | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
import math
from copy import deepcopy
import torch
import torch.nn as nn
def is_parallel(model):
    """Check whether *model* is wrapped in a (distributed) data-parallel container.

    Args:
        model (nn.Module): model to inspect.

    Returns:
        bool: True if ``model`` is a ``DataParallel`` /
        ``DistributedDataParallel`` wrapper (including apex's wrapper when
        apex is installed).
    """
    parallel_types = [
        nn.parallel.DataParallel,
        nn.parallel.DistributedDataParallel,
    ]
    # apex is an optional dependency: only recognize its wrapper when it is
    # importable, instead of raising ImportError for every caller (the
    # original unconditional `import apex` broke even plain-model checks
    # on machines without apex).
    try:
        import apex
        parallel_types.append(apex.parallel.distributed.DistributedDataParallel)
    except ImportError:
        pass
    return isinstance(model, tuple(parallel_types))
def copy_attr(a, b, include=(), exclude=()):
    """Copy instance attributes from ``b`` onto ``a``.

    Names with a leading underscore and names listed in ``exclude`` are
    skipped; when ``include`` is non-empty, only names listed there are
    copied.
    """
    for name, value in vars(b).items():
        selected = not include or name in include
        if selected and not name.startswith("_") and name not in exclude:
            setattr(a, name, value)
class ModelEMA:
    """Exponential moving average (EMA) of model weights.

    Adapted from https://github.com/rwightman/pytorch-image-models.
    Keeps a smoothed shadow copy of everything in the model ``state_dict``
    (parameters and buffers), similar to
    tf.train.ExponentialMovingAverage. A smoothed version of the weights is
    necessary for some training schemes to perform well. Create this after
    model init and GPU assignment but before wrapping the model in
    distributed-training containers.
    """

    def __init__(self, model, decay=0.9999, updates=0):
        """
        Args:
            model (nn.Module): model whose weights are averaged.
            decay (float): asymptotic EMA decay rate.
            updates (int): initial value of the EMA update counter.
        """
        # Shadow model: a frozen FP32 deep copy, unwrapped from any
        # DataParallel-style container.
        source = model.module if is_parallel(model) else model
        self.ema = deepcopy(source).eval()
        self.updates = updates
        # Ramp the effective decay up from 0 so early updates track the
        # rapidly-changing young model more closely.
        self.decay = lambda x: decay * (1 - math.exp(-x / 2000))
        for param in self.ema.parameters():
            param.requires_grad_(False)

    def update(self, model):
        """Fold the current weights of *model* into the running average."""
        with torch.no_grad():
            self.updates += 1
            decay_factor = self.decay(self.updates)
            source = model.module if is_parallel(model) else model
            live_state = source.state_dict()
            for name, shadow in self.ema.state_dict().items():
                # Only floating-point tensors are averaged; integer buffers
                # keep the shadow copy's value.
                if shadow.dtype.is_floating_point:
                    shadow *= decay_factor
                    shadow += (1.0 - decay_factor) * live_state[name].detach()

    def update_attr(self, model, include=(), exclude=("process_group", "reducer")):
        """Mirror selected plain attributes of *model* onto the EMA copy."""
        copy_attr(self.ema, model, include, exclude)
| 35.22973 | 93 | 0.622555 |
acdf31fb0737441500b86d0cf0d9f454134014a0 | 28,973 | py | Python | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_06_01/aio/operations/_agent_pools_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_06_01/aio/operations/_agent_pools_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_06_01/aio/operations/_agent_pools_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AgentPoolsOperations:
"""AgentPoolsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    def list(
        self,
        resource_group_name: str,
        resource_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.AgentPoolListResult"]:
        """Gets a list of agent pools in the specified managed cluster.

        Gets a list of agent pools in the specified managed cluster. The operation returns properties
        of each agent pool.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either AgentPoolListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2020_06_01.models.AgentPoolListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AgentPoolListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        # Builds the GET request for the first page (from the URL template)
        # or for a follow-up page (next_link is already a complete URL).
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
                    'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already encodes the query string; send it verbatim.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page into (link-to-next-page, items-on-this-page).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('AgentPoolListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches a single page; any non-200 response is surfaced as an error.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools'}  # type: ignore
async def get(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> "_models.AgentPool":
"""Gets the agent pool.
Gets the details of the agent pool by managed cluster and resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param agent_pool_name: The name of the agent pool.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AgentPool, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_06_01.models.AgentPool
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AgentPool"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AgentPool', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        resource_name: str,
        agent_pool_name: str,
        parameters: "_models.AgentPool",
        **kwargs: Any
    ) -> "_models.AgentPool":
        """Issue the initial PUT of the create-or-update long-running operation.

        Performs a single round-trip without polling; ``begin_create_or_update``
        wraps this with the LRO poller.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AgentPool"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
            'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the AgentPool model as the request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'AgentPool')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # 200 = updated existing pool, 201 = created; both carry an AgentPool body.
        if response.status_code == 200:
            deserialized = self._deserialize('AgentPool', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('AgentPool', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        agent_pool_name: str,
        parameters: "_models.AgentPool",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.AgentPool"]:
        """Creates or updates an agent pool.

        Creates or updates an agent pool in the specified managed cluster.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource.
        :type resource_name: str
        :param agent_pool_name: The name of the agent pool.
        :type agent_pool_name: str
        :param parameters: Parameters supplied to the Create or Update an agent pool operation.
        :type parameters: ~azure.mgmt.containerservice.v2020_06_01.models.AgentPool
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either AgentPool or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2020_06_01.models.AgentPool]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.AgentPool"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PUT when not resuming from a saved poller
        # state; a continuation token means the operation is already running.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                resource_name=resource_name,
                agent_pool_name=agent_pool_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        # Deserializes the final polling response into the AgentPool result.
        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('AgentPool', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
            'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
            'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
        }
        # polling=True -> standard ARM polling; False -> fire-and-forget;
        # anything else is treated as a caller-supplied polling method.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'}  # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
agent_pool_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-06-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'} # type: ignore
async def begin_delete(
    self,
    resource_group_name: str,
    resource_name: str,
    agent_pool_name: str,
    **kwargs: Any
) -> AsyncLROPoller[None]:
    """Deletes an agent pool.

    Deletes the agent pool in the specified managed cluster.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param resource_name: The name of the managed cluster resource.
    :type resource_name: str
    :param agent_pool_name: The name of the agent pool.
    :type agent_pool_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[None]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[None]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh operation: fire the initial DELETE. The lambda 'cls' makes
        # _delete_initial hand back the raw pipeline response so the poller
        # can drive the long-running operation from its headers.
        raw_result = await self._delete_initial(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            agent_pool_name=agent_pool_name,
            cls=lambda x,y,z: x,
            **kwargs
        )
        # These kwargs were consumed by the initial call; drop them so they
        # are not replayed into the polling pipeline below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Final deserialization step: this operation returns no body, so only
        # the optional custom 'cls' hook sees the response.
        if cls:
            return cls(pipeline_response, None, {})

    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
        'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
    }
    # Choose the polling strategy: default ARM polling, no polling, or a
    # caller-supplied polling object.
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume a previously-saved poller instead of starting a new LRO.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}'}  # type: ignore
async def get_upgrade_profile(
    self,
    resource_group_name: str,
    resource_name: str,
    agent_pool_name: str,
    **kwargs: Any
) -> "_models.AgentPoolUpgradeProfile":
    """Gets upgrade profile for an agent pool.

    Gets the details of the upgrade profile for an agent pool with a specified resource group and
    managed cluster name.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param resource_name: The name of the managed cluster resource.
    :type resource_name: str
    :param agent_pool_name: The name of the agent pool.
    :type agent_pool_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AgentPoolUpgradeProfile, or the result of cls(response)
    :rtype: ~azure.mgmt.containerservice.v2020_06_01.models.AgentPoolUpgradeProfile
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AgentPoolUpgradeProfile"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # Fill in the URL template for the "default" upgrade profile resource.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
        'agentPoolName': self._serialize.url("agent_pool_name", agent_pool_name, 'str'),
    }
    url = self._client.format_url(self.get_upgrade_profile.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AgentPoolUpgradeProfile', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_upgrade_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/agentPools/{agentPoolName}/upgradeProfiles/default'}  # type: ignore
async def get_available_agent_pool_versions(
    self,
    resource_group_name: str,
    resource_name: str,
    **kwargs: Any
) -> "_models.AgentPoolAvailableVersions":
    """Gets a list of supported versions for the specified agent pool.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param resource_name: The name of the managed cluster resource.
    :type resource_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: AgentPoolAvailableVersions, or the result of cls(response)
    :rtype: ~azure.mgmt.containerservice.v2020_06_01.models.AgentPoolAvailableVersions
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.AgentPoolAvailableVersions"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2020-06-01"
    accept = "application/json"

    # This endpoint is cluster-scoped: no agentPoolName path parameter.
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', min_length=1),
        'resourceName': self._serialize.url("resource_name", resource_name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$'),
    }
    url = self._client.format_url(self.get_available_agent_pool_versions.metadata['url'], **path_format_arguments)  # type: ignore

    query_parameters = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    header_parameters = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('AgentPoolAvailableVersions', pipeline_response)
    if cls:
        return cls(pipeline_response, deserialized, {})
    return deserialized
get_available_agent_pool_versions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/availableAgentPoolVersions'}  # type: ignore
| 51.279646 | 247 | 0.672695 |
acdf35a94c432d7820367c8771d73e2538743070 | 48,813 | py | Python | python/ccxt/bitz.py | dgdiginex/ccxt | cccd590576cbf48d26cf9e3f65cc54fdd466a139 | [
"MIT"
] | 1 | 2021-02-08T21:56:13.000Z | 2021-02-08T21:56:13.000Z | python/ccxt/bitz.py | yucelalbar/ccxt | 672510401fba809172fac8272e1af463c778358a | [
"MIT"
] | null | null | null | python/ccxt/bitz.py | yucelalbar/ccxt | 672510401fba809172fac8272e1af463c778358a | [
"MIT"
] | 2 | 2020-10-13T03:24:08.000Z | 2020-10-15T06:25:07.000Z | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
class bitz(Exchange):
def describe(self):
    """Return the static exchange description merged over the ccxt base defaults.

    Declares capabilities, timeframes, REST endpoints, fee schedule, currency
    aliases and error-code mapping for Bit-Z.
    """
    return self.deep_extend(super(bitz, self).describe(), {
        'id': 'bitz',
        'name': 'Bit-Z',
        'countries': ['HK'],
        'rateLimit': 2000,
        'version': 'v2',
        'userAgent': self.userAgents['chrome'],
        # Unified-API capability flags.
        'has': {
            'cancelOrder': True,
            'cancelOrders': True,
            'createOrder': True,
            'createMarketOrder': False,
            'fetchBalance': True,
            'fetchDeposits': True,
            'fetchClosedOrders': True,
            'fetchMarkets': True,
            'fetchOHLCV': True,
            'fetchOpenOrders': True,
            'fetchOrder': True,
            'fetchOrderBook': True,
            'fetchOrders': True,
            'fetchTicker': True,
            'fetchTickers': True,
            'fetchTime': True,
            'fetchTrades': True,
            'fetchTransactions': False,
            'fetchWithdrawals': True,
        },
        # Unified timeframe -> exchange "resolution" parameter.
        'timeframes': {
            '1m': '1min',
            '5m': '5min',
            '15m': '15min',
            '30m': '30min',
            '1h': '60min',
            '4h': '4hour',
            '1d': '1day',
            '5d': '5day',
            '1w': '1week',
            '1M': '1mon',
        },
        'hostname': 'apiv2.bitz.com',
        'urls': {
            'logo': 'https://user-images.githubusercontent.com/51840849/87443304-fec5e000-c5fd-11ea-98f8-ba8e67f7eaff.jpg',
            'api': {
                'market': 'https://{hostname}',
                'trade': 'https://{hostname}',
                'assets': 'https://{hostname}',
            },
            'www': 'https://www.bitz.com',
            'doc': 'https://apidoc.bitz.com/en/',
            'fees': 'https://www.bitz.com/fee?type=1',
            'referral': 'https://u.bitz.com/register?invite_code=1429193',
        },
        # REST endpoint groups; names become implicit methods, e.g.
        # marketGetTicker, tradePostAddEntrustSheet, assetsPostGetUserAssets.
        'api': {
            'market': {
                'get': [
                    'ticker',
                    'depth',
                    'order',  # trades
                    'tickerall',
                    'kline',
                    'symbolList',
                    'getServerTime',
                    'currencyRate',
                    'currencyCoinRate',
                    'coinRate',
                ],
            },
            'trade': {
                'post': [
                    'addEntrustSheet',
                    'cancelEntrustSheet',
                    'cancelAllEntrustSheet',
                    'getUserHistoryEntrustSheet',  # closed orders
                    'getUserNowEntrustSheet',  # open orders
                    'getEntrustSheetInfo',  # order
                    'depositOrWithdraw',  # transactions
                ],
            },
            'assets': {
                'post': [
                    'getUserAssets',
                ],
            },
        },
        'fees': {
            'trading': {
                'maker': 0.002,
                'taker': 0.002,
            },
            'funding': {
                # Per-currency withdrawal fees; strings are percentages,
                # numbers are flat amounts in the withdrawn currency.
                'withdraw': {
                    'BTC': '0.5%',
                    'DKKT': '0.5%',
                    'ETH': 0.01,
                    'USDT': '0.5%',
                    'LTC': '0.5%',
                    'FCT': '0.5%',
                    'LSK': '0.5%',
                    'HXI': '0.8%',
                    'ZEC': '0.5%',
                    'DOGE': '0.5%',
                    'MZC': '0.5%',
                    'ETC': '0.5%',
                    'GXS': '0.5%',
                    'XPM': '0.5%',
                    'PPC': '0.5%',
                    'BLK': '0.5%',
                    'XAS': '0.5%',
                    'HSR': '0.5%',
                    'NULS': 5.0,
                    'VOISE': 350.0,
                    'PAY': 1.5,
                    'EOS': 0.6,
                    'YBCT': 35.0,
                    'OMG': 0.3,
                    'OTN': 0.4,
                    'BTX': '0.5%',
                    'QTUM': '0.5%',
                    'DASH': '0.5%',
                    'GAME': '0.5%',
                    'BCH': '0.5%',
                    'GNT': 9.0,
                    'SSS': 1500.0,
                    'ARK': '0.5%',
                    'PART': '0.5%',
                    'LEO': '0.5%',
                    'DGB': '0.5%',
                    'ZSC': 130.0,
                    'VIU': 350.0,
                    'BTG': '0.5%',
                    'ARN': 10.0,
                    'VTC': '0.5%',
                    'BCD': '0.5%',
                    'TRX': 200.0,
                    'HWC': '0.5%',
                    'UNIT': '0.5%',
                    'OXY': '0.5%',
                    'MCO': 0.3500,
                    'SBTC': '0.5%',
                    'BCX': '0.5%',
                    'ETF': '0.5%',
                    'PYLNT': 0.4000,
                    'XRB': '0.5%',
                    'ETP': '0.5%',
                },
            },
        },
        'precision': {
            'amount': 8,
            'price': 8,
        },
        'options': {
            'fetchOHLCVVolume': True,
            'fetchOHLCVWarning': True,
            'lastNonceTimestamp': 0,
        },
        # Exchange tickers that collide with other projects' symbols.
        'commonCurrencies': {
            # https://github.com/ccxt/ccxt/issues/3881
            # https://support.bit-z.pro/hc/en-us/articles/360007500654-BOX-BOX-Token-
            'BOX': 'BOX Token',
            'LEO': 'LeoCoin',
            'XRB': 'NANO',
            'PXC': 'Pixiecoin',
            'VTC': 'VoteCoin',
            'TTC': 'TimesChain',
        },
        # Exchange error code -> ccxt exception class.
        'exceptions': {
            # '200': Success
            '-102': ExchangeError,  # Invalid parameter
            '-103': AuthenticationError,  # Verification failed
            '-104': ExchangeNotAvailable,  # Network Error-1
            '-105': AuthenticationError,  # Invalid api signature
            '-106': ExchangeNotAvailable,  # Network Error-2
            '-109': AuthenticationError,  # Invalid scretKey
            '-110': DDoSProtection,  # The number of access requests exceeded
            '-111': PermissionDenied,  # Current IP is not in the range of trusted IP
            '-112': OnMaintenance,  # Service is under maintenance
            '-114': RateLimitExceeded,  # The number of daily requests has reached the limit
            '-117': AuthenticationError,  # The apikey expires
            '-100015': AuthenticationError,  # Trade password error
            '-100044': ExchangeError,  # Fail to request data
            '-100101': ExchangeError,  # Invalid symbol
            '-100201': ExchangeError,  # Invalid symbol
            '-100301': ExchangeError,  # Invalid symbol
            '-100401': ExchangeError,  # Invalid symbol
            '-100302': ExchangeError,  # Type of K-line error
            '-100303': ExchangeError,  # Size of K-line error
            '-200003': AuthenticationError,  # Please set trade password
            '-200005': PermissionDenied,  # This account can not trade
            '-200025': ExchangeNotAvailable,  # Temporary trading halt
            '-200027': InvalidOrder,  # Price Error
            '-200028': InvalidOrder,  # Amount must be greater than 0
            '-200029': InvalidOrder,  # Number must be between %s and %d
            '-200030': InvalidOrder,  # Over price range
            '-200031': InsufficientFunds,  # Insufficient assets
            '-200032': ExchangeError,  # System error. Please contact customer service
            '-200033': ExchangeError,  # Fail to trade
            '-200034': OrderNotFound,  # The order does not exist
            '-200035': OrderNotFound,  # Cancellation error, order filled
            '-200037': InvalidOrder,  # Trade direction error
            '-200038': ExchangeError,  # Trading Market Error
            '-200055': OrderNotFound,  # Order record does not exist
            '-300069': AuthenticationError,  # api_key is illegal
            '-300101': ExchangeError,  # Transaction type error
            '-300102': InvalidOrder,  # Price or number cannot be less than 0
            '-300103': AuthenticationError,  # Trade password error
            '-301001': ExchangeNotAvailable,  # Network Error-3
        },
    })
def fetch_markets(self, params={}):
    """Load the exchange's tradable markets.

    Calls the public symbolList endpoint and converts each entry into the
    unified ccxt market structure.

    :param dict params: extra parameters forwarded to the endpoint
    :returns: a list of unified market dicts
    """
    response = self.marketGetSymbolList(params)
    # response['data'] is keyed by market id, e.g. {"ltc_btc": {...}, ...};
    # each value carries coinFrom/coinTo, precisions and min/max trade sizes.
    markets = self.safe_value(response, 'data')
    result = []
    for id in list(markets.keys()):
        entry = markets[id]
        baseId = self.safe_string(entry, 'coinFrom')
        quoteId = self.safe_string(entry, 'coinTo')
        # Uppercase first, then map through commonCurrencies aliases.
        base = self.safe_currency_code(baseId.upper())
        quote = self.safe_currency_code(quoteId.upper())
        precision = {
            'amount': self.safe_integer(entry, 'numberFloat'),
            'price': self.safe_integer(entry, 'priceFloat'),
        }
        result.append({
            'info': entry,
            'id': id,
            'numericId': self.safe_string(entry, 'id'),
            'symbol': base + '/' + quote,
            'base': base,
            'quote': quote,
            'baseId': baseId,
            'quoteId': quoteId,
            'active': True,
            'precision': precision,
            'limits': {
                'amount': {
                    'min': self.safe_float(entry, 'minTrade'),
                    'max': self.safe_float(entry, 'maxTrade'),
                },
                'price': {
                    # Smallest representable price step from the precision.
                    'min': math.pow(10, -precision['price']),
                    'max': None,
                },
                'cost': {
                    'min': None,
                    'max': None,
                },
            },
        })
    return result
def fetch_balance(self, params={}):
    """Query the account's asset balances.

    Uses the private getUserAssets endpoint; each entry exposes
    'num' (total), 'over' (free) and 'lock' (used).

    :returns: a unified balance structure
    """
    self.load_markets()
    response = self.assetsPostGetUserAssets(params)
    result = {'info': response}
    entries = self.safe_value(response['data'], 'info')
    for entry in entries:
        code = self.safe_currency_code(self.safe_string(entry, 'name'))
        account = self.account()
        account['used'] = self.safe_float(entry, 'lock')
        account['total'] = self.safe_float(entry, 'num')
        account['free'] = self.safe_float(entry, 'over')
        result[code] = account
    return self.parse_balance(result)
def parse_ticker(self, ticker, market=None):
    """Convert a raw Bit-Z ticker dict into the unified ticker structure.

    The raw payload carries no timestamp of its own; callers stamp the
    result with the response-level microtime afterwards.
    """
    symbol = self.safe_symbol(self.safe_string(ticker, 'symbol'), market, '_')
    open = self.safe_float(ticker, 'open')
    last = self.safe_float(ticker, 'now')
    change = None
    average = None
    # Derive change/average locally; only possible when both ends are known.
    if (last is not None) and (open is not None):
        change = last - open
        average = self.sum(last, open) / 2
    timestamp = None
    return {
        'symbol': symbol,
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'high': self.safe_float(ticker, 'high'),
        'low': self.safe_float(ticker, 'low'),
        'bid': self.safe_float(ticker, 'bidPrice'),
        'bidVolume': self.safe_float(ticker, 'bidQty'),
        'ask': self.safe_float(ticker, 'askPrice'),
        'askVolume': self.safe_float(ticker, 'askQty'),
        'vwap': None,
        'open': open,
        'close': last,
        'last': last,
        'previousClose': None,
        'change': change,
        'percentage': self.safe_float(ticker, 'priceChange24h'),
        'average': average,
        'baseVolume': self.safe_float(ticker, 'volume'),
        'quoteVolume': self.safe_float(ticker, 'quoteVolume'),
        'info': ticker,
    }
def parse_microtime(self, microtime):
    """Convert a Bit-Z "microtime" string such as "0.66955600 1535969146"
    (fractional seconds, then whole seconds) into a millisecond timestamp.

    Returns None unchanged when no value was supplied.
    """
    if microtime is None:
        return microtime
    fractional, _, whole = microtime.partition(' ')
    total_seconds = self.sum(int(whole), float(fractional))
    return int(total_seconds * 1000)
def fetch_ticker(self, symbol, params={}):
    """Fetch the ticker for one market and stamp it with the server-side
    microtime carried in the response envelope.
    """
    self.load_markets()
    market = self.market(symbol)
    response = self.marketGetTicker(self.extend({'symbol': market['id']}, params))
    timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
    parsed = self.parse_ticker(response['data'], market)
    return self.extend(parsed, {
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
    })
def fetch_tickers(self, symbols=None, params={}):
    """Fetch tickers for several (or all) markets.

    When ``symbols`` is given, they are joined into the request's
    ``symbols`` parameter; otherwise the exchange returns all markets.
    Every ticker is stamped with the response-level microtime.
    """
    self.load_markets()
    request = {}
    if symbols is not None:
        ids = self.market_ids(symbols)
        request['symbols'] = ','.join(ids)
    response = self.marketGetTickerall(self.extend(request, params))
    #
    # response['data'] is a dict keyed by market id, e.g.:
    #     {ela_btc: {symbol: "ela_btc", volume: "3.28", now: "0.00149078", ...}}
    # with 'time'/'microtime' in the envelope.
    #
    tickers = self.safe_value(response, 'data')
    timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
    result = {}
    ids = list(tickers.keys())
    for i in range(0, len(ids)):
        id = ids[i]
        ticker = tickers[id]
        market = None
        if id in self.markets_by_id:
            market = self.markets_by_id[id]
        ticker = self.parse_ticker(tickers[id], market)
        symbol = ticker['symbol']
        # Fall back to deriving the symbol from the raw id when parsing
        # could not resolve it (e.g. an unlisted market).
        if symbol is None:
            if market is not None:
                symbol = market['symbol']
            else:
                baseId, quoteId = id.split('_')
                base = self.safe_currency_code(baseId)
                quote = self.safe_currency_code(quoteId)
                symbol = base + '/' + quote
        # Entries whose symbol still cannot be determined are dropped.
        if symbol is not None:
            result[symbol] = self.extend(ticker, {
                'timestamp': timestamp,
                'datetime': self.iso8601(timestamp),
            })
    return self.filter_by_array(result, 'symbol', symbols)
def fetch_time(self, params={}):
    """Return the exchange server time as a millisecond timestamp.

    The endpoint replies with a Unix-seconds 'time' field in the envelope;
    safe_timestamp scales it to milliseconds.
    """
    return self.safe_timestamp(self.marketGetGetServerTime(params), 'time')
def fetch_order_book(self, symbol, limit=None, params={}):
    """Fetch the order book for a market.

    Note: ``limit`` is accepted for interface compatibility but is not
    forwarded to the exchange.
    """
    self.load_markets()
    response = self.marketGetDepth(self.extend({
        'symbol': self.market_id(symbol),
    }, params))
    # data carries 'asks'/'bids' as [price, amount, total] string triplets.
    book = self.safe_value(response, 'data')
    timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
    return self.parse_order_book(book, timestamp)
def parse_trade(self, trade, market=None):
    """Parse one public trade entry.

    Raw format (from marketGetOrder):
        {id: 115807453, t: "19:36:24", T: 1535974584,
         p: "0.03983296", n: "0.1000", s: "buy"}

    Fix: the cost was previously computed unconditionally via
    price_to_precision(symbol, ...), which crashed when this parser was
    invoked without a market (symbol is None). The cost is now derived
    only when price, amount AND symbol are all available.
    """
    id = self.safe_string(trade, 'id')
    timestamp = self.safe_timestamp(trade, 'T')  # 'T' is Unix seconds
    symbol = None
    if market is not None:
        symbol = market['symbol']
    price = self.safe_float(trade, 'p')
    amount = self.safe_float(trade, 'n')
    cost = None
    if (symbol is not None) and (price is not None) and (amount is not None):
        cost = self.price_to_precision(symbol, amount * price)
    side = self.safe_string(trade, 's')
    return {
        'timestamp': timestamp,
        'datetime': self.iso8601(timestamp),
        'symbol': symbol,
        'id': id,
        'order': None,
        'type': 'limit',  # the public feed only contains limit trades
        'side': side,
        'takerOrMaker': None,
        'price': price,
        'amount': amount,
        'cost': cost,
        'fee': None,
        'info': trade,
    }
def fetch_trades(self, symbol, since=None, limit=None, params={}):
    """Fetch recent public trades for a market via the 'order' endpoint.

    ``since`` and ``limit`` are accepted for interface compatibility;
    filtering is applied client-side by parse_trades.
    """
    self.load_markets()
    market = self.market(symbol)
    response = self.marketGetOrder(self.extend({
        'symbol': market['id'],
    }, params))
    # data is a list of {id, t, T, p, n, s} trade records.
    return self.parse_trades(response['data'], market, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
    """Convert one raw kline dict, e.g.
        {time: "1535973420000", open: "...", high: "...", low: "...",
         close: "...", volume: "...", datetime: "..."}
    into the unified [timestamp, open, high, low, close, volume] row.
    """
    row = [self.safe_integer(ohlcv, 'time')]
    for key in ('open', 'high', 'low', 'close', 'volume'):
        row.append(self.safe_float(ohlcv, key))
    return row
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
    """Fetch OHLCV candles for a market.

    :param str symbol: unified market symbol
    :param str timeframe: unified timeframe, mapped via self.timeframes
    :param int since: start of the window in milliseconds
    :param int limit: number of candles (1-300); required when since is set
    :raises ArgumentsRequired: when since is given without limit

    Fix: the request's 'to' bound was computed as
    since + limit * duration * 1000, but 'duration' is already expressed in
    milliseconds (parse_timeframe returns seconds, scaled below), so the
    extra * 1000 pushed 'to' absurdly far into the future. The window end
    is since + limit * duration.
    """
    self.load_markets()
    duration = self.parse_timeframe(timeframe) * 1000  # candle length in ms
    market = self.market(symbol)
    request = {
        'symbol': market['id'],
        'resolution': self.timeframes[timeframe],
    }
    if limit is not None:
        request['size'] = min(limit, 300)  # the exchange accepts 1-300
        if since is not None:
            request['to'] = self.sum(since, limit * duration)
    else:
        if since is not None:
            raise ArgumentsRequired(self.id + ' fetchOHLCV requires a limit argument if the since argument is specified')
    response = self.marketGetKline(self.extend(request, params))
    #
    # data.bars is a list of kline dicts:
    #     {time: "1535973420000", open: "...", high: "...", low: "...",
    #      close: "...", volume: "...", datetime: "..."}
    #
    data = self.safe_value(response, 'data', {})
    bars = self.safe_value(data, 'bars', [])
    return self.parse_ohlcvs(bars, market, timeframe, since, limit)
def parse_order_status(self, status):
    """Map a Bit-Z numeric order state onto the unified ccxt status;
    unknown codes pass through unchanged.
    """
    return self.safe_string({
        '0': 'open',      # unfilled
        '1': 'open',      # partially filled
        '2': 'closed',    # filled
        '3': 'canceled',  # cancelled
    }, status, status)
def parse_order(self, order, market=None):
    """Convert a raw entrust-sheet dict (from createOrder / fetchOrder /
    fetchOrders) into the unified ccxt order structure.

    Raw fields: id, uId, price, number (amount), numberOver (remaining),
    numberDeal (filled), flag ('sale'/'buy'), status (0-3),
    coinFrom/coinTo, and optionally created/timestamp/orderTotalPrice.
    """
    id = self.safe_string(order, 'id')
    symbol = None
    if market is None:
        # No market supplied: try to resolve it from coinFrom/coinTo,
        # falling back to building the symbol string directly.
        baseId = self.safe_string(order, 'coinFrom')
        quoteId = self.safe_string(order, 'coinTo')
        if (baseId is not None) and (quoteId is not None):
            marketId = baseId + '_' + quoteId
            if marketId in self.markets_by_id:
                market = self.safe_value(self.markets_by_id, marketId)
            else:
                base = self.safe_currency_code(baseId)
                quote = self.safe_currency_code(quoteId)
                symbol = base + '/' + quote
    if market is not None:
        symbol = market['symbol']
    side = self.safe_string(order, 'flag')
    if side is not None:
        # The exchange reports 'sale' for sells.
        side = 'sell' if (side == 'sale') else 'buy'
    price = self.safe_float(order, 'price')
    amount = self.safe_float(order, 'number')
    remaining = self.safe_float(order, 'numberOver')
    filled = self.safe_float(order, 'numberDeal')
    # Prefer an explicit ms 'timestamp' (stamped by createOrder), otherwise
    # the Unix-seconds 'created' field.
    timestamp = self.safe_integer(order, 'timestamp')
    if timestamp is None:
        timestamp = self.safe_timestamp(order, 'created')
    # Recompute the cost from filled * price when possible; otherwise use
    # the exchange-reported orderTotalPrice.
    cost = self.safe_float(order, 'orderTotalPrice')
    if price is not None:
        if filled is not None:
            cost = filled * price
    status = self.parse_order_status(self.safe_string(order, 'status'))
    return {
        'id': id,
        'clientOrderId': None,
        'datetime': self.iso8601(timestamp),
        'timestamp': timestamp,
        'lastTradeTimestamp': None,
        'status': status,
        'symbol': symbol,
        'type': 'limit',  # the exchange only supports limit orders
        'side': side,
        'price': price,
        'cost': cost,
        'amount': amount,
        'filled': filled,
        'remaining': remaining,
        'trades': None,
        'fee': None,
        'info': order,
        'average': None,
    }
def create_order(self, symbol, type, side, amount, price=None, params={}):
    """Place a limit order.

    Only limit orders are supported, and the account's separate trade
    password must be configured as ``exchange.password``.

    :returns: a unified order structure
    """
    self.load_markets()
    if type != 'limit':
        raise ExchangeError(self.id + ' createOrder allows limit orders only')
    market = self.market(symbol)
    direction = '1' if (side == 'buy') else '2'  # 1 = buy, 2 = sell
    if not self.password:
        raise ExchangeError(self.id + ' createOrder() requires you to set exchange.password = "YOUR_TRADING_PASSWORD"(a trade password is NOT THE SAME as your login password)')
    request = {
        'symbol': market['id'],
        'type': direction,
        'price': self.price_to_precision(symbol, price),
        'number': self.amount_to_precision(symbol, amount),
        'tradePwd': self.password,
    }
    response = self.tradePostAddEntrustSheet(self.extend(request, params))
    # The raw order in response['data'] carries no timestamp; stamp it with
    # the response-envelope microtime before parsing.
    timestamp = self.parse_microtime(self.safe_string(response, 'microtime'))
    stamped = self.extend({'timestamp': timestamp}, response['data'])
    return self.parse_order(stamped, market)
def cancel_order(self, id, symbol=None, params={}):
    """Cancel a single order by its entrust-sheet id.

    ``symbol`` is accepted for interface compatibility but unused.
    Returns the raw exchange response (updated/locked asset amounts).
    """
    self.load_markets()
    return self.tradePostCancelEntrustSheet(self.extend({
        'entrustSheetId': id,
    }, params))
def cancel_orders(self, ids, symbol=None, params={}):
    """Cancel several orders in one call; the ids are sent as a
    comma-separated 'ids' parameter.

    ``symbol`` is accepted for interface compatibility but unused.
    Returns the raw exchange response, keyed by order id.
    """
    self.load_markets()
    return self.tradePostCancelEntrustSheet(self.extend({
        'ids': ','.join(ids),
    }, params))
def fetch_order(self, id, symbol=None, params={}):
    """Fetch a single order by entrust-sheet id.

    ``symbol`` is accepted for interface compatibility but unused.
    The raw 'status' field is 0 unfilled, 1 partial, 2 filled, 3 cancelled.
    """
    self.load_markets()
    response = self.tradePostGetEntrustSheetInfo(self.extend({
        'entrustSheetId': id,
    }, params))
    return self.parse_order(response['data'])
    def fetch_orders_with_method(self, method, symbol=None, since=None, limit=None, params={}):
        """Shared implementation behind fetch_orders / fetch_open_orders / fetch_closed_orders.

        :param str method: name of the implicit API method to invoke via getattr
        :param str symbol: required unified market symbol
        :param int since: earliest timestamp in milliseconds (sent to the API in seconds)
        :param int limit: page size (sent as pageSize; page is fixed to 1)
        :param dict params: extra parameters merged into the API request
        :raises ArgumentsRequired: if symbol is None
        :returns: a list of unified order structures
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' fetchOpenOrders requires a symbol argument')
        self.load_markets()
        market = self.market(symbol)
        request = {
            'coinFrom': market['baseId'],
            'coinTo': market['quoteId'],
            # 'type': 1, # optional integer, 1 = buy, 2 = sell
            # 'page': 1, # optional integer
            # 'pageSize': 100, # optional integer, max 100
            # 'startTime': 1510235730, # optional integer timestamp in seconds
            # 'endTime': 1510235730, # optional integer timestamp in seconds
        }
        if limit is not None:
            request['page'] = 1
            request['pageSize'] = limit
        if since is not None:
            # the API expects seconds, ccxt uses milliseconds
            request['startTime'] = int(since / 1000)
            # request['endTime'] = int(since / 1000)
        response = getattr(self, method)(self.extend(request, params))
        #
        #     {
        #         "status": 200,
        #         "msg": "",
        #         "data": {
        #             "data": [
        #                 {
        #                     "id": "693248739",
        #                     "uid": "2074056",
        #                     "price": "100.00000000",
        #                     "number": "10.0000",
        #                     "total": "0.00000000",
        #                     "numberOver": "0.0000",
        #                     "numberDeal": "0.0000",
        #                     "flag": "sale",
        #                     "status": "3",  # 0:unfilled, 1:partial deal, 2:all transactions, 3:already cancelled
        #                     "isNew": "N",
        #                     "coinFrom": "vtc",
        #                     "coinTo": "dkkt",
        #                     "created": "1533035300",
        #                 },
        #                 {
        #                     "id": "723086996",
        #                     "uid": "2074056",
        #                     "price": "100.00000000",
        #                     "number": "10.0000",
        #                     "total": "0.00000000",
        #                     "numberOver": "0.0000",
        #                     "numberDeal": "0.0000",
        #                     "flag": "sale",
        #                     "status": "3",
        #                     "isNew": "N",
        #                     "coinFrom": "bz",
        #                     "coinTo": "usdt",
        #                     "created": "1533523568",
        #                 },
        #             ],
        #             "pageInfo": {
        #                 "limit": "10",
        #                 "offest": "0",
        #                 "current_page": "1",
        #                 "page_size": "10",
        #                 "total_count": "17",
        #                 "page_count": "2",
        #             }
        #         },
        #         "time": "1533279329",
        #         "microtime": "0.15305300 1533279329",
        #         "source": "api"
        #     }
        #
        orders = self.safe_value(response['data'], 'data', [])
        return self.parse_orders(orders, None, since, limit)
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        # delegates to the "history" entrust sheet endpoint (same one as fetch_closed_orders)
        return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        # delegates to the "now" (currently open) entrust sheet endpoint
        return self.fetch_orders_with_method('tradePostGetUserNowEntrustSheet', symbol, since, limit, params)
    def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        # delegates to the "history" entrust sheet endpoint (same one as fetch_orders)
        return self.fetch_orders_with_method('tradePostGetUserHistoryEntrustSheet', symbol, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
'1': 'pending',
'2': 'pending',
'3': 'pending',
'4': 'ok',
'5': 'canceled',
}
return self.safe_string(statuses, status, status)
    def parse_transaction(self, transaction, currency=None):
        """Normalize a raw deposit/withdrawal record into ccxt's unified transaction structure.

        The 'type' key is not part of the raw record — it is injected by
        parse_transactions_by_type before this method is called.

        :param dict transaction: raw record (samples below)
        :param currency: optional currency structure used to resolve the code
        :returns: a unified transaction dict
        """
        #
        #     {
        #         "id": '96275',
        #         "uid": '2109073',
        #         "wallet": '0xf4c4141c0127bc37b1d0c409a091920eba13ada7',
        #         "txid": '0xb7adfa52aa566f9ac112e3c01f77bd91179b19eab12092a9a5a8b33d5086e31d',
        #         "confirm": '12',
        #         "number": '0.50000000',
        #         "status": 4,
        #         "updated": '1534944168605',
        #         "addressUrl": 'https://etherscan.io/address/',
        #         "txidUrl": 'https://etherscan.io/tx/',
        #         "description": 'Ethereum',
        #         "coin": 'eth',
        #         "memo": ''
        #     }
        #
        #     {
        #         "id":"397574",
        #         "uid":"2033056",
        #         "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
        #         "txid":"",
        #         "confirm":"0",
        #         "number":"1000.00000000",
        #         "status":1,
        #         "updated":"0",
        #         "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
        #         "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
        #         "description":"Tether",
        #         "coin":"usdt",
        #         "memo":""
        #     }
        #
        #     {
        #         "id":"153606",
        #         "uid":"2033056",
        #         "wallet":"1AG1gZvQAYu3WBvgg7p4BMMghQD2gE693k",
        #         "txid":"aa2b179f84cd6dedafd41845e0fbf7f01e14c0d71ea3140d03d6f5a9ccd93199",
        #         "confirm":"0",
        #         "number":"761.11110000",
        #         "status":4,
        #         "updated":"1536726133579",
        #         "addressUrl":"http://omniexplorer.info/lookupadd.aspx?address=",
        #         "txidUrl":"http://omniexplorer.info/lookuptx.aspx?txid=",
        #         "description":"Tether",
        #         "coin":"usdt",
        #         "memo":""
        #     }
        #
        # "updated" == 0 means "not yet processed" — report no timestamp
        timestamp = self.safe_integer(transaction, 'updated')
        if timestamp == 0:
            timestamp = None
        currencyId = self.safe_string(transaction, 'coin')
        code = self.safe_currency_code(currencyId, currency)
        type = self.safe_string_lower(transaction, 'type')
        status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
        return {
            'id': self.safe_string(transaction, 'id'),
            'txid': self.safe_string(transaction, 'txid'),
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'address': self.safe_string(transaction, 'wallet'),
            'tag': self.safe_string(transaction, 'memo'),
            'type': type,
            'amount': self.safe_float(transaction, 'number'),
            'currency': code,
            'status': status,
            'updated': timestamp,
            'fee': None,
            'info': transaction,
        }
def parse_transactions_by_type(self, type, transactions, code=None, since=None, limit=None):
result = []
for i in range(0, len(transactions)):
transaction = self.parse_transaction(self.extend({
'type': type,
}, transactions[i]))
result.append(transaction)
return self.filter_by_currency_since_limit(result, code, since, limit)
def parse_transaction_type(self, type):
types = {
'deposit': 1,
'withdrawal': 2,
}
return self.safe_integer(types, type, type)
    def fetch_deposits(self, code=None, since=None, limit=None, params={}):
        # deposits and withdrawals share one endpoint, distinguished by type
        return self.fetch_transactions_for_type('deposit', code, since, limit, params)
    def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
        # deposits and withdrawals share one endpoint, distinguished by type
        return self.fetch_transactions_for_type('withdrawal', code, since, limit, params)
def fetch_transactions_for_type(self, type, code=None, since=None, limit=None, params={}):
if code is None:
raise ArgumentsRequired(self.id + ' fetchTransactions() requires a currency `code` argument')
self.load_markets()
currency = self.currency(code)
request = {
'coin': currency['id'],
'type': self.parse_transaction_type(type),
}
if since is not None:
request['startTime'] = int(since / str(1000))
if limit is not None:
request['page'] = 1
request['pageSize'] = limit
response = self.tradePostDepositOrWithdraw(self.extend(request, params))
transactions = self.safe_value(response['data'], 'data', [])
return self.parse_transactions_by_type(type, transactions, code, since, limit)
    def nonce(self):
        """Return a per-second increasing nonce.

        The counter resets to 100000 whenever a new second begins and is
        incremented on every call, so repeated calls within the same second
        still produce distinct (timeStamp, nonce) pairs for signing.
        """
        currentTimestamp = self.seconds()
        if currentTimestamp > self.options['lastNonceTimestamp']:
            self.options['lastNonceTimestamp'] = currentTimestamp
            self.options['lastNonce'] = 100000
        self.options['lastNonce'] = self.sum(self.options['lastNonce'], 1)
        return self.options['lastNonce']
    def sign(self, path, api='market', method='GET', params={}, headers=None, body=None):
        """Build the url, body and headers for an API request.

        Public ('market') calls carry params in the query string; all other
        calls are authenticated: the form-encoded body (apiKey, timeStamp,
        nonce + params, sorted by key) is signed with self.hash over
        body + secret, and the signature is appended as the 'sign' field.
        """
        baseUrl = self.implode_params(self.urls['api'][api], {'hostname': self.hostname})
        url = baseUrl + '/' + self.capitalize(api) + '/' + path
        query = None
        if api == 'market':
            query = self.urlencode(params)
            if len(query):
                url += '?' + query
        else:
            self.check_required_credentials()
            # keys must be sorted before encoding so the signature is reproducible
            body = self.rawencode(self.keysort(self.extend({
                'apiKey': self.apiKey,
                'timeStamp': self.seconds(),
                'nonce': self.nonce(),
            }, params)))
            # NOTE(review): self.hash is called without an explicit algorithm —
            # presumably the framework default (md5); confirm against the base class
            body += '&sign=' + self.hash(self.encode(body + self.secret))
            headers = {'Content-type': 'application/x-www-form-urlencoded'}
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Raise a mapped exception for API-level errors embedded in the response.

        Errors show up in two shapes: a non-"200" top-level "status", or a
        "200" status with a negative integer error code in "data".
        """
        if response is None:
            return  # fallback to default error handler
        status = self.safe_string(response, 'status')
        if status is not None:
            feedback = self.id + ' ' + body
            #
            #     {"status":-107,"msg":"","data":"","time":1535968848,"microtime":"0.89092200 1535968848","source":"api"}
            #
            if status == '200':
                #
                #     {"status":200,"msg":"","data":-200031,"time":1535999806,"microtime":"0.85476800 1535999806","source":"api"}
                #
                code = self.safe_integer(response, 'data')
                if code is not None:
                    self.throw_exactly_matched_exception(self.exceptions, code, feedback)
                    # code was set but not recognized — still an error
                    raise ExchangeError(feedback)
                else:
                    return  # no error
            self.throw_exactly_matched_exception(self.exceptions, status, feedback)
            raise ExchangeError(feedback)
| 41.827763 | 182 | 0.414705 |
acdf35ff90dd9e533cc351d05325b6180c3b6151 | 204 | py | Python | trimDoubleQuotes.py | Jens26/shortWrappersPython | b01200f3509cf65567963f094c6c73dc8cabbcb9 | [
"MIT"
] | null | null | null | trimDoubleQuotes.py | Jens26/shortWrappersPython | b01200f3509cf65567963f094c6c73dc8cabbcb9 | [
"MIT"
] | null | null | null | trimDoubleQuotes.py | Jens26/shortWrappersPython | b01200f3509cf65567963f094c6c73dc8cabbcb9 | [
"MIT"
def trunc(arr=None, *args):
    """Print each row of *arr* as a comma-joined line.

    NOTE(review): the original comment claimed this "trims double quotes",
    but ",".join(x) only joins the row's elements — no quote stripping
    actually happens. Confirm the intended behavior.

    :param arr: list of lists of strings; None behaves like an empty list
    :param args: accepted and ignored (kept for backward compatibility)
    """
    if arr is None:  # avoid the mutable-default-argument pitfall
        arr = []
    for row in arr:
        print(",".join(row))

# Example usage (the original C-style /* */ comment here was a Python
# syntax error and has been converted):
#     arr = [["2", "#", "4"], ["546", "456", "67"]]
#     trunc(arr)
acdf364e2de668c54f546f56f227e1ac9ce4f5ca | 7,194 | py | Python | l2run/tests/test_observations.py | MultiBeerBandits/learning-to-run | c73661be406edec1fc02e28f9e87f4107f68b1e2 | [
"MIT"
] | 1 | 2018-07-24T06:59:06.000Z | 2018-07-24T06:59:06.000Z | l2run/tests/test_observations.py | MultiBeerBandits/learning-to-run | c73661be406edec1fc02e28f9e87f4107f68b1e2 | [
"MIT"
] | 1 | 2018-05-19T14:09:03.000Z | 2018-05-19T14:09:03.000Z | l2run/tests/test_observations.py | MultiBeerBandits/learning-to-run | c73661be406edec1fc02e28f9e87f4107f68b1e2 | [
"MIT"
] | null | null | null | import sys
sys.path.append("..")
from env_wrapper import create_environment
from replay_buffer import ReplayBufferFlip
import numpy as np
from numpy.testing import assert_almost_equal
"""
elements of the observation vector in order
names = ["pelvis_x", "pelvis_y"] 2
names += [joint + "_" + var for (joint, var) in product(["hip_left","hip_right","knee_left","knee_right","ankle_left","ankle_right"],
["rz", "vrz"])] 12
names += ["ground_pelvis_rot", "ground_pelvis_vel_rot"] 2
names += [body_part + "_" + var for (body_part, var) in product(
["head", "torso", "toes_left", "toes_right", "talus_left", "talus_right"],
["x", "y"])] 12
names += ["com_x", "com_y", "com_vel_x", "com_vel_y"] 4
names += ["pelvis_vel_x", "pelvis_vel_y"] 2
"""
# test the base observation vector consistency
def obs_vector_consistency(exclude_centering):
    """Check that the flat observation vector matches the env's state description.

    Takes 100 random steps and, at each step, asserts that every slot of the
    basic observation vector equals the corresponding entry of the state
    description. When exclude_centering is True, pelvis_x is dropped from the
    vector and every later index shifts left by one (the `shift` offset).
    Body-part x coordinates are stored relative to pelvis_x (centering_x).
    """
    env = create_environment(False, False, 1, 0, exclude_centering)
    shift = int(exclude_centering)
    for _ in range(100):
        # take some random action
        env.step(env.action_space.sample())
        # check consistency between state desc and obs vector
        # plus the order of obs (used in action flip)
        desc = env.get_state_desc()
        obs = env.get_observation_basic()
        # check pelvis coordinates
        centering_x = desc['body_pos']['pelvis'][0]
        if not exclude_centering:
            assert_almost_equal(centering_x, obs[0])
        pelvis_y = desc['body_pos']['pelvis'][1]
        assert_almost_equal(pelvis_y, obs[1 - shift])
        # check joint and speed
        joint_pos = desc['joint_pos']
        joint_vel = desc['joint_vel']
        # hips
        assert_almost_equal(joint_pos['hip_l'][0], obs[2 - shift])
        assert_almost_equal(joint_vel['hip_l'][0], obs[3 - shift])
        assert_almost_equal(joint_pos['hip_r'][0], obs[4 - shift])
        assert_almost_equal(joint_vel['hip_r'][0], obs[5 - shift])
        # knees
        assert_almost_equal(joint_pos['knee_l'][0], obs[6 - shift])
        assert_almost_equal(joint_vel['knee_l'][0], obs[7 - shift])
        assert_almost_equal(joint_pos['knee_r'][0], obs[8 - shift])
        assert_almost_equal(joint_vel['knee_r'][0], obs[9 - shift])
        # ankles
        assert_almost_equal(joint_pos['ankle_l'][0], obs[10 - shift])
        assert_almost_equal(joint_vel['ankle_l'][0], obs[11 - shift])
        assert_almost_equal(joint_pos['ankle_r'][0], obs[12 - shift])
        assert_almost_equal(joint_vel['ankle_r'][0], obs[13 - shift])
        # ground pelvis
        assert_almost_equal(joint_pos['ground_pelvis'][0], obs[14 - shift])
        assert_almost_equal(joint_vel['ground_pelvis'][0], obs[15 - shift])
        # check body part coordinates (x values are relative to centering_x)
        body_pos = desc['body_pos']
        # head
        assert_almost_equal(body_pos['head'][0], obs[16 - shift] + centering_x)
        assert_almost_equal(body_pos['head'][1], obs[17 - shift])
        # torso
        assert_almost_equal(body_pos['torso'][0], obs[18 - shift] + centering_x)
        assert_almost_equal(body_pos['torso'][1], obs[19 - shift])
        # toes
        assert_almost_equal(body_pos['toes_l'][0], obs[20 - shift] + centering_x)
        assert_almost_equal(body_pos['toes_l'][1], obs[21 - shift])
        assert_almost_equal(body_pos['toes_r'][0], obs[22 - shift] + centering_x)
        assert_almost_equal(body_pos['toes_r'][1], obs[23 - shift])
        # talus
        assert_almost_equal(body_pos['talus_l'][0], obs[24 - shift] + centering_x)
        assert_almost_equal(body_pos['talus_l'][1], obs[25 - shift])
        assert_almost_equal(body_pos['talus_r'][0], obs[26 - shift] + centering_x)
        assert_almost_equal(body_pos['talus_r'][1], obs[27 - shift])
        # check center of mass
        com_pos = desc['misc']['mass_center_pos']
        com_vel = desc['misc']['mass_center_vel']
        assert_almost_equal(com_pos[0], obs[28 - shift] + centering_x)
        assert_almost_equal(com_pos[1], obs[29 - shift])
        assert_almost_equal(com_vel[0], obs[30 - shift])
        assert_almost_equal(com_vel[1], obs[31 - shift])
        # check pelvis speed
        assert_almost_equal(desc['body_vel']['pelvis'][0], obs[32 - shift])
        assert_almost_equal(desc['body_vel']['pelvis'][1], obs[33 - shift])
def test_state_flip(exclude_centering):
    """Check that ReplayBufferFlip.swap_states mirrors left/right correctly.

    Takes 100 random steps; at each step the flipped observation must have
    left/right pairs (hip, knee, ankle, toes, talus) swapped while every
    symmetric quantity (pelvis, ground pelvis, head, torso, center of mass,
    pelvis speed) stays unchanged. `shift` accounts for the dropped pelvis_x
    slot when exclude_centering is True.
    """
    env = create_environment(False, False, 1, 0, exclude_centering)
    b = ReplayBufferFlip(2, True, env.get_observation_names(),
                         env.action_space.shape,
                         env.observation_space.shape)
    shift = int(exclude_centering)
    env.reset()
    for _ in range(100):
        obs = env.step(env.action_space.sample())[0]
        fobs = b.swap_states(np.matrix(obs)).tolist()[0]
        assert(len(obs) == 34 - shift)
        assert(len(obs) == len(fobs))
        # pelvis does not change
        assert_almost_equal(obs[0:2 - shift], fobs[0:2 - shift])
        # hip
        assert_almost_equal(obs[2 - shift:4 - shift], fobs[4 - shift:6 - shift])
        assert_almost_equal(obs[4 - shift:6 - shift], fobs[2 - shift:4 - shift])
        # knee
        assert_almost_equal(obs[6 - shift:8 - shift], fobs[8 - shift:10 - shift])
        assert_almost_equal(obs[8 - shift:10 - shift], fobs[6 - shift:8 - shift])
        # ankle
        assert_almost_equal(obs[10 - shift:12 - shift], fobs[12 - shift:14 - shift])
        assert_almost_equal(obs[12 - shift:14 - shift], fobs[10 - shift:12 - shift])
        # up to torso nothing changes
        assert_almost_equal(obs[14 - shift:20 - shift], fobs[14 - shift:20 - shift])
        # toes
        assert_almost_equal(obs[20 - shift:22 - shift], fobs[22 - shift:24 - shift])
        assert_almost_equal(obs[22 - shift:24 - shift], fobs[20 - shift:22 - shift])
        # talus
        assert_almost_equal(obs[24 - shift:26 - shift], fobs[26 - shift:28 - shift])
        assert_almost_equal(obs[26 - shift:28 - shift], fobs[24 - shift:26 - shift])
        # center of mass does not change
        assert_almost_equal(obs[28 - shift:32 - shift], fobs[28 - shift:32 - shift])
        # pelvis speed does not change
        assert_almost_equal(obs[32 - shift:34 - shift], fobs[32 - shift:34 - shift])
# we discovered that ['body_pos']['pelvis'][0:2] == ['joint_pos']['ground_pelvis'][1:3]
# and ['body_vel']['pelvis'][0:2] == ['joint_vel']['ground_pelvis'][1:3]
def pay_attention_always_equal():
    """Document an invariant of the environment's state description.

    Verifies over 100 random steps that body_pos/body_vel of 'pelvis' (x, y)
    always equal joint_pos/joint_vel of 'ground_pelvis' (slots 1:3) — i.e.
    the two entries are redundant copies of the same quantity.
    """
    env = create_environment(False, False, 1, 0, False)
    env.reset()
    for _ in range(100):
        # stepping is needed to advance the state; the returned obs is unused here
        obs = env.step(env.action_space.sample())[0]
        pelvis_xy = env.get_state_desc()['body_pos']['pelvis'][0:2]
        ground_pelvis_xy = env.get_state_desc()['joint_pos']['ground_pelvis'][1:3]
        assert_almost_equal(pelvis_xy, ground_pelvis_xy)
        pelvis_vel = env.get_state_desc()['body_vel']['pelvis'][0:2]
        ground_pelvis_vel = env.get_state_desc()['joint_vel']['ground_pelvis'][1:3]
        assert_almost_equal(pelvis_vel, ground_pelvis_vel)
if __name__ == '__main__':
    # run all checks with exclude_centering=False (pelvis_x kept in the vector)
    obs_vector_consistency(False)
    test_state_flip(False)
    pay_attention_always_equal()
acdf36638e2272d676dbf5c5ea0ba3f4e2f3f5d3 | 9,722 | py | Python | spade/presence.py | himanshudabas/spade | a23ba50eb21d241d1a4c4c55cd9db24c00a74318 | [
"MIT"
] | 1 | 2019-04-09T06:04:54.000Z | 2019-04-09T06:04:54.000Z | spade/presence.py | Newsr30420/spade | a23ba50eb21d241d1a4c4c55cd9db24c00a74318 | [
"MIT"
] | null | null | null | spade/presence.py | Newsr30420/spade | a23ba50eb21d241d1a4c4c55cd9db24c00a74318 | [
"MIT"
] | null | null | null | import aioxmpp
from aioxmpp import PresenceState, PresenceShow
class ContactNotFound(Exception):
    """Raised by get_contact when the requested JID is not in the contact list."""
    pass
class PresenceManager(object):
    """Manages the agent's XMPP presence, contact roster and subscription flow.

    Wires itself to aioxmpp's PresenceClient/RosterClient signals, caches the
    last presence stanza per contact, and exposes overridable on_* callbacks.
    """

    def __init__(self, agent):
        self.agent = agent
        self.client = agent.client
        self.roster = self.client.summon(aioxmpp.RosterClient)
        self.presenceclient = self.client.summon(aioxmpp.PresenceClient)
        self.presenceserver = self.client.summon(aioxmpp.PresenceServer)
        self._contacts = {}
        # when True, subscription/unsubscription requests are answered automatically
        self.approve_all = False
        self.presenceclient.on_bare_available.connect(self._on_bare_available)
        self.presenceclient.on_available.connect(self._on_available)
        self.presenceclient.on_bare_unavailable.connect(self._on_bare_unavailable)
        self.presenceclient.on_unavailable.connect(self._on_unavailable)
        self.presenceclient.on_changed.connect(self._on_changed)
        self.roster.on_subscribe.connect(self._on_subscribe)
        self.roster.on_subscribed.connect(self._on_subscribed)
        self.roster.on_unsubscribe.connect(self._on_unsubscribe)
        self.roster.on_unsubscribed.connect(self._on_unsubscribed)
    @property
    def state(self):
        """
        The currently set presence state (as aioxmpp.PresenceState)
        which is broadcast when the client connects and when the presence is
        re-emitted.
        This attribute cannot be written. It does not reflect the actual
        presence seen by others. For example when the client is in fact
        offline, others will see unavailable presence no matter what is set
        here.
        Returns:
            aioxmpp.PresenceState: the presence state of the agent
        """
        return self.presenceserver.state
    @property
    def status(self):
        """
        The currently set textual presence status which is broadcast when the
        client connects and when the presence is re-emitted.
        This attribute cannot be written. It does not reflect the actual
        presence seen by others. For example when the client is in fact
        offline, others will see unavailable presence no matter what is set
        here.
        Returns:
            dict: a dict with the status in different languages (default key is None)
        """
        return self.presenceserver.status
    @property
    def priority(self):
        """
        The currently set priority which is broadcast when the client connects
        and when the presence is re-emitted.
        This attribute cannot be written. It does not reflect the actual
        presence seen by others. For example when the client is in fact
        offline, others will see unavailable presence no matter what is set
        here.
        Returns:
            int: the priority of the connection
        """
        return self.presenceserver.priority
    def is_available(self):
        """
        Returns the available flag from the state
        Returns:
            bool: whether the agent is available or not
        """
        return self.state.available
    def set_available(self, show=None):
        """
        Sets the agent availability to True.
        Args:
            show (aioxmpp.PresenceShow, optional): the show state of the presence (Default value = None, which keeps the current show)
        """
        show = self.state.show if show is None else show
        self.set_presence(PresenceState(available=True, show=show))
    def set_unavailable(self):
        """Sets the agent availability to False."""
        show = PresenceShow.NONE
        self.set_presence(PresenceState(available=False, show=show))
    def set_presence(self, state=None, status=None, priority=None):
        """
        Change the presence broadcast by the client.
        If the client is currently connected, the new presence is broadcast immediately.
        Any argument left as None keeps its current value.
        Args:
            state(aioxmpp.PresenceState, optional): New presence state to broadcast (Default value = None)
            status(dict or str, optional): New status information to broadcast (Default value = None)
            priority (int, optional): New priority for the resource (Default value = None)
        """
        state = state if state is not None else self.state
        status = status if status is not None else self.status
        priority = priority if priority is not None else self.priority
        self.presenceserver.set_presence(state, status, priority)
    def get_contacts(self):
        """
        Returns list of contacts
        Merges the roster items (exported as json) into the locally cached
        presence information, keyed by bare JID.
        Returns:
            dict: the roster of contacts
        """
        for jid, item in self.roster.items.items():
            try:
                self._contacts[jid.bare()].update(item.export_as_json())
            except KeyError:
                self._contacts[jid.bare()] = item.export_as_json()
        return self._contacts
    def get_contact(self, jid):
        """
        Returns a contact
        Args:
            jid (aioxmpp.JID): jid of the contact
        Returns:
            dict: the contact's data
        Raises:
            ContactNotFound: if the jid is not a known contact
            AttributeError: if jid is not an aioxmpp.JID object
        """
        try:
            return self.get_contacts()[jid.bare()]
        except KeyError:
            raise ContactNotFound
        except AttributeError:
            raise AttributeError("jid must be an aioxmpp.JID object")
    def _update_roster_with_presence(self, stanza):
        """Cache the last presence stanza per bare JID, ignoring our own presence."""
        if stanza.from_.bare() == self.agent.jid.bare():
            return
        try:
            self._contacts[stanza.from_.bare()].update({"presence": stanza})
        except KeyError:
            self._contacts[stanza.from_.bare()] = {"presence": stanza}
    def subscribe(self, peer_jid):
        """
        Asks for subscription
        Args:
            peer_jid (str): the JID you ask for subscription
        """
        self.roster.subscribe(aioxmpp.JID.fromstr(peer_jid).bare())
    def unsubscribe(self, peer_jid):
        """
        Asks for unsubscription
        Args:
            peer_jid (str): the JID you ask for unsubscription
        """
        self.roster.unsubscribe(aioxmpp.JID.fromstr(peer_jid).bare())
    def approve(self, peer_jid):
        """
        Approve a subscription request from jid
        Args:
            peer_jid (str): the JID to approve
        """
        self.roster.approve(aioxmpp.JID.fromstr(peer_jid).bare())
    def _on_bare_available(self, stanza):
        """Cache a bare-JID available presence and fire on_available."""
        self._update_roster_with_presence(stanza)
        self.on_available(str(stanza.from_), stanza)
    def _on_available(self, full_jid, stanza):
        """Cache a full-JID available presence and fire on_available."""
        self._update_roster_with_presence(stanza)
        self.on_available(str(stanza.from_), stanza)
    def _on_unavailable(self, full_jid, stanza):
        """Cache a full-JID unavailable presence and fire on_unavailable."""
        self._update_roster_with_presence(stanza)
        self.on_unavailable(str(stanza.from_), stanza)
    def _on_bare_unavailable(self, stanza):
        """Cache a bare-JID unavailable presence and fire on_unavailable."""
        self._update_roster_with_presence(stanza)
        self.on_unavailable(str(stanza.from_), stanza)
    def _on_changed(self, from_, stanza):
        """Cache any presence change for the contact."""
        self._update_roster_with_presence(stanza)
    def _on_subscribe(self, stanza):
        """Auto-approve the request if approve_all, otherwise forward to on_subscribe."""
        if self.approve_all:
            self.roster.approve(stanza.from_.bare())
        else:
            self.on_subscribe(str(stanza.from_))
    def _on_subscribed(self, stanza):
        """Forward to the user-overridable on_subscribed callback."""
        self.on_subscribed(str(stanza.from_))
    def _on_unsubscribe(self, stanza):
        """Acknowledge the unsubscription if approve_all, otherwise forward to on_unsubscribe."""
        if self.approve_all:
            self.client.stream.enqueue(
                aioxmpp.Presence(type_=aioxmpp.structs.PresenceType.UNSUBSCRIBED,
                                 to=stanza.from_.bare())
            )
        else:
            self.on_unsubscribe(str(stanza.from_))
    def _on_unsubscribed(self, stanza):
        """Forward to the user-overridable on_unsubscribed callback."""
        self.on_unsubscribed(str(stanza.from_))
    def on_subscribe(self, peer_jid):
        """
        Callback called when a subscribe query is received.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent asking for subscription
        """
        pass  # pragma: no cover
    def on_subscribed(self, peer_jid):
        """
        Callback called when a subscribed message is received.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent that accepted subscription
        """
        pass  # pragma: no cover
    def on_unsubscribe(self, peer_jid):
        """
        Callback called when an unsubscribe query is received.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent asking for unsubscription
        """
        pass  # pragma: no cover
    def on_unsubscribed(self, peer_jid):
        """
        Callback called when an unsubscribed message is received.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent that unsubscribed
        """
        pass  # pragma: no cover
    def on_available(self, peer_jid, stanza):
        """
        Callback called when a contact becomes available.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent that is available
            stanza (aioxmpp.Presence): The presence message containing type, show, priority and status values.
        """
        pass  # pragma: no cover
    def on_unavailable(self, peer_jid, stanza):
        """
        Callback called when a contact becomes unavailable.
        To be overridden by the user.
        Args:
            peer_jid (str): the JID of the agent that is unavailable
            stanza (aioxmpp.Presence): The presence message containing type, show, priority and status values.
        """
        pass  # pragma: no cover
| 30.863492 | 108 | 0.622814 |
acdf36b8ddcbd7bb20f5f866239b4a1d3eff24c7 | 341 | py | Python | Stack/leetcode844. Backspace String Compare.py | aurora314156/leetcode | 7fb6096f9af255e46c69d83254b58b1558e082d8 | [
"MIT"
] | null | null | null | Stack/leetcode844. Backspace String Compare.py | aurora314156/leetcode | 7fb6096f9af255e46c69d83254b58b1558e082d8 | [
"MIT"
] | null | null | null | Stack/leetcode844. Backspace String Compare.py | aurora314156/leetcode | 7fb6096f9af255e46c69d83254b58b1558e082d8 | [
"MIT"
] | null | null | null | class Solution:
def backspaceCompare(self, S: str, T: str) -> bool:
def getStr(Str):
stack = []
for ss in Str:
if ss != "#":
stack.append(ss)
elif stack:
stack.pop()
return stack
return getStr(S) == getStr(T)
| 28.416667 | 55 | 0.410557 |
acdf3701b0b8fcaaa709c55cae5737ba884ac888 | 937 | py | Python | bter.py | Tulip-HFT/market-crawler | a6572459a1b6dd1609d61e01c01f197911c8b144 | [
"MIT"
] | null | null | null | bter.py | Tulip-HFT/market-crawler | a6572459a1b6dd1609d61e01c01f197911c8b144 | [
"MIT"
] | null | null | null | bter.py | Tulip-HFT/market-crawler | a6572459a1b6dd1609d61e01c01f197911c8b144 | [
"MIT"
] | null | null | null | import urllib2
import json
import interface
class Bter(interface.MarketExplorer):
    """MarketExplorer implementation backed by the Bter public HTTP API."""
    def __init__(self):
        pass
    def exchange_name(self):
        # identifier reported for this exchange
        return 'bter'
    def markets(self):
        """Fetch the market list from the API; return only markets with nonzero volume."""
        opener = urllib2.build_opener()
        opener.addheaders = [('User-agent', 'Mozilla/5.0')] # the API rejects the default urllib2 UA
        req = opener.open('http://data.bter.com/api/1/marketlist')
        js = json.loads(req.read())
        markets = []
        for obj in js['data']:
            market_str = obj['pair']
            # bter quirk: vol_a is a float, vol_b a comma-grouped string
            vol = obj['vol_a'] + float(obj['vol_b'].replace(',', ''))
            # only add the market if it has nonzero volume. I don't know exactly what
            # vol_a and vol_b are, but it seems to work.
            if vol:
                # pairs come as "BASE_QUOTE"
                market = market_str.partition('_')
                markets.append(self.create_market(market[0], market[2]))
        return markets
| 30.225806 | 85 | 0.567769 |
acdf38008d3da351afabf07e4e1ae30ae550712c | 5,839 | py | Python | amplify/agent/containers/abstract.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | amplify/agent/containers/abstract.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | amplify/agent/containers/abstract.py | digideskio/digidesk-amplified | 547f899d6fd47dc726df28ee90bf3511f02bd6cf | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import time
import hashlib
import abc
from collections import defaultdict
from threading import current_thread
from gevent import queue
from amplify.agent.context import context
from amplify.agent.statsd import StatsdClient
from amplify.agent.eventd import EventdClient
from amplify.agent.metad import MetadClient
from amplify.agent.configd import ConfigdClient
from amplify.agent.util.threads import spawn
__author__ = "Mike Belov"
__copyright__ = "Copyright (C) Nginx, Inc. All rights reserved."
__credits__ = ["Mike Belov", "Andrei Belov", "Ivan Poluyanov", "Oleg Mamontov", "Andrew Alexeev"]
__license__ = ""
__maintainer__ = "Mike Belov"
__email__ = "dedm@nginx.com"
def definition_id(definition):
    """
    Returns object id based on its definition

    :param definition: dict with object definition
    :return: md5 based on it
    """
    # Build the key:value list explicitly: under Python 3 the original
    # str(map(...)) produced the repr of the map object itself (including a
    # memory address), making ids non-deterministic; list(map(...)) matches
    # the old Python 2 behavior exactly and is stable on both versions.
    definition_string = str(list(map(lambda x: u'%s:%s' % (x, definition[x]), sorted(definition.keys()))))
    # md5 requires bytes under Python 3; encoding is a no-op change on Python 2
    result = hashlib.md5(definition_string.encode('utf-8')).hexdigest()
    return result
class AbstractContainer(object):
    # set by subclasses; also the key into app_config['containers']
    type = None
    def __init__(self, object_configs=None):
        self.objects = {}
        self.object_configs = object_configs if object_configs else {}
        self.intervals = context.app_config['containers'][self.type]['poll_intervals']
        self.last_discover = 0
    def schedule_cloud_commands(self):
        """
        Reads global cloud command queue and applies commands to specific objects
        """
        pass
    def _discover_objects(self):
        """
        Runs discover_objects() no more often than the 'discover' interval.
        NOTE(review): last_discover is never updated here, so unless a subclass
        updates it, discovery effectively runs on every call — confirm.
        """
        if time.time() > self.last_discover + self.intervals['discover']:
            self.discover_objects()
        context.log.debug('%s objects: %s' % (self.type, self.objects.keys()))
    def discover_objects(self):
        """
        Abstract discovering method
        Should be overridden in subclasses and set self.objects = {obj_id: obj_instance}
        """
        pass
    def stop_objects(self):
        """
        Quietly stops all container objects (without unregistering their data clients)
        """
        for obj in self.objects.itervalues():
            obj.stop(unregister=False)
        self.objects = {}
    def run_objects(self):
        """
        Starts all objects
        """
        for obj in self.objects.itervalues():
            obj.start()
    def sleep(self):
        # pause for one discovery period
        time.sleep(self.intervals['discover'])
class AbstractObject(object):
    """
    Abstract object. Supervisor for collectors.
    Creates and registers the statsd/eventd/metad/configd data clients and
    spawns one thread per collector when started.
    """
    type = None
    def __init__(self, definition=None, data=None):
        self.definition = {} if definition is None else definition
        self.definition['type'] = self.type
        # id is derived from the definition, so equal definitions share an id
        self.id = definition_id(self.definition)
        self.data = data
        self.intervals = context.app_config['containers'][self.type]['poll_intervals'] or {'default': 10}
        self.running = False
        self.need_restart = False
        self.threads = []
        self.collectors = []
        self.filters = []
        self.queue = queue.Queue()
        # data clients
        self.statsd = StatsdClient(object=self, interval=max(self.intervals.values()))
        self.eventd = EventdClient(object=self)
        self.metad = MetadClient(object=self)
        self.configd = ConfigdClient(object=self)
        # register data clients
        context.eventd.register(self.type, self.id, self.eventd)
        context.statsd.register(self.type, self.id, self.statsd)
        context.metad.register(self.type, self.id, self.metad)
        context.configd.register(self.type, self.id, self.configd)
    def start(self):
        """
        Starts all of the object's collector threads (idempotent while running)
        """
        if not self.running:
            context.log.debug('starting object %s' % self.id)
            for collector in self.collectors:
                self.threads.append(spawn(collector.run))
            self.running = True
    def stop(self, unregister=True):
        """
        Signals collectors to stop (they check self.running) and, unless
        unregister is False, removes the object's data clients from the
        global containers.
        """
        context.log.debug('halting object %s' % self.id)
        self.running = False
        if unregister:
            for container in (context.statsd, context.metad, context.eventd, context.configd):
                container.unregister(self.type, self.id)
        context.log.debug('object %s stopped' % self.id)
class AbstractCollector(object):
    """
    Abstract data collector
    Runs in a thread and collects specific data
    """
    # thread name; set by subclasses
    short_name = None
    def __init__(self, object=None, interval=None):
        self.object = object
        self.interval = interval
        self.statsd = object.statsd
        self.metad = object.metad
        self.eventd = object.eventd
        self.configd = object.configd
        self.previous_values = defaultdict(dict) # for deltas
    def run(self):
        """
        Common collector cycle
        1. Collect data
        2. Sleep
        3. Stop if object stopped
        Any exception is logged with a traceback and re-raised.
        """
        current_thread().name = self.short_name
        context.setup_thread_id()
        try:
            while True:
                context.inc_action_id()
                if self.object.running:
                    self._collect()
                    self._sleep()
                else:
                    break
        except:
            context.log.error('%s failed' % self.object.id, exc_info=True)
            raise
    def _collect(self):
        # times collect() and logs its duration even when it raises
        start_time = time.time()
        try:
            self.collect()
        except:
            # re-raise untouched; run() does the logging
            raise
        finally:
            end_time = time.time()
            context.log.debug('%s collect in %.3f' % (self.object.id, end_time - start_time))
    def _sleep(self):
        # NOTE(review): interval defaults to None, and time.sleep(None) raises —
        # callers presumably always pass an interval; confirm.
        time.sleep(self.interval)
    @abc.abstractmethod
    def collect(self):
        """
        Real collect method
        Override it
        """
        pass
| 29.94359 | 105 | 0.614146 |
acdf38d82d6d1a8a54860549e20ce07de8395bde | 802 | py | Python | objects/Karma.py | refekt/Husker-Bot | eb9a75afddf64890738f1194c088a097bd0d3777 | [
"Unlicense"
] | 4 | 2019-07-10T00:28:43.000Z | 2019-10-18T14:27:45.000Z | objects/Karma.py | refekt/Husker-Bot | eb9a75afddf64890738f1194c088a097bd0d3777 | [
"Unlicense"
] | 1 | 2019-07-11T10:21:06.000Z | 2019-07-11T10:21:06.000Z | objects/Karma.py | refekt/Husker-Bot | eb9a75afddf64890738f1194c088a097bd0d3777 | [
"Unlicense"
] | null | null | null | from typing import AnyStr
from utilities.constants import CommandError
from utilities.mysql import sqlUpdateKarma, Process_MySQL
class KarmaUser:
    """Karma bookkeeping for a single user."""
    # a reaction is weighted four times as heavily as a plain message
    weight_msg = 0.25
    weight_react = 1
    def __init__(
        self, user_id: int, user_name: AnyStr, positive: float, negative: float
    ):
        self.user_id = user_id
        self.user_name = user_name
        self.positive = positive
        self.negative = negative
        # NOTE(review): the product of positive and negative looks suspicious
        # for a "total" — a difference (positive - negative) seems more likely;
        # confirm the intended formula.
        self.total = positive * negative
    def update(self, msg: bool = False, react: bool = False):
        """Persist a karma increment for exactly one of: a message or a reaction.

        Raises CommandError when neither or both flags are set.
        """
        if msg and not react:
            value = self.weight_msg
        elif react and not msg:
            value = self.weight_react
        else:
            raise CommandError("Unable to update karma.")
        # NOTE(review): only the weight is passed; sqlUpdateKarma presumably
        # identifies the user some other way — verify the query's parameters.
        Process_MySQL(query=sqlUpdateKarma, values=value)
| 26.733333 | 79 | 0.648379 |
acdf38e7048837c62eb34cff04a2a4a26e8eb06c | 7,132 | py | Python | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/scripts/inventory/zabbix.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/scripts/inventory/zabbix.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/scripts/inventory/zabbix.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# (c) 2013, Greg Buehler
# (c) 2018, Filippo Ferrazini
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Zabbix Server external inventory script.
========================================
Returns hosts and hostgroups from Zabbix Server.
If you want to run with --limit against a host group with space in the
name, use asterisk. For example --limit="Linux*servers".
Configuration is read from `zabbix.ini`.
Tested with Zabbix Server 2.0.6, 3.2.3 and 3.4.
"""
from __future__ import print_function
import os
import sys
import argparse
from ansible.module_utils.six.moves import configparser
try:
from zabbix_api import ZabbixAPI
except Exception:
print("Error: Zabbix API library must be installed: pip install zabbix-api.",
file=sys.stderr)
sys.exit(1)
import json
class ZabbixInventory(object):
    """Builds an Ansible dynamic inventory from a Zabbix server.

    Settings come from ``zabbix.ini`` (current directory first, then next
    to this script); ``--list`` / ``--host`` are parsed from the command
    line. All work happens in ``__init__`` because the script instantiates
    this class at import time.
    """

    def read_settings(self):
        """Load server, credential and behaviour flags from zabbix.ini."""
        # SafeConfigParser was a deprecated alias and was removed in
        # Python 3.12; ConfigParser is the drop-in replacement.
        config = configparser.ConfigParser()
        conf_path = './zabbix.ini'
        if not os.path.exists(conf_path):
            conf_path = os.path.dirname(os.path.realpath(__file__)) + '/zabbix.ini'
        if os.path.exists(conf_path):
            config.read(conf_path)
        # server
        if config.has_option('zabbix', 'server'):
            self.zabbix_server = config.get('zabbix', 'server')
        # login
        if config.has_option('zabbix', 'username'):
            self.zabbix_username = config.get('zabbix', 'username')
        if config.has_option('zabbix', 'password'):
            self.zabbix_password = config.get('zabbix', 'password')
        # ssl certs
        if config.has_option('zabbix', 'validate_certs'):
            if config.get('zabbix', 'validate_certs') in ['false', 'False', False]:
                self.validate_certs = False
        # host inventory
        if config.has_option('zabbix', 'read_host_inventory'):
            if config.get('zabbix', 'read_host_inventory') in ['true', 'True', True]:
                self.read_host_inventory = True
        # host interface
        if config.has_option('zabbix', 'use_host_interface'):
            if config.get('zabbix', 'use_host_interface') in ['false', 'False', False]:
                self.use_host_interface = False

    def read_cli(self):
        """Parse the Ansible dynamic-inventory CLI (--list / --host)."""
        parser = argparse.ArgumentParser()
        parser.add_argument('--host')
        parser.add_argument('--list', action='store_true')
        self.options = parser.parse_args()

    def hoststub(self):
        """Return an empty group entry in Ansible inventory format."""
        return {
            'hosts': []
        }

    def get_host(self, api, name):
        """Return the hostvars dict for a single host (--host mode)."""
        api_query = {'output': 'extend', 'selectGroups': 'extend', "filter": {"host": [name]}}
        if self.use_host_interface:
            api_query['selectInterfaces'] = ['useip', 'ip', 'dns']
        if self.read_host_inventory:
            api_query['selectInventory'] = "extend"

        # Fall back to the host name itself when no interface data is used.
        data = {'ansible_ssh_host': name}
        if self.use_host_interface or self.read_host_inventory:
            try:
                hosts_data = api.host.get(api_query)[0]
                if 'interfaces' in hosts_data:
                    # use first interface only
                    if hosts_data['interfaces'][0]['useip'] == 0:
                        data['ansible_ssh_host'] = hosts_data['interfaces'][0]['dns']
                    else:
                        data['ansible_ssh_host'] = hosts_data['interfaces'][0]['ip']
                if ('inventory' in hosts_data) and (hosts_data['inventory']):
                    data.update(hosts_data['inventory'])
            except IndexError:
                # Host not found in zabbix
                pass
        return data

    def get_list(self, api):
        """Return the full inventory with _meta hostvars (--list mode)."""
        api_query = {'output': 'extend', 'selectGroups': 'extend'}
        if self.use_host_interface:
            api_query['selectInterfaces'] = ['useip', 'ip', 'dns']
        if self.read_host_inventory:
            api_query['selectInventory'] = "extend"

        hosts_data = api.host.get(api_query)
        data = {'_meta': {'hostvars': {}}}

        data[self.defaultgroup] = self.hoststub()
        for host in hosts_data:
            hostname = host['name']
            hostvars = dict()
            data[self.defaultgroup]['hosts'].append(hostname)

            for group in host['groups']:
                groupname = group['name']

                if groupname not in data:
                    data[groupname] = self.hoststub()

                data[groupname]['hosts'].append(hostname)
            if 'interfaces' in host:
                # use first interface only
                if host['interfaces'][0]['useip'] == 0:
                    hostvars['ansible_ssh_host'] = host['interfaces'][0]['dns']
                else:
                    hostvars['ansible_ssh_host'] = host['interfaces'][0]['ip']
            if ('inventory' in host) and (host['inventory']):
                hostvars.update(host['inventory'])
            data['_meta']['hostvars'][hostname] = hostvars

        return data

    def __init__(self):
        self.defaultgroup = 'group_all'
        self.zabbix_server = None
        self.zabbix_username = None
        self.zabbix_password = None
        self.validate_certs = True
        self.read_host_inventory = False
        self.use_host_interface = True

        self.meta = {}

        self.read_settings()
        self.read_cli()

        if self.zabbix_server and self.zabbix_username:
            try:
                api = ZabbixAPI(server=self.zabbix_server, validate_certs=self.validate_certs)
                api.login(user=self.zabbix_username, password=self.zabbix_password)
            # zabbix_api tries to exit if it cannot parse what the zabbix
            # server returned, so SystemExit must be caught here too.
            # (The bound exception value was unused, so it is no longer bound.)
            except (Exception, SystemExit):
                print("Error: Could not login to Zabbix server. Check your zabbix.ini.", file=sys.stderr)
                sys.exit(1)

            if self.options.host:
                data = self.get_host(api, self.options.host)
                print(json.dumps(data, indent=2))

            elif self.options.list:
                data = self.get_list(api)
                print(json.dumps(data, indent=2))

            else:
                print("usage: --list ..OR.. --host <hostname>", file=sys.stderr)
                sys.exit(1)

        else:
            print("Error: Configuration of server and credentials are required. See zabbix.ini.", file=sys.stderr)
            sys.exit(1)
| 36.203046 | 114 | 0.587914 |
acdf39bc68c0ef66dc0bc5f475a5266f5a5eb63f | 4,494 | py | Python | hyphalnet/proteomics.py | Lthura/hyphalnet | 80dfb93615939181201692968d820eccf02a7ffc | [
"MIT"
] | null | null | null | hyphalnet/proteomics.py | Lthura/hyphalnet | 80dfb93615939181201692968d820eccf02a7ffc | [
"MIT"
] | null | null | null | hyphalnet/proteomics.py | Lthura/hyphalnet | 80dfb93615939181201692968d820eccf02a7ffc | [
"MIT"
] | null | null | null | import pandas as pd
import re
import numpy as np
def downloadPDCfile():
    """Placeholder for downloading a file from the PDC; not implemented yet."""
    print("Not sure how this will work just yet")
def normals_from_manifest(fname):
    """Parse a PDC sample manifest.

    Parameters
    ----------
    fname : str
        Name of the manifest CSV file.

    Returns
    -------
    dict
        Normal-sample aliquot submitter IDs keyed by disease type.
    """
    manifest = pd.read_csv(fname, sep=',')
    by_disease = manifest.groupby("Disease Type")['Aliquot Submitter ID']
    return by_disease.apply(list).to_dict()
def map_ncbi_to_gene(tdat):
    """Takes a parsed file and returns a Gene -> NCBI gene id (string) dict.

    Summary rows ('Mean', 'Median', 'StdDev') are excluded.
    """
    summary_rows = ['Mean', 'Median', 'StdDev']
    genes = tdat.loc[~tdat['Gene'].isin(summary_rows)]
    ids = [str(int(v)) for v in genes['NCBIGeneID']]
    return dict(zip(genes['Gene'], ids))
def parsePDCfile(fpath='data/CPTAC2_Breast_Prospective_Collection_BI_Proteome.tmt10.tsv'):
    """
    Takes a PDC file ending in .tmt10.tsv or .itraq.tsv and creates a
    tidied data frame with Gene, NCBIGeneID, Patient and logratio columns.

    Parameters
    ----------
    fpath : str, optional
        Path to the tab-separated PDC proteome file.

    Returns
    -------
    pandas.DataFrame
        Long-format frame with one row per (Gene, Patient) log ratio.
    """
    dat = pd.read_csv(fpath, sep='\t')
    # .copy() so the per-sample column assignments below write into a real
    # frame instead of a view of `dat` (avoids SettingWithCopyWarning).
    newdat = dat[['Gene', 'NCBIGeneID']].copy()
    # Retrieve the per-sample "<sample> Log Ratio" columns. Raw string:
    # the original '\ ' was an invalid escape sequence in a plain string.
    pat = re.compile(r'.*[0-9]+ Log Ratio')
    ratio_cols = list(filter(pat.match, dat.keys()))
    for col in ratio_cols:
        sample = col.replace(' Log Ratio', '')
        newdat[sample] = dat[col]
    # Tidy the frame: one row per gene/patient log ratio.
    tdat = pd.melt(newdat, id_vars=['Gene', 'NCBIGeneID'],
                   var_name='Patient', value_name='logratio')
    return tdat
def getProtsByPatient(tdf, namemapper=None, column='logratio', quantThresh=0.01):
    """
    Collect each patient's top proteins (values above the per-patient quantile).

    Parameters
    ----------
    tdf : pandas.DataFrame
        Frame with 'Gene', 'Patient' and the value column.
    namemapper : dict, optional
        Gene -> protein identifier map; when None the Gene name is used.
    column : str, optional
        Name of the value column to threshold on.
    quantThresh : float, optional
        Upper-tail fraction kept per patient.

    Returns
    -------
    dict
        {patient: {protein: value}} for values strictly above the threshold.
    """
    # Per-patient cutoff: the (1 - quantThresh) quantile of the value column.
    cutoffs = pd.DataFrame(
        {'thresh': tdf.groupby("Patient")[column].quantile(1.0 - quantThresh)})
    merged = tdf.merge(cutoffs, on='Patient')
    if namemapper is None:
        merged.rename(columns={'Gene': 'Prot'}, inplace=True)
    else:
        mapping = pd.DataFrame.from_dict(namemapper, orient='index', columns=['Prot'])
        mapping.loc[:, 'Gene'] = mapping.index
        merged = merged.merge(mapping, on='Gene')
    merged = merged.assign(topProt=merged[column] > merged['thresh'])
    top = merged[merged['topProt']]
    prots_by_patient = top.groupby('Patient')['Prot'].apply(list).to_dict()
    vals_by_patient = top.groupby("Patient")[column].apply(list).to_dict()
    return {p: dict(zip(prots_by_patient[p], vals_by_patient[p]))
            for p in prots_by_patient}
def getTumorNorm(tdf, normSamps, namemapper=None, column='logratio', quantThresh=0.01):
    """
    Gets per-patient tumor values compared to pooled normal.

    Parameters
    ----------
    tdf : pandas.DataFrame
        Tidied frame with 'Gene', 'Patient' and the value column.
    normSamps : collection
        Patient identifiers that are normal (non-tumor) samples.
    namemapper : dict, optional
        Unused in this function; kept for signature parity with
        getProtsByPatient.
    column : str, optional
        Name of the value column (default 'logratio').
    quantThresh : float, optional
        Upper-tail fraction of |difference| kept per patient.

    Returns
    -------
    dict
        {patient: {gene: abs(difference to pooled-normal mean)}} for the
        top-quantile genes of each tumor sample.

    TODO: update to do matched normal instead
    """
    # Tumor samples are everything not flagged as normal.
    tumSamps = set([a for a in tdf['Patient'] if a not in normSamps])
    # separate data frame by tumor vs normal
    normVals = tdf[tdf.Patient.isin(normSamps)]
    tumVals = tdf[tdf.Patient.isin(tumSamps)]
    # TODO: match tumor/normal samples, for now just get mean for each gene
    meanVals = normVals.groupby('Gene')[column].apply(np.mean)
    # subtract the pooled-normal mean from every tumor sample, per gene
    # (pivot to a Gene x Patient matrix so subtraction aligns on Gene)
    tumMat = tumVals.pivot(index='Gene', columns='Patient', values=column)
    diffs = tumMat.subtract(meanVals, axis=0)
    diffs['Gene'] = diffs.index
    fd = diffs.melt(id_vars='Gene', value_vars=tumMat.columns,
                    value_name='diffsToNormal', var_name='Patient')
    # now calculate absolute value to get the top quantile
    fd['absVal'] = np.abs(fd['diffsToNormal'])
    dquants = pd.DataFrame({'thresh': fd.groupby("Patient")['absVal'].quantile(1.0 - quantThresh)})
    # which genes/patients are above that per-patient threshold
    fd = fd.merge(dquants, on='Patient')
    fd = fd.assign(topProt=fd['absVal'] > fd['thresh'])
    selvals = fd[fd['topProt']]
    dprots = selvals.groupby('Patient')['Gene'].apply(list).to_dict()
    dvals = selvals.groupby("Patient")['absVal'].apply(list).to_dict()  # can't do neg prizes
    # return those values
    res = {}
    for k in dprots.keys():
        res[k] = dict(zip(dprots[k], dvals[k]))
    return res
| 33.044118 | 102 | 0.633956 |
acdf39f1bf42e2906071ba728b7d3a0b5a07546a | 660 | py | Python | test.py | KirtoXX/gloun_detection | 974171b707b720a74c60b632d223663b2413245b | [
"MIT"
] | null | null | null | test.py | KirtoXX/gloun_detection | 974171b707b720a74c60b632d223663b2413245b | [
"MIT"
] | null | null | null | test.py | KirtoXX/gloun_detection | 974171b707b720a74c60b632d223663b2413245b | [
"MIT"
] | null | null | null | from mxnet import gluon
import mxnet.ndarray as nd
import numpy as np
from gluoncv import model_zoo
from light_head_rcnn import My_LHRCNN
from faster_rcnn import Faster_rcnn
from gluoncv import data
import matplotlib.pyplot as plt
from gluoncv import utils
def main():
    """Load a trained Faster R-CNN, run it on one image and plot the boxes."""
    model = Faster_rcnn(pretrained_base=False)
    model.load_parameters('weights/frcnn_0.pkl')
    image_path = 'data/train/img/00e93901213fb80e1fc75be43dd12f2eb93894ea.jpg'
    batch, raw_image = data.transforms.presets.rcnn.load_test(image_path)
    class_ids, confidences, boxes = model(batch)
    ax = utils.viz.plot_bbox(raw_image, boxes, confidences, class_ids)
    plt.show()
if __name__ == '__main__':
main() | 30 | 76 | 0.768182 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.