code stringlengths 2k 1.04M | repo_path stringlengths 5 517 | parsed_code stringlengths 0 1.04M | quality_prob float64 0.02 0.95 | learning_prob float64 0.02 0.93 |
|---|---|---|---|---|
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import itertools
import random
class PCFG(nn.Module):
    """Probabilistic context-free grammar in Chomsky normal form.

    States are split into `nt_states` nonterminals (indices 0..NT-1) and
    `t_states` preterminals (indices NT..NT+T-1).  Provides the inside
    algorithm (log partition function) and Viterbi/CKY decoding.
    """

    def __init__(self, nt_states, t_states):
        super(PCFG, self).__init__()
        self.nt_states = nt_states
        self.t_states = t_states
        self.states = nt_states + t_states
        # Large finite stand-in for infinity; keeps log-space arithmetic NaN-free.
        self.huge = 1e9

    def logadd(self, x, y):
        """Numerically stable elementwise log(exp(x) + exp(y))."""
        d = torch.max(x, y)
        return torch.log(torch.exp(x - d) + torch.exp(y - d)) + d

    def logsumexp(self, x, dim=1):
        """Numerically stable log-sum-exp reduction along `dim`."""
        d = torch.max(x, dim)[0]
        if x.dim() == 1:
            return torch.log(torch.exp(x - d).sum(dim)) + d
        else:
            return torch.log(torch.exp(x - d.unsqueeze(dim).expand_as(x)).sum(dim)) + d

    def _inside(self, unary_scores, rule_scores, root_scores):
        """Inside algorithm; returns the log partition function per batch item.

        Args:
            unary_scores: (batch, n, T) preterminal emission log-scores.
            rule_scores: (batch, NT, NT+T, NT+T) binary rule log-scores.
            root_scores: (batch, NT) root production log-scores.

        Returns:
            Tensor of shape (batch,) holding log Z.
        """
        batch_size = unary_scores.size(0)
        n = unary_scores.size(1)
        # Conventional python numbering scheme: [s, t] represents span [s, t).
        # beta[b, s, t, A] = inside log-score of state A spanning [s, t).
        self.beta = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-self.huge)
        # Initialization: width-1 spans take the unary (preterminal) scores.
        for k in range(n):
            for state in range(self.t_states):
                self.beta[:, k, k + 1, self.nt_states + state] = unary_scores[:, k, state]
        # Span length w, at least 2, bottom-up.
        for w in np.arange(2, n + 1):
            # Start point s.
            for s in range(n - w + 1):
                t = s + w
                # Marginalize (left child, split, right child) per nonterminal parent.
                f = lambda x: torch.logsumexp(x.view(batch_size, self.nt_states, -1), dim=2)
                if w == 2:
                    # Both children are width-1 spans, i.e. preterminals.
                    tmp = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, self.nt_states:]
                    tmp = f(tmp)
                elif w == 3:
                    # Exactly one child is a preterminal (left or right edge).
                    tmp1 = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, :self.nt_states].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, :self.nt_states]
                    tmp2 = self.beta[:, s, t - 1, :self.nt_states].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, t - 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, :self.nt_states, self.nt_states:]
                    tmp = self.logadd(f(tmp1), f(tmp2))
                elif w >= 4:
                    # Preterminal left edge, preterminal right edge, or two
                    # nonterminal children with an interior split point.
                    tmp1 = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, :self.nt_states].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, :self.nt_states]
                    tmp2 = self.beta[:, s, t - 1, :self.nt_states].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, t - 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, :self.nt_states, self.nt_states:]
                    tmp3 = self.beta[:, s, s + 2:t - 1, :self.nt_states].unsqueeze(3).unsqueeze(1) \
                        + self.beta[:, s + 2:t - 1, t, :self.nt_states].unsqueeze(1).unsqueeze(3) \
                        + rule_scores[:, :, :self.nt_states, :self.nt_states].unsqueeze(2)
                    tmp = self.logadd(self.logadd(f(tmp1), f(tmp2)), f(tmp3))
                self.beta[:, s, t, :self.nt_states] = tmp
        log_Z = self.beta[:, 0, n, :self.nt_states] + root_scores
        log_Z = self.logsumexp(log_Z, 1)
        return log_Z

    def _viterbi(self, unary_scores, rule_scores, root_scores):
        """CKY decoding: best parse per batch item.

        Args:
            unary_scores: (batch, n, T) preterminal emission log-scores.
            rule_scores: (batch, NT, NT+T, NT+T) binary rule log-scores.
            root_scores: (batch, NT) root production log-scores.

        Returns:
            (scores, argmax, spans): chart of max log-scores, a 0/1
            span-indicator matrix, and per-example lists of (s, t, state)
            spans with inclusive endpoints.
        """
        batch_size = unary_scores.size(0)
        n = unary_scores.size(1)
        # Dummy rules so preterminal "parents" exist in the chart but never win.
        rule_scores = torch.cat([rule_scores,
                                 rule_scores.new(batch_size, self.t_states, self.states, self.states)
                                 .fill_(-self.huge)], dim=1)
        self.scores = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-self.huge)
        # Backpointers: split point, left-child state, right-child state.
        self.bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.left_bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.right_bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.argmax = unary_scores.new(batch_size, n, n).fill_(-1)
        self.argmax_tags = unary_scores.new(batch_size, n).fill_(-1)
        self.spans = [[] for _ in range(batch_size)]
        for k in range(n):
            for state in range(self.t_states):
                self.scores[:, k, k + 1, self.nt_states + state] = unary_scores[:, k, state]
        for w in np.arange(2, n + 1):
            for s in range(n - w + 1):
                t = s + w
                tmp = self.scores[:, s, s + 1:t, :].unsqueeze(3).unsqueeze(1) \
                    + self.scores[:, s + 1:t, t, :].unsqueeze(1).unsqueeze(3) \
                    + rule_scores.unsqueeze(2)
                # Flatten (split, left state, right state) and take the max once.
                tmp, max_pos = torch.max(tmp.view(batch_size, self.states, -1), dim=2)
                # Decode the flat argmax back into (split, left, right).
                # Fix: use integer floor division -- plain `/` performs true
                # division on modern PyTorch and yields float index tensors.
                pair = self.states * self.states
                max_idx = torch.div(max_pos, pair, rounding_mode='floor') + s + 1
                left_child = torch.div(max_pos % pair, self.states, rounding_mode='floor')
                right_child = max_pos % self.states
                self.scores[:, s, t, :self.nt_states] = tmp[:, :self.nt_states]
                self.bp[:, s, t, :self.nt_states] = max_idx[:, :self.nt_states]
                self.left_bp[:, s, t, :self.nt_states] = left_child[:, :self.nt_states]
                self.right_bp[:, s, t, :self.nt_states] = right_child[:, :self.nt_states]
        max_score = self.scores[:, 0, n, :self.nt_states] + root_scores
        max_score, max_idx = torch.max(max_score, 1)
        for b in range(batch_size):
            self._backtrack(b, 0, n, max_idx[b].item())
        return self.scores, self.argmax, self.spans

    def _backtrack(self, b, s, t, state):
        """Recursively recover the best tree for example b over span [s, t)."""
        u = int(self.bp[b][s][t][state])
        assert(s < t), "s: %d, t %d" % (s, t)
        left_state = int(self.left_bp[b][s][t][state])
        right_state = int(self.right_bp[b][s][t][state])
        self.argmax[b][s][t - 1] = 1
        if s == t - 1:
            # Width-1 span: a preterminal; record its tag index (offset by NT).
            self.spans[b].insert(0, (s, t - 1, state))
            self.argmax_tags[b][s] = state - self.nt_states
            return None
        else:
            self.spans[b].insert(0, (s, t - 1, state))
            self._backtrack(b, s, u, left_state)
            self._backtrack(b, u, t, right_state)
            return None
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import itertools
import random
class PCFG(nn.Module):
    """Probabilistic context-free grammar in Chomsky normal form.

    States are split into `nt_states` nonterminals (indices 0..NT-1) and
    `t_states` preterminals (indices NT..NT+T-1).  Provides the inside
    algorithm (log partition function) and Viterbi/CKY decoding.
    """

    def __init__(self, nt_states, t_states):
        super(PCFG, self).__init__()
        self.nt_states = nt_states
        self.t_states = t_states
        self.states = nt_states + t_states
        # Large finite stand-in for infinity; keeps log-space arithmetic NaN-free.
        self.huge = 1e9

    def logadd(self, x, y):
        """Numerically stable elementwise log(exp(x) + exp(y))."""
        d = torch.max(x, y)
        return torch.log(torch.exp(x - d) + torch.exp(y - d)) + d

    def logsumexp(self, x, dim=1):
        """Numerically stable log-sum-exp reduction along `dim`."""
        d = torch.max(x, dim)[0]
        if x.dim() == 1:
            return torch.log(torch.exp(x - d).sum(dim)) + d
        else:
            return torch.log(torch.exp(x - d.unsqueeze(dim).expand_as(x)).sum(dim)) + d

    def _inside(self, unary_scores, rule_scores, root_scores):
        """Inside algorithm; returns the log partition function per batch item.

        Args:
            unary_scores: (batch, n, T) preterminal emission log-scores.
            rule_scores: (batch, NT, NT+T, NT+T) binary rule log-scores.
            root_scores: (batch, NT) root production log-scores.

        Returns:
            Tensor of shape (batch,) holding log Z.
        """
        batch_size = unary_scores.size(0)
        n = unary_scores.size(1)
        # Conventional python numbering scheme: [s, t] represents span [s, t).
        # beta[b, s, t, A] = inside log-score of state A spanning [s, t).
        self.beta = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-self.huge)
        # Initialization: width-1 spans take the unary (preterminal) scores.
        for k in range(n):
            for state in range(self.t_states):
                self.beta[:, k, k + 1, self.nt_states + state] = unary_scores[:, k, state]
        # Span length w, at least 2, bottom-up.
        for w in np.arange(2, n + 1):
            # Start point s.
            for s in range(n - w + 1):
                t = s + w
                # Marginalize (left child, split, right child) per nonterminal parent.
                f = lambda x: torch.logsumexp(x.view(batch_size, self.nt_states, -1), dim=2)
                if w == 2:
                    # Both children are width-1 spans, i.e. preterminals.
                    tmp = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, self.nt_states:]
                    tmp = f(tmp)
                elif w == 3:
                    # Exactly one child is a preterminal (left or right edge).
                    tmp1 = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, :self.nt_states].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, :self.nt_states]
                    tmp2 = self.beta[:, s, t - 1, :self.nt_states].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, t - 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, :self.nt_states, self.nt_states:]
                    tmp = self.logadd(f(tmp1), f(tmp2))
                elif w >= 4:
                    # Preterminal left edge, preterminal right edge, or two
                    # nonterminal children with an interior split point.
                    tmp1 = self.beta[:, s, s + 1, self.nt_states:].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, s + 1, t, :self.nt_states].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, self.nt_states:, :self.nt_states]
                    tmp2 = self.beta[:, s, t - 1, :self.nt_states].unsqueeze(2).unsqueeze(1) \
                        + self.beta[:, t - 1, t, self.nt_states:].unsqueeze(1).unsqueeze(2) \
                        + rule_scores[:, :, :self.nt_states, self.nt_states:]
                    tmp3 = self.beta[:, s, s + 2:t - 1, :self.nt_states].unsqueeze(3).unsqueeze(1) \
                        + self.beta[:, s + 2:t - 1, t, :self.nt_states].unsqueeze(1).unsqueeze(3) \
                        + rule_scores[:, :, :self.nt_states, :self.nt_states].unsqueeze(2)
                    tmp = self.logadd(self.logadd(f(tmp1), f(tmp2)), f(tmp3))
                self.beta[:, s, t, :self.nt_states] = tmp
        log_Z = self.beta[:, 0, n, :self.nt_states] + root_scores
        log_Z = self.logsumexp(log_Z, 1)
        return log_Z

    def _viterbi(self, unary_scores, rule_scores, root_scores):
        """CKY decoding: best parse per batch item.

        Args:
            unary_scores: (batch, n, T) preterminal emission log-scores.
            rule_scores: (batch, NT, NT+T, NT+T) binary rule log-scores.
            root_scores: (batch, NT) root production log-scores.

        Returns:
            (scores, argmax, spans): chart of max log-scores, a 0/1
            span-indicator matrix, and per-example lists of (s, t, state)
            spans with inclusive endpoints.
        """
        batch_size = unary_scores.size(0)
        n = unary_scores.size(1)
        # Dummy rules so preterminal "parents" exist in the chart but never win.
        rule_scores = torch.cat([rule_scores,
                                 rule_scores.new(batch_size, self.t_states, self.states, self.states)
                                 .fill_(-self.huge)], dim=1)
        self.scores = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-self.huge)
        # Backpointers: split point, left-child state, right-child state.
        self.bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.left_bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.right_bp = unary_scores.new(batch_size, n + 1, n + 1, self.states).fill_(-1)
        self.argmax = unary_scores.new(batch_size, n, n).fill_(-1)
        self.argmax_tags = unary_scores.new(batch_size, n).fill_(-1)
        self.spans = [[] for _ in range(batch_size)]
        for k in range(n):
            for state in range(self.t_states):
                self.scores[:, k, k + 1, self.nt_states + state] = unary_scores[:, k, state]
        for w in np.arange(2, n + 1):
            for s in range(n - w + 1):
                t = s + w
                tmp = self.scores[:, s, s + 1:t, :].unsqueeze(3).unsqueeze(1) \
                    + self.scores[:, s + 1:t, t, :].unsqueeze(1).unsqueeze(3) \
                    + rule_scores.unsqueeze(2)
                # Flatten (split, left state, right state) and take the max once.
                tmp, max_pos = torch.max(tmp.view(batch_size, self.states, -1), dim=2)
                # Decode the flat argmax back into (split, left, right).
                # Fix: use integer floor division -- plain `/` performs true
                # division on modern PyTorch and yields float index tensors.
                pair = self.states * self.states
                max_idx = torch.div(max_pos, pair, rounding_mode='floor') + s + 1
                left_child = torch.div(max_pos % pair, self.states, rounding_mode='floor')
                right_child = max_pos % self.states
                self.scores[:, s, t, :self.nt_states] = tmp[:, :self.nt_states]
                self.bp[:, s, t, :self.nt_states] = max_idx[:, :self.nt_states]
                self.left_bp[:, s, t, :self.nt_states] = left_child[:, :self.nt_states]
                self.right_bp[:, s, t, :self.nt_states] = right_child[:, :self.nt_states]
        max_score = self.scores[:, 0, n, :self.nt_states] + root_scores
        max_score, max_idx = torch.max(max_score, 1)
        for b in range(batch_size):
            self._backtrack(b, 0, n, max_idx[b].item())
        return self.scores, self.argmax, self.spans

    def _backtrack(self, b, s, t, state):
        """Recursively recover the best tree for example b over span [s, t)."""
        u = int(self.bp[b][s][t][state])
        assert(s < t), "s: %d, t %d" % (s, t)
        left_state = int(self.left_bp[b][s][t][state])
        right_state = int(self.right_bp[b][s][t][state])
        self.argmax[b][s][t - 1] = 1
        if s == t - 1:
            # Width-1 span: a preterminal; record its tag index (offset by NT).
            self.spans[b].insert(0, (s, t - 1, state))
            self.argmax_tags[b][s] = state - self.nt_states
            return None
        else:
            self.spans[b].insert(0, (s, t - 1, state))
            self._backtrack(b, s, u, left_state)
            self._backtrack(b, u, t, right_state)
            return None
from __future__ import print_function
from django.conf import settings
from django.test.client import Client
from django_medusa.log import get_logger, finalize_logger
import mimetypes
import os
__all__ = ['COMMON_MIME_MAPS', 'BaseStaticSiteRenderer']

# Since mimetypes.guess_extension() returns the "first known" extension
# (alphabetically), we get stupid behavior like "text/plain" mapping to
# ".bat".  This map overrides some file types we will surely use, to
# eliminate a call to mimetypes.guess_extension() except in unusual cases.
COMMON_MIME_MAPS = {
    "text/plain": ".txt",
    "text/html": ".html",
    "text/javascript": ".js",
    "application/javascript": ".js",
    "text/json": ".json",
    "application/json": ".json",
    "text/css": ".css",
}
class RenderError(Exception):
    """Raised when rendering a path fails (e.g. a non-200 response)."""
class BaseStaticSiteRenderer(object):
    """
    This default renderer writes the given URLs (defined in get_paths())
    into static files on the filesystem by getting the view's response
    through the Django test client.
    """

    def __init__(self):
        # Set to a real test client in generate() for single-threaded runs;
        # worker processes fall back to a fresh Client in _render().
        self.client = None

    @classmethod
    def initialize_output(cls):
        """
        Things that should be done only once to the output directory BEFORE
        rendering occurs (i.e. setting up a config file, creating dirs,
        creating an external resource, starting an atomic deploy, etc.)

        Management command calls this once before iterating over all
        renderer instances.
        """
        # Store logger on BaseStaticSiteRenderer so that all derivative
        # classes share this single instance.
        BaseStaticSiteRenderer.logger = get_logger()

    @classmethod
    def finalize_output(cls):
        """
        Things that should be done only once to the output directory AFTER
        rendering occurs (i.e. writing end of config file, setting up
        permissions, calling an external "deploy" method, finalizing an
        atomic deploy, etc.)

        Management command calls this once after iterating over all
        renderer instances.
        """
        finalize_logger()
        BaseStaticSiteRenderer.logger = None

    def get_paths(self):
        """ Override this in a subclass to define the URLs to process """
        raise NotImplementedError

    @property
    def paths(self):
        """ Property that memoizes get_paths. """
        p = getattr(self, "_paths", None)
        if not p:
            p = self.get_paths()
            self._paths = p
        return p

    def _render(self, path=None, view=None):
        """GET `path` with the Django test client; raise RenderError on non-200."""
        client = self.client
        if not client:
            client = Client()
        response = client.get(path)
        if response.status_code != 200:
            raise RenderError(
                "Path {0} did not return status 200".format(path))
        return response

    @classmethod
    def get_outpath(cls, path, content_type):
        """Map a URL path to a relative output file path for `content_type`."""
        # Get non-absolute path
        path = path[1:] if path.startswith('/') else path
        # Already resolves to a file, not a directory.
        if not path.endswith('/'):
            return path
        return os.path.join(path, cls.get_dirsuffix(content_type))

    @classmethod
    def get_dirsuffix(cls, content_type):
        """Return 'index' plus the extension implied by the MIME type."""
        mime = content_type.split(';', 1)[0]
        return ('index' +
                (COMMON_MIME_MAPS.get(mime, mimetypes.guess_extension(mime)) or
                 '.html'))

    def render_path(self, path=None, view=None):
        raise NotImplementedError

    def generate(self):
        """Render every path, optionally fanning out across processes.

        Returns the list of per-path results from render_path().
        """
        arglist = ((path, None) for path in self.paths)
        if getattr(settings, "MEDUSA_MULTITHREAD", False):
            # Unused `Queue` import removed.
            from multiprocessing import Pool, cpu_count

            generator = PageGenerator(self)
            self.logger.info("Generating with up to %s processes...",
                             cpu_count())
            pool = Pool(cpu_count())
            retval = pool.map(generator, arglist, chunksize=1)
            pool.close()
            pool.join()
        else:
            self.client = Client()
            generator = PageGenerator(self)
            # Bug fix: on Python 3 map() is lazy, so without list() nothing
            # would actually be rendered before generate() returned.
            retval = list(map(generator, arglist))
        return retval
class PageGenerator(object):
    """
    Helper class to bounce things back into the renderer instance, since
    multiprocessing is unable to transfer a bound method object into a pickle.
    """

    def __init__(self, renderer):
        self.renderer = renderer

    def __call__(self, args):
        """Render one (path, view) pair; log and swallow failures.

        Returns render_path()'s result, or None when rendering raised --
        best-effort behavior so one bad page cannot abort the whole run.
        """
        path = args[0]
        logger = self.renderer.logger
        try:
            logger.info("Generating %s...", path)
            retval = self.renderer.render_path(*args)
            logger.info("Generated %s successfully", path)
            return retval
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Traceback preserved via exc_info.
            logger.error("Could not generate %s", path, exc_info=True)
            return None
from django.conf import settings
from django.test.client import Client
from django_medusa.log import get_logger, finalize_logger
import mimetypes
import os
__all__ = ['COMMON_MIME_MAPS', 'BaseStaticSiteRenderer']

# Since mimetypes.guess_extension() returns the "first known" extension
# (alphabetically), we get stupid behavior like "text/plain" mapping to
# ".bat".  This map overrides some file types we will surely use, to
# eliminate a call to mimetypes.guess_extension() except in unusual cases.
COMMON_MIME_MAPS = {
    "text/plain": ".txt",
    "text/html": ".html",
    "text/javascript": ".js",
    "application/javascript": ".js",
    "text/json": ".json",
    "application/json": ".json",
    "text/css": ".css",
}
class RenderError(Exception):
    """Raised when rendering a path fails (e.g. a non-200 response)."""
class BaseStaticSiteRenderer(object):
    """
    This default renderer writes the given URLs (defined in get_paths())
    into static files on the filesystem by getting the view's response
    through the Django test client.
    """

    def __init__(self):
        # Set to a real test client in generate() for single-threaded runs;
        # worker processes fall back to a fresh Client in _render().
        self.client = None

    @classmethod
    def initialize_output(cls):
        """
        Things that should be done only once to the output directory BEFORE
        rendering occurs (i.e. setting up a config file, creating dirs,
        creating an external resource, starting an atomic deploy, etc.)

        Management command calls this once before iterating over all
        renderer instances.
        """
        # Store logger on BaseStaticSiteRenderer so that all derivative
        # classes share this single instance.
        BaseStaticSiteRenderer.logger = get_logger()

    @classmethod
    def finalize_output(cls):
        """
        Things that should be done only once to the output directory AFTER
        rendering occurs (i.e. writing end of config file, setting up
        permissions, calling an external "deploy" method, finalizing an
        atomic deploy, etc.)

        Management command calls this once after iterating over all
        renderer instances.
        """
        finalize_logger()
        BaseStaticSiteRenderer.logger = None

    def get_paths(self):
        """ Override this in a subclass to define the URLs to process """
        raise NotImplementedError

    @property
    def paths(self):
        """ Property that memoizes get_paths. """
        p = getattr(self, "_paths", None)
        if not p:
            p = self.get_paths()
            self._paths = p
        return p

    def _render(self, path=None, view=None):
        """GET `path` with the Django test client; raise RenderError on non-200."""
        client = self.client
        if not client:
            client = Client()
        response = client.get(path)
        if response.status_code != 200:
            raise RenderError(
                "Path {0} did not return status 200".format(path))
        return response

    @classmethod
    def get_outpath(cls, path, content_type):
        """Map a URL path to a relative output file path for `content_type`."""
        # Get non-absolute path
        path = path[1:] if path.startswith('/') else path
        # Already resolves to a file, not a directory.
        if not path.endswith('/'):
            return path
        return os.path.join(path, cls.get_dirsuffix(content_type))

    @classmethod
    def get_dirsuffix(cls, content_type):
        """Return 'index' plus the extension implied by the MIME type."""
        mime = content_type.split(';', 1)[0]
        return ('index' +
                (COMMON_MIME_MAPS.get(mime, mimetypes.guess_extension(mime)) or
                 '.html'))

    def render_path(self, path=None, view=None):
        raise NotImplementedError

    def generate(self):
        """Render every path, optionally fanning out across processes.

        Returns the list of per-path results from render_path().
        """
        arglist = ((path, None) for path in self.paths)
        if getattr(settings, "MEDUSA_MULTITHREAD", False):
            # Unused `Queue` import removed.
            from multiprocessing import Pool, cpu_count

            generator = PageGenerator(self)
            self.logger.info("Generating with up to %s processes...",
                             cpu_count())
            pool = Pool(cpu_count())
            retval = pool.map(generator, arglist, chunksize=1)
            pool.close()
            pool.join()
        else:
            self.client = Client()
            generator = PageGenerator(self)
            # Bug fix: on Python 3 map() is lazy, so without list() nothing
            # would actually be rendered before generate() returned.
            retval = list(map(generator, arglist))
        return retval
class PageGenerator(object):
    """
    Helper class to bounce things back into the renderer instance, since
    multiprocessing is unable to transfer a bound method object into a pickle.
    """

    def __init__(self, renderer):
        self.renderer = renderer

    def __call__(self, args):
        """Render one (path, view) pair; log and swallow failures.

        Returns render_path()'s result, or None when rendering raised --
        best-effort behavior so one bad page cannot abort the whole run.
        """
        path = args[0]
        logger = self.renderer.logger
        try:
            logger.info("Generating %s...", path)
            retval = self.renderer.render_path(*args)
            logger.info("Generated %s successfully", path)
            return retval
        except Exception:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit. Traceback preserved via exc_info.
            logger.error("Could not generate %s", path, exc_info=True)
            return None
import subprocess
import os
import sys
def saferun(cmd, debug=True):
    """Run `cmd`, returning its stripped stdout as text, or None on failure.

    Args:
        cmd: Sequence of argv strings (no shell involved).
        debug: When True, echo the command line (ANSI yellow) before running.
    """
    try:
        if debug:
            color = 3  # ANSI yellow
            cmdline = " ".join(cmd)
            print('\033[1;3{}m{}\033[0m'.format(color, cmdline))
        return subprocess.check_output(cmd).strip().decode("utf-8")
    except Exception:
        # Best-effort helper: callers treat None as "command failed".
        return None


def serviceNameGet():
    """Read the service name from ./msa.cfg (line 's:/ms/name=...'), or None."""
    with open("msa.cfg", "rt") as f:
        for line in f:
            if line.startswith("s:/ms/name="):
                return line[11:].strip()
    return None


def currentDir():
    """Absolute, symlink-resolved current working directory."""
    return os.path.realpath(os.getcwd())


def msbIPAddress():
    """IP address of the running 'msb' container, or None if unavailable."""
    return saferun(("sudo", "docker", "inspect", "--format",
                    "{{ .NetworkSettings.IPAddress }}", "msb"))


def volumeGet(container):
    """Value of the container's VOLUME label, or None."""
    return saferun(("sudo", "docker", "inspect", "--format",
                    "{{ .Config.Labels.VOLUME }}", container))


def dockerGateway():
    """Gateway IP of docker's default bridge network, or None."""
    cmd = ("sudo", "docker", "network", "inspect", "bridge", "--format",
           '{{(index .IPAM.Config 0).Gateway}}')
    return saferun(cmd)


def dockerRun(name, pwd, msbIP, backrun):
    """Start the 'msa' container with conf/source mounts and MH* env mappings."""
    cmd = ["sudo", "docker", "run", "-it", "--name", name]
    if backrun:
        cmd.extend(["-d"])
    cmd.extend(["-v", "/tmp/.conf.%s:/tmp/conf" % name])
    cmd.extend(["-v", "%s:/root/ms" % pwd])
    cmd.extend(["-e", "MSBHOST=%s" % msbIP])
    cmd.extend(["-e", "DOCKER_GATEWAY=%s" % dockerGateway()])
    for k, v in os.environ.items():
        # -v : MHV_aaa=bbb => -v "aaa:bbb"
        # -e : MHE_aaa=bbb => -e "aaa=bbb"
        if k.startswith("MHV_"):
            cmd.extend(["-v", "%s:%s" % (k[4:], v)])
        if k.startswith("MHE_"):
            cmd.extend(["-e", "%s=%s" % (k[4:], v)])
    volumeMap = volumeGet(name)
    # docker prints "<no value>" when the label is missing; skip that case.
    if volumeMap and volumeMap[0] != "<":
        cmd.extend(["-v", volumeMap])
    cmd.extend(["msa"])
    return saferun(cmd)


def dockerKill(name):
    """Stop and remove the named container (failures are ignored)."""
    saferun(("sudo", "docker", "stop", name))
    saferun(("sudo", "docker", "rm", name))


def main():
    if "--help" in sys.argv:
        print("1. Fetch the MSB's IPAddress and set to the container")
        print("2. Get the container name from ./msa.cfg")
        print("3. Share current directory to container's /root/ms")
        print("4. Run the docker container")
        print("Usage: msahere.py [-k:kill] [-b:backrun]")
        return
    name = serviceNameGet()
    if name is None:
        # Bug fix: previously crashed with TypeError on `name + ".localrun"`
        # when msa.cfg lacked an 's:/ms/name=' entry.
        print("msa.cfg: no 's:/ms/name=' entry found")
        return
    pwd = currentDir()
    msbIP = msbIPAddress()
    backrun = "-b" in sys.argv
    killold = "-k" in sys.argv
    if killold:
        dockerKill(name)
    name = name + ".localrun"
    dockerRun(name, pwd, msbIP, backrun)


if __name__ == "__main__":
    main()
import subprocess
import os
import sys
def saferun(cmd, debug=True):
    """Run `cmd`, returning its stripped stdout as text, or None on failure.

    Args:
        cmd: Sequence of argv strings (no shell involved).
        debug: When True, echo the command line (ANSI yellow) before running.
    """
    try:
        if debug:
            color = 3  # ANSI yellow
            cmdline = " ".join(cmd)
            print('\033[1;3{}m{}\033[0m'.format(color, cmdline))
        return subprocess.check_output(cmd).strip().decode("utf-8")
    except Exception:
        # Best-effort helper: callers treat None as "command failed".
        return None


def serviceNameGet():
    """Read the service name from ./msa.cfg (line 's:/ms/name=...'), or None."""
    with open("msa.cfg", "rt") as f:
        for line in f:
            if line.startswith("s:/ms/name="):
                return line[11:].strip()
    return None


def currentDir():
    """Absolute, symlink-resolved current working directory."""
    return os.path.realpath(os.getcwd())


def msbIPAddress():
    """IP address of the running 'msb' container, or None if unavailable."""
    return saferun(("sudo", "docker", "inspect", "--format",
                    "{{ .NetworkSettings.IPAddress }}", "msb"))


def volumeGet(container):
    """Value of the container's VOLUME label, or None."""
    return saferun(("sudo", "docker", "inspect", "--format",
                    "{{ .Config.Labels.VOLUME }}", container))


def dockerGateway():
    """Gateway IP of docker's default bridge network, or None."""
    cmd = ("sudo", "docker", "network", "inspect", "bridge", "--format",
           '{{(index .IPAM.Config 0).Gateway}}')
    return saferun(cmd)


def dockerRun(name, pwd, msbIP, backrun):
    """Start the 'msa' container with conf/source mounts and MH* env mappings."""
    cmd = ["sudo", "docker", "run", "-it", "--name", name]
    if backrun:
        cmd.extend(["-d"])
    cmd.extend(["-v", "/tmp/.conf.%s:/tmp/conf" % name])
    cmd.extend(["-v", "%s:/root/ms" % pwd])
    cmd.extend(["-e", "MSBHOST=%s" % msbIP])
    cmd.extend(["-e", "DOCKER_GATEWAY=%s" % dockerGateway()])
    for k, v in os.environ.items():
        # -v : MHV_aaa=bbb => -v "aaa:bbb"
        # -e : MHE_aaa=bbb => -e "aaa=bbb"
        if k.startswith("MHV_"):
            cmd.extend(["-v", "%s:%s" % (k[4:], v)])
        if k.startswith("MHE_"):
            cmd.extend(["-e", "%s=%s" % (k[4:], v)])
    volumeMap = volumeGet(name)
    # docker prints "<no value>" when the label is missing; skip that case.
    if volumeMap and volumeMap[0] != "<":
        cmd.extend(["-v", volumeMap])
    cmd.extend(["msa"])
    return saferun(cmd)


def dockerKill(name):
    """Stop and remove the named container (failures are ignored)."""
    saferun(("sudo", "docker", "stop", name))
    saferun(("sudo", "docker", "rm", name))


def main():
    if "--help" in sys.argv:
        print("1. Fetch the MSB's IPAddress and set to the container")
        print("2. Get the container name from ./msa.cfg")
        print("3. Share current directory to container's /root/ms")
        print("4. Run the docker container")
        print("Usage: msahere.py [-k:kill] [-b:backrun]")
        return
    name = serviceNameGet()
    if name is None:
        # Bug fix: previously crashed with TypeError on `name + ".localrun"`
        # when msa.cfg lacked an 's:/ms/name=' entry.
        print("msa.cfg: no 's:/ms/name=' entry found")
        return
    pwd = currentDir()
    msbIP = msbIPAddress()
    backrun = "-b" in sys.argv
    killold = "-k" in sys.argv
    if killold:
        dockerKill(name)
    name = name + ".localrun"
    dockerRun(name, pwd, msbIP, backrun)


if __name__ == "__main__":
    main()
from typing import Optional
import torch as _torch
def mask_padded_values(xs: _torch.FloatTensor, n: _torch.LongTensor,
                       mask_value: float = -float('inf'),
                       mutate: bool = False):
    """Replace entries beyond each row's list size with `mask_value`.

    Args:
        xs: Tensor of shape (batch_size, list_size, 1) containing padded
            values.
        n: Tensor of shape (batch_size) with each query's true list size.
        mask_value: The value written into padded positions (default: -inf).
        mutate: If True, modify `xs` in place; otherwise operate on a copy.

    Returns:
        The masked tensor (the same object as `xs` when `mutate` is True).
    """
    positions = _torch.arange(xs.shape[1], device=xs.device)
    # Broadcast (1, list_size) >= (batch_size, 1): True marks padding.
    padding = positions.unsqueeze(0) >= n.unsqueeze(1)
    out = xs if mutate else xs.clone()
    out[padding] = mask_value
    return out
def tiebreak_argsort(
        x: _torch.FloatTensor,
        descending: bool = True,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Argsort each row of `x`, breaking ties uniformly at random.

    Randomly permuting the columns before sorting puts equal values in a
    random relative order; mapping the sorted positions back through the
    permutation yields indices into the original columns.

    Args:
        x: A 2D tensor; each row is argsorted independently.
        descending: Whether to sort in descending order.
        generator: Optional RNG for reproducible permutations.

    Returns:
        A 2D tensor of the same size as x with the tie-broken argsort.
    """
    extra = {}
    if generator is not None:
        extra["generator"] = generator
    perm = _torch.randperm(x.shape[1], device=x.device, **extra)
    order = _torch.argsort(x[:, perm], descending=descending)
    return perm[order]
def rank_by_score(
        scores: _torch.FloatTensor,
        n: _torch.LongTensor,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Rank documents by decreasing score.

    Padded documents always sort last (their scores are masked to -inf) and
    ties are broken randomly.

    Args:
        scores: Tensor of shape (batch_size, list_size, 1) or
            (batch_size, list_size) containing scores.
        n: Tensor of shape (batch_size) with each query's list size.
        generator: Optional RNG used for tie-breaking.
    """
    if scores.dim() == 3:
        scores = scores.reshape((scores.shape[0], scores.shape[1]))
    masked = mask_padded_values(scores, n)
    return tiebreak_argsort(masked, generator=generator)
def rank_by_plackettluce(
        scores: _torch.FloatTensor, n: _torch.LongTensor,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Sample a ranking from the Plackett-Luce distribution over `scores`.

    Padded documents always end up last (masked to -inf before the softmax).

    Args:
        scores: Tensor of shape (batch_size, list_size, 1) or
            (batch_size, list_size) containing scores.
        n: Tensor of shape (batch_size) with each query's list size.
        generator: Optional RNG for reproducible sampling.
    """
    if scores.dim() == 3:
        scores = scores.reshape((scores.shape[0], scores.shape[1]))
    masked = mask_padded_values(scores, n)
    # Sampling without replacement proportionally to p amounts to sorting
    # keys Uniform(0, 1) ^ (1 / p) (the exponential-race / Gumbel trick).
    # The variant below is numerically stable because it stays in log-space:
    # sort log(-log(U)) - log(p) ascending.
    log_p = _torch.nn.LogSoftmax(dim=1)(masked)
    extra = {}
    if generator is not None:
        extra["generator"] = generator
    u = _torch.rand(log_p.shape, device=scores.device, **extra)
    keys = _torch.log(-_torch.log(u)) - log_p
    return tiebreak_argsort(keys, descending=False, generator=generator)
def batch_pairs(x: _torch.Tensor) -> _torch.Tensor:
    """Returns a pair matrix containing all ordered pairs (i, j):

        p[_, i, j, 0] = x[_, i]
        p[_, i, j, 1] = x[_, j]

    Args:
        x: The input batch of dimension (batch_size, list_size) or
            (batch_size, list_size, 1).

    Returns:
        A single tensor of size (batch_size, list_size, list_size, 2)
        containing all pairs.
    """
    if x.dim() == 2:
        x = x.reshape((x.shape[0], x.shape[1], 1))
    # Construct broadcasted x_{:,i,0...list_size}
    x_ij = _torch.repeat_interleave(x, x.shape[1], dim=2)
    # Construct broadcasted x_{:,0...list_size,i}
    x_ji = _torch.repeat_interleave(x.permute(0, 2, 1), x.shape[1], dim=1)
    return _torch.stack([x_ij, x_ji], dim=3)
import torch as _torch
def mask_padded_values(xs: _torch.FloatTensor, n: _torch.LongTensor,
                       mask_value: float = -float('inf'),
                       mutate: bool = False):
    """Replace entries beyond each row's list size with `mask_value`.

    Args:
        xs: Tensor of shape (batch_size, list_size, 1) containing padded
            values.
        n: Tensor of shape (batch_size) with each query's true list size.
        mask_value: The value written into padded positions (default: -inf).
        mutate: If True, modify `xs` in place; otherwise operate on a copy.

    Returns:
        The masked tensor (the same object as `xs` when `mutate` is True).
    """
    positions = _torch.arange(xs.shape[1], device=xs.device)
    # Broadcast (1, list_size) >= (batch_size, 1): True marks padding.
    padding = positions.unsqueeze(0) >= n.unsqueeze(1)
    out = xs if mutate else xs.clone()
    out[padding] = mask_value
    return out
def tiebreak_argsort(
        x: _torch.FloatTensor,
        descending: bool = True,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Argsort each row of `x`, breaking ties uniformly at random.

    Randomly permuting the columns before sorting puts equal values in a
    random relative order; mapping the sorted positions back through the
    permutation yields indices into the original columns.

    Args:
        x: A 2D tensor; each row is argsorted independently.
        descending: Whether to sort in descending order.
        generator: Optional RNG for reproducible permutations.

    Returns:
        A 2D tensor of the same size as x with the tie-broken argsort.
    """
    extra = {}
    if generator is not None:
        extra["generator"] = generator
    perm = _torch.randperm(x.shape[1], device=x.device, **extra)
    order = _torch.argsort(x[:, perm], descending=descending)
    return perm[order]
def rank_by_score(
        scores: _torch.FloatTensor,
        n: _torch.LongTensor,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Rank documents by decreasing score.

    Padded documents always sort last (their scores are masked to -inf) and
    ties are broken randomly.

    Args:
        scores: Tensor of shape (batch_size, list_size, 1) or
            (batch_size, list_size) containing scores.
        n: Tensor of shape (batch_size) with each query's list size.
        generator: Optional RNG used for tie-breaking.
    """
    if scores.dim() == 3:
        scores = scores.reshape((scores.shape[0], scores.shape[1]))
    masked = mask_padded_values(scores, n)
    return tiebreak_argsort(masked, generator=generator)
def rank_by_plackettluce(
        scores: _torch.FloatTensor, n: _torch.LongTensor,
        generator: Optional[_torch.Generator] = None) -> _torch.LongTensor:
    """Sample a ranking from the Plackett-Luce distribution over `scores`.

    Padded documents always end up last (masked to -inf before the softmax).

    Args:
        scores: Tensor of shape (batch_size, list_size, 1) or
            (batch_size, list_size) containing scores.
        n: Tensor of shape (batch_size) with each query's list size.
        generator: Optional RNG for reproducible sampling.
    """
    if scores.dim() == 3:
        scores = scores.reshape((scores.shape[0], scores.shape[1]))
    masked = mask_padded_values(scores, n)
    # Sampling without replacement proportionally to p amounts to sorting
    # keys Uniform(0, 1) ^ (1 / p) (the exponential-race / Gumbel trick).
    # The variant below is numerically stable because it stays in log-space:
    # sort log(-log(U)) - log(p) ascending.
    log_p = _torch.nn.LogSoftmax(dim=1)(masked)
    extra = {}
    if generator is not None:
        extra["generator"] = generator
    u = _torch.rand(log_p.shape, device=scores.device, **extra)
    keys = _torch.log(-_torch.log(u)) - log_p
    return tiebreak_argsort(keys, descending=False, generator=generator)
def batch_pairs(x: _torch.Tensor) -> _torch.Tensor:
    """Returns a pair matrix containing all ordered pairs (i, j):

        p[_, i, j, 0] = x[_, i]
        p[_, i, j, 1] = x[_, j]

    Args:
        x: The input batch of dimension (batch_size, list_size) or
            (batch_size, list_size, 1).

    Returns:
        A single tensor of size (batch_size, list_size, list_size, 2)
        containing all pairs.
    """
    if x.dim() == 2:
        x = x.reshape((x.shape[0], x.shape[1], 1))
    # Construct broadcasted x_{:,i,0...list_size}
    x_ij = _torch.repeat_interleave(x, x.shape[1], dim=2)
    # Construct broadcasted x_{:,0...list_size,i}
    x_ji = _torch.repeat_interleave(x.permute(0, 2, 1), x.shape[1], dim=1)
    return _torch.stack([x_ij, x_ji], dim=3)
from tests.integration import asserts
def test_provider_user_can_be_created(provider_account_user, provider_account_params):
    # Fixture-created user should be a valid resource matching its params.
    asserts.assert_resource(provider_account_user)
    asserts.assert_resource_params(provider_account_user, provider_account_params)


def test_provider_user_list(api):
    accounts = api.provider_accounts.list()
    assert len(accounts) > 0


def test_provider_user_can_be_read(api, provider_account_user, provider_account_params):
    account = api.provider_account_users.read(provider_account_user.entity_id)
    asserts.assert_resource(account)
    asserts.assert_resource_params(account, provider_account_params)


def test_resource_role_change(provider_account_user):
    # Role promotion via the resource object itself.
    assert provider_account_user['role'] == 'member'
    updated = provider_account_user.set_role_admin()
    assert updated['role'] == 'admin'


def test_api_role_change(api, provider_account_user):
    # Role promotion via the top-level API client.
    assert provider_account_user['role'] == 'member'
    updated = api.provider_account_users.set_role_admin(provider_account_user.entity_id)
    assert updated['role'] == 'admin'


def test_api_read_permissions(api, provider_account_user):
    # Admins should see the 'portal' section via the API client.
    provider_account_user.set_role_admin()
    response = api.provider_account_users.permissions_read(provider_account_user.entity_id)
    permissions = response['permissions']
    assert 'portal' in permissions['allowed_sections']


def test_resource_read_permissions(provider_account_user):
    # Admins should see the 'portal' section via the resource object.
    provider_account_user.set_role_admin()
    response = provider_account_user.permissions_read()
    permissions = response['permissions']
    assert 'portal' in permissions['allowed_sections']


def test_resource_update_permissions(service, provider_account_user):
    # Members start with no portal access and no allowed services...
    provider_account_user.set_role_member()
    response = provider_account_user.permissions_update()
    permissions = response['permissions']
    assert 'portal' not in permissions['allowed_sections']
    assert service['id'] not in permissions['allowed_service_ids']
    # ...until permissions are explicitly granted.
    response = provider_account_user.permissions_update(
        allowed_services=[service['id']], allowed_sections=['portal'])
    permissions = response['permissions']
    assert 'portal' in permissions['allowed_sections']
    assert service['id'] in permissions['allowed_service_ids']
def test_provider_user_can_be_created(provider_account_user, provider_account_params):
asserts.assert_resource(provider_account_user)
asserts.assert_resource_params(provider_account_user, provider_account_params)
def test_provider_user_list(api):
accounts = api.provider_accounts.list()
assert len(accounts) > 0
def test_provider_user_can_be_read(api, provider_account_user, provider_account_params):
account = api.provider_account_users.read(provider_account_user.entity_id)
asserts.assert_resource(account)
asserts.assert_resource_params(account, provider_account_params)
def test_resource_role_change(provider_account_user):
assert provider_account_user['role'] == 'member'
updated = provider_account_user.set_role_admin()
assert updated['role'] == 'admin'
def test_api_role_change(api, provider_account_user):
assert provider_account_user['role'] == 'member'
updated = api.provider_account_users.set_role_admin(provider_account_user.entity_id)
assert updated['role'] == 'admin'
def test_api_read_permissions(api, provider_account_user):
provider_account_user.set_role_admin()
response = api.provider_account_users.permissions_read(provider_account_user.entity_id)
permissions = response['permissions']
assert 'portal' in permissions['allowed_sections']
def test_resource_read_permissions(provider_account_user):
provider_account_user.set_role_admin()
response = provider_account_user.permissions_read()
permissions = response['permissions']
assert 'portal' in permissions['allowed_sections']
def test_resource_update_permissions(service, provider_account_user):
provider_account_user.set_role_member()
response = provider_account_user.permissions_update()
permissions = response['permissions']
assert 'portal' not in permissions['allowed_sections']
assert service['id'] not in permissions['allowed_service_ids']
response = provider_account_user.permissions_update(
allowed_services=[service['id']], allowed_sections=['portal'])
permissions = response['permissions']
assert 'portal' in permissions['allowed_sections']
assert service['id'] in permissions['allowed_service_ids'] | 0.647352 | 0.623234 |
from copy import deepcopy
from typing import Any
from typing import Dict, Mapping, TypeVar, Union
T = TypeVar("T", bound=Dict[str, Any])
def merge_configs(
base_config: T, new_config_layer: Mapping[str, Any], path_from_root: str = "$.", inplace: bool = False
) -> T:
"""
Merges two dictionaries recursively.
In case of conflict, updates the values in base_config by the ones provided in new_config_layer.
The returned object is of the same type as base_config, allowing to use subclasses of dict for specific
use cases.
This function is based on a (very) similar function in the contracts codebase.
:param base_config: Base config dictionary.
:param new_config_layer: New config dictionary.
:param path_from_root: Current path inside the dictionary. Used for logging of errors as this function is recursive.
:param inplace: Whether the base_config dictionary should be updated in place.
:return: The merged configuration dictionary. This object is of the same type as base_config and is actually
the same object as base_config if the inplace flag is set to True.
"""
dict_type = type(base_config)
new_config = base_config if inplace else deepcopy(base_config)
for k, v in new_config_layer.items():
if isinstance(v, dict):
if k not in new_config:
new_config[k] = dict_type()
elif not isinstance(new_config[k], dict):
raise Exception(
f"Invalid config merge at node {path_from_root}{k}. Base layer is not a dict, but "
f"was overwritten by a dictionary."
)
new_config[k] = merge_configs(
dict_type(**new_config[k]), v, path_from_root=path_from_root + k + ".", inplace=True
)
else:
if k in new_config and isinstance(new_config[k], dict):
raise Exception(
f"Invalid config merge at node {path_from_root}{k}. Base layer is a dict, but "
f"was overwritten by a non-dictionary."
)
else:
new_config[k] = new_config_layer[k]
return new_config
def convert_to_bool(value_str: str) -> bool:
"""
Converts string values to their boolean equivalents.
:param value_str: Value string.
:return: The boolean value represented by `value_str`, if any.
:raise: A ValueError if it is not possible to convert the value string to bool.
"""
conversion_dict = {"true": True, "t": True, "y": True, "false": False, "f": False, "n": False}
try:
return conversion_dict[value_str.lower()]
except KeyError as e:
raise ValueError(f"Not a boolean value: {value_str}") from e
def convert_str_value(value_str: str) -> Union[str, int, float, bool]:
"""
Attempts to convert a string value to the most logical type.
Supported types are bool, float and int.
:param value_str: Value to convert, in string format.
:return: The value in its real type, or as a string if no conversion is possible.
"""
try:
return convert_to_bool(value_str)
except ValueError:
pass
try:
return float(value_str)
except ValueError:
pass
try:
return int(value_str)
except ValueError:
return value_str
class ConfigDict(dict):
"""
Dictionary that allows access to its values by attribute for convenience.
"""
def __getattr__(self, item: str) -> Any:
try:
return self[item]
except KeyError as e:
raise AttributeError(f"No such configuration variable or namespace: '{item}'") from e
def cast_values(self) -> None:
"""
Attempts to convert the leaf values of the dictionary to their real type.
"""
for k, v in self.items():
if isinstance(v, ConfigDict):
v.cast_values()
elif isinstance(v, str):
self[k] = convert_str_value(v) | conflex/config_dict.py | from copy import deepcopy
from typing import Any
from typing import Dict, Mapping, TypeVar, Union
T = TypeVar("T", bound=Dict[str, Any])
def merge_configs(
base_config: T, new_config_layer: Mapping[str, Any], path_from_root: str = "$.", inplace: bool = False
) -> T:
"""
Merges two dictionaries recursively.
In case of conflict, updates the values in base_config by the ones provided in new_config_layer.
The returned object is of the same type as base_config, allowing to use subclasses of dict for specific
use cases.
This function is based on a (very) similar function in the contracts codebase.
:param base_config: Base config dictionary.
:param new_config_layer: New config dictionary.
:param path_from_root: Current path inside the dictionary. Used for logging of errors as this function is recursive.
:param inplace: Whether the base_config dictionary should be updated in place.
:return: The merged configuration dictionary. This object is of the same type as base_config and is actually
the same object as base_config if the inplace flag is set to True.
"""
dict_type = type(base_config)
new_config = base_config if inplace else deepcopy(base_config)
for k, v in new_config_layer.items():
if isinstance(v, dict):
if k not in new_config:
new_config[k] = dict_type()
elif not isinstance(new_config[k], dict):
raise Exception(
f"Invalid config merge at node {path_from_root}{k}. Base layer is not a dict, but "
f"was overwritten by a dictionary."
)
new_config[k] = merge_configs(
dict_type(**new_config[k]), v, path_from_root=path_from_root + k + ".", inplace=True
)
else:
if k in new_config and isinstance(new_config[k], dict):
raise Exception(
f"Invalid config merge at node {path_from_root}{k}. Base layer is a dict, but "
f"was overwritten by a non-dictionary."
)
else:
new_config[k] = new_config_layer[k]
return new_config
def convert_to_bool(value_str: str) -> bool:
"""
Converts string values to their boolean equivalents.
:param value_str: Value string.
:return: The boolean value represented by `value_str`, if any.
:raise: A ValueError if it is not possible to convert the value string to bool.
"""
conversion_dict = {"true": True, "t": True, "y": True, "false": False, "f": False, "n": False}
try:
return conversion_dict[value_str.lower()]
except KeyError as e:
raise ValueError(f"Not a boolean value: {value_str}") from e
def convert_str_value(value_str: str) -> Union[str, int, float, bool]:
"""
Attempts to convert a string value to the most logical type.
Supported types are bool, float and int.
:param value_str: Value to convert, in string format.
:return: The value in its real type, or as a string if no conversion is possible.
"""
try:
return convert_to_bool(value_str)
except ValueError:
pass
try:
return float(value_str)
except ValueError:
pass
try:
return int(value_str)
except ValueError:
return value_str
class ConfigDict(dict):
"""
Dictionary that allows access to its values by attribute for convenience.
"""
def __getattr__(self, item: str) -> Any:
try:
return self[item]
except KeyError as e:
raise AttributeError(f"No such configuration variable or namespace: '{item}'") from e
def cast_values(self) -> None:
"""
Attempts to convert the leaf values of the dictionary to their real type.
"""
for k, v in self.items():
if isinstance(v, ConfigDict):
v.cast_values()
elif isinstance(v, str):
self[k] = convert_str_value(v) | 0.875268 | 0.344085 |
from PyQt5 import QtWidgets, uic, QtCore, QtGui
from ui.utils.dialogs import *
from ui.utils.radiomics import *
import sys
class initUI(QtWidgets.QMainWindow):
features = ['first_order', 'glcm', 'gldm', 'glrlm', 'glszm', 'ngtdm', 'shape', 'shape_2D']
def __init__(self):
super(initUI, self).__init__() # Call the inherited classes __init__ method
uic.loadUi('radiomics-feature-extractor.ui', self) # Load the .ui file
# Initialize main window
self._init_main_window()
# Initialize tabs
self._init_input_tab()
self._init_settings_tab()
self._init_features_tab()
self._init_feature_analysis_tab()
self.show() # Show the GUI
def _init_main_window(self):
# Init tab widget
self.tab_widget = self.findChild(QtWidgets.QTabWidget, 'tabWidget')
self.tab_widget.setTabVisible(0, True)
self.tab_widget.setTabVisible(1, False)
self.tab_widget.setTabVisible(2, False)
self.tab_widget.setTabVisible(3, False)
# Disable click on the widget bar using the eventFilter
self.tab_widget.tabBar().installEventFilter(self)
# Init push buttons
self.next_btn = self.findChild(QtWidgets.QPushButton, 'next_btn')
self.back_btn = self.findChild(QtWidgets.QPushButton, 'back_btn')
self.reset_btn = self.findChild(QtWidgets.QPushButton, 'reset_btn')
self.next_btn.clicked.connect(self.next_button_clicked)
self.back_btn.clicked.connect(self.back_button_clicked)
self.reset_btn.clicked.connect(self.reset_button_clicked)
# Init debug mode
self.debug_mode_checkbox = self.findChild(QtWidgets.QCheckBox, 'debug_mode_checkbox')
self.log_text_edit = self.findChild(QtWidgets.QTextEdit, 'log_text_edit')
self.log_label = self.findChild(QtWidgets.QLabel, 'log_label')
self.log_text_edit.setProperty('visible', False)
self.log_label.setProperty('visible', False)
self.debug_mode_checkbox.toggled.connect(self.debug_mode_checkbox_toggled)
def _init_input_tab(self):
self.input_tab = self.findChild(QtWidgets.QWidget, 'input_tab')
# Initialize variables
self.image_file_path = None
self.ROI_file_path = None
self.dataset_path = None
# Init radio buttons
self.single_image_radio = self.findChild(QtWidgets.QRadioButton, 'single_image_radio')
self.single_image_radio.toggled.connect(lambda l: {
self.upload_image_btn.setProperty('enabled', True),
self.upload_ROI_btn.setProperty('enabled', True),
self.upload_csv_btn.setProperty('enabled', False),
self._clear_input()
})
self.batch_images_radio = self.findChild(QtWidgets.QRadioButton, 'batch_images_radio')
self.batch_images_radio.toggled.connect(lambda l: {
self.upload_image_btn.setProperty('enabled', False),
self.upload_ROI_btn.setProperty('enabled', False),
self.upload_csv_btn.setProperty('enabled', True),
self._clear_input()
})
# Init push buttons
self.upload_image_btn = self.findChild(QtWidgets.QPushButton, 'upload_image_btn')
self.upload_ROI_btn = self.findChild(QtWidgets.QPushButton, 'upload_ROI_btn')
self.upload_csv_btn = self.findChild(QtWidgets.QPushButton, 'upload_csv_btn')
self.upload_image_btn.clicked.connect(lambda l: open_dicom_image(self))
self.upload_ROI_btn.clicked.connect(lambda l: open_dicom_ROI(self))
self.upload_csv_btn.clicked.connect(lambda l: open_csv_file(self))
# Init path labels
self.label_image_path = self.findChild(QtWidgets.QLabel, 'label_image_path')
self.label_ROI_path = self.findChild(QtWidgets.QLabel, 'label_ROI_path')
self.label_csv_path = self.findChild(QtWidgets.QLabel, 'label_csv_path')
def _init_settings_tab(self):
self.settings_tab = self.findChild(QtWidgets.QWidget, 'settings_tab')
# Initialize variables
self.is_any_feature_checkbox_selected = False
# Initialize checkboxes
for feature in self.features:
self.__setattr__('checkbox_' + feature, self.findChild(QtWidgets.QCheckBox, 'checkbox_' + feature))
self.__getattribute__('checkbox_' + feature).toggled.connect(self.feature_checkbox_toggled)
# Initialize buttons
self.select_all_none_btn = self.findChild(QtWidgets.QPushButton, 'select_all_none_btn')
self.select_all_none_btn.clicked.connect(self.select_all_none_btn_clicked)
def _init_features_tab(self):
self.extracted_features_table_view = self.findChild(QtWidgets.QTableView, 'extracted_features_table_view')
self.extracted_features_model = QtGui.QStandardItemModel(self)
self.extracted_features_table_view.setModel(self.extracted_features_model)
def _init_feature_analysis_tab(self):
self.feature_importance_column_view = self.findChild(QtWidgets.QColumnView, 'feature_importance_column_view')
self.feature_importance_model = QtGui.QStandardItemModel(self)
self.feature_importance_column_view.setModel(self.feature_importance_model)
@QtCore.pyqtSlot()
def feature_checkbox_toggled(self):
if self._is_any_feature_selected():
self.next_btn.setProperty('enabled', True)
else:
self.next_btn.setProperty('enabled', False)
@QtCore.pyqtSlot()
def select_all_none_btn_clicked(self):
if self.select_all_none_btn.text() == 'Select All':
self.select_all_none_btn.setText('Select None')
self.next_btn.setProperty('enabled', True)
for feature in self.features:
self.__getattribute__('checkbox_' + feature).setProperty('checked', True)
else:
self.select_all_none_btn.setText('Select All')
self.next_btn.setProperty('enabled', False)
for feature in self.features:
self.__getattribute__('checkbox_' + feature).setProperty('checked', False)
@QtCore.pyqtSlot()
def next_button_clicked(self):
# Go to the next tab if current is ready
if self._is_tab_ready():
# For each specific tab execute the logic
# Settings tab
if self.tab_widget.currentIndex() == 1:
# Execute pyradiomics feature extraction
pyradiomics_extraction(self, self.image_file_path, self.ROI_file_path, self.dataset_path)
# Go to the next tab if exists
if self.tab_widget.currentIndex() < self.tab_widget.count() - 1: # -1 because it is not zero based
self.tab_widget.setTabVisible(self.tab_widget.currentIndex() + 1, True)
self.tab_widget.setCurrentIndex(self.tab_widget.currentIndex() + 1)
if not self._is_tab_ready():
# Disable next button
self.next_btn.setProperty('enabled', False)
# Enable back button
self.back_btn.setProperty('enabled', True)
@QtCore.pyqtSlot()
def back_button_clicked(self):
# Go to the previous tab if it exists
if self.tab_widget.currentIndex() > 0:
self.tab_widget.setTabVisible(self.tab_widget.currentIndex(), False)
# self.tab_widget.setCurrentIndex(self.tab_widget.currentIndex() - 1)
# Disable back button if you are on the first tab
if self.tab_widget.currentIndex() == 0:
self.back_btn.setProperty('enabled', False)
# Enable next button
self.next_btn.setProperty('enabled', True)
@QtCore.pyqtSlot()
def debug_mode_checkbox_toggled(self):
self.log_text_edit.setProperty('visible', not self.log_text_edit.property('visible'))
self.log_label.setProperty('visible', not self.log_label.property('visible'))
@QtCore.pyqtSlot()
def reset_button_clicked(self):
self._clear_input()
self._hide_tabs()
self.log_text_edit.clear()
def _is_tab_ready(self):
# Input tab
if self.tab_widget.currentIndex() == 0:
return (self.image_file_path and self.ROI_file_path) or self.dataset_path
# Settings tab
elif self.tab_widget.currentIndex() == 1:
return self._is_any_feature_selected()
# Features tab
elif self.tab_widget.currentIndex() == 2:
return True
def _hide_tabs(self):
# Hide all tabs and move on the first one
tabs_count = self.tab_widget.count() # because it is not zero based
for index in range(tabs_count):
self.tab_widget.setTabVisible(index + 1, False)
self.tab_widget.setCurrentIndex(0)
def _clear_input(self):
self.image_file_path = None
self.ROI_file_path = None
self.dataset_path = None
self.next_btn.setProperty('enabled', False)
self.label_image_path.setText('')
self.label_ROI_path.setText('')
self.label_csv_path.setText('')
def _is_any_feature_selected(self):
for feature in self.features:
feature_checkbox = self.__getattribute__('checkbox_' + feature)
if feature_checkbox.property('checked'):
return True
return False
def eventFilter(self, obj, event):
if event.type() == QtCore.QEvent.KeyPress and (event.key() == 16777217 or event.key() == 16777218):
return True # eat alt+tab or alt+shift+tab key
if event.type() in (QtCore.QEvent.MouseButtonPress, QtCore.QEvent.MouseButtonDblClick):
return True # eat mouse click
else:
# standard event processing
return super(initUI, self).eventFilter(obj, event)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
view = initUI()
app.exec_() | pyradiomics/ui/main.py | from PyQt5 import QtWidgets, uic, QtCore, QtGui
from ui.utils.dialogs import *
from ui.utils.radiomics import *
import sys
class initUI(QtWidgets.QMainWindow):
features = ['first_order', 'glcm', 'gldm', 'glrlm', 'glszm', 'ngtdm', 'shape', 'shape_2D']
def __init__(self):
super(initUI, self).__init__() # Call the inherited classes __init__ method
uic.loadUi('radiomics-feature-extractor.ui', self) # Load the .ui file
# Initialize main window
self._init_main_window()
# Initialize tabs
self._init_input_tab()
self._init_settings_tab()
self._init_features_tab()
self._init_feature_analysis_tab()
self.show() # Show the GUI
def _init_main_window(self):
# Init tab widget
self.tab_widget = self.findChild(QtWidgets.QTabWidget, 'tabWidget')
self.tab_widget.setTabVisible(0, True)
self.tab_widget.setTabVisible(1, False)
self.tab_widget.setTabVisible(2, False)
self.tab_widget.setTabVisible(3, False)
# Disable click on the widget bar using the eventFilter
self.tab_widget.tabBar().installEventFilter(self)
# Init push buttons
self.next_btn = self.findChild(QtWidgets.QPushButton, 'next_btn')
self.back_btn = self.findChild(QtWidgets.QPushButton, 'back_btn')
self.reset_btn = self.findChild(QtWidgets.QPushButton, 'reset_btn')
self.next_btn.clicked.connect(self.next_button_clicked)
self.back_btn.clicked.connect(self.back_button_clicked)
self.reset_btn.clicked.connect(self.reset_button_clicked)
# Init debug mode
self.debug_mode_checkbox = self.findChild(QtWidgets.QCheckBox, 'debug_mode_checkbox')
self.log_text_edit = self.findChild(QtWidgets.QTextEdit, 'log_text_edit')
self.log_label = self.findChild(QtWidgets.QLabel, 'log_label')
self.log_text_edit.setProperty('visible', False)
self.log_label.setProperty('visible', False)
self.debug_mode_checkbox.toggled.connect(self.debug_mode_checkbox_toggled)
def _init_input_tab(self):
self.input_tab = self.findChild(QtWidgets.QWidget, 'input_tab')
# Initialize variables
self.image_file_path = None
self.ROI_file_path = None
self.dataset_path = None
# Init radio buttons
self.single_image_radio = self.findChild(QtWidgets.QRadioButton, 'single_image_radio')
self.single_image_radio.toggled.connect(lambda l: {
self.upload_image_btn.setProperty('enabled', True),
self.upload_ROI_btn.setProperty('enabled', True),
self.upload_csv_btn.setProperty('enabled', False),
self._clear_input()
})
self.batch_images_radio = self.findChild(QtWidgets.QRadioButton, 'batch_images_radio')
self.batch_images_radio.toggled.connect(lambda l: {
self.upload_image_btn.setProperty('enabled', False),
self.upload_ROI_btn.setProperty('enabled', False),
self.upload_csv_btn.setProperty('enabled', True),
self._clear_input()
})
# Init push buttons
self.upload_image_btn = self.findChild(QtWidgets.QPushButton, 'upload_image_btn')
self.upload_ROI_btn = self.findChild(QtWidgets.QPushButton, 'upload_ROI_btn')
self.upload_csv_btn = self.findChild(QtWidgets.QPushButton, 'upload_csv_btn')
self.upload_image_btn.clicked.connect(lambda l: open_dicom_image(self))
self.upload_ROI_btn.clicked.connect(lambda l: open_dicom_ROI(self))
self.upload_csv_btn.clicked.connect(lambda l: open_csv_file(self))
# Init path labels
self.label_image_path = self.findChild(QtWidgets.QLabel, 'label_image_path')
self.label_ROI_path = self.findChild(QtWidgets.QLabel, 'label_ROI_path')
self.label_csv_path = self.findChild(QtWidgets.QLabel, 'label_csv_path')
def _init_settings_tab(self):
self.settings_tab = self.findChild(QtWidgets.QWidget, 'settings_tab')
# Initialize variables
self.is_any_feature_checkbox_selected = False
# Initialize checkboxes
for feature in self.features:
self.__setattr__('checkbox_' + feature, self.findChild(QtWidgets.QCheckBox, 'checkbox_' + feature))
self.__getattribute__('checkbox_' + feature).toggled.connect(self.feature_checkbox_toggled)
# Initialize buttons
self.select_all_none_btn = self.findChild(QtWidgets.QPushButton, 'select_all_none_btn')
self.select_all_none_btn.clicked.connect(self.select_all_none_btn_clicked)
def _init_features_tab(self):
self.extracted_features_table_view = self.findChild(QtWidgets.QTableView, 'extracted_features_table_view')
self.extracted_features_model = QtGui.QStandardItemModel(self)
self.extracted_features_table_view.setModel(self.extracted_features_model)
def _init_feature_analysis_tab(self):
self.feature_importance_column_view = self.findChild(QtWidgets.QColumnView, 'feature_importance_column_view')
self.feature_importance_model = QtGui.QStandardItemModel(self)
self.feature_importance_column_view.setModel(self.feature_importance_model)
@QtCore.pyqtSlot()
def feature_checkbox_toggled(self):
if self._is_any_feature_selected():
self.next_btn.setProperty('enabled', True)
else:
self.next_btn.setProperty('enabled', False)
@QtCore.pyqtSlot()
def select_all_none_btn_clicked(self):
if self.select_all_none_btn.text() == 'Select All':
self.select_all_none_btn.setText('Select None')
self.next_btn.setProperty('enabled', True)
for feature in self.features:
self.__getattribute__('checkbox_' + feature).setProperty('checked', True)
else:
self.select_all_none_btn.setText('Select All')
self.next_btn.setProperty('enabled', False)
for feature in self.features:
self.__getattribute__('checkbox_' + feature).setProperty('checked', False)
@QtCore.pyqtSlot()
def next_button_clicked(self):
# Go to the next tab if current is ready
if self._is_tab_ready():
# For each specific tab execute the logic
# Settings tab
if self.tab_widget.currentIndex() == 1:
# Execute pyradiomics feature extraction
pyradiomics_extraction(self, self.image_file_path, self.ROI_file_path, self.dataset_path)
# Go to the next tab if exists
if self.tab_widget.currentIndex() < self.tab_widget.count() - 1: # -1 because it is not zero based
self.tab_widget.setTabVisible(self.tab_widget.currentIndex() + 1, True)
self.tab_widget.setCurrentIndex(self.tab_widget.currentIndex() + 1)
if not self._is_tab_ready():
# Disable next button
self.next_btn.setProperty('enabled', False)
# Enable back button
self.back_btn.setProperty('enabled', True)
@QtCore.pyqtSlot()
def back_button_clicked(self):
# Go to the previous tab if it exists
if self.tab_widget.currentIndex() > 0:
self.tab_widget.setTabVisible(self.tab_widget.currentIndex(), False)
# self.tab_widget.setCurrentIndex(self.tab_widget.currentIndex() - 1)
# Disable back button if you are on the first tab
if self.tab_widget.currentIndex() == 0:
self.back_btn.setProperty('enabled', False)
# Enable next button
self.next_btn.setProperty('enabled', True)
@QtCore.pyqtSlot()
def debug_mode_checkbox_toggled(self):
self.log_text_edit.setProperty('visible', not self.log_text_edit.property('visible'))
self.log_label.setProperty('visible', not self.log_label.property('visible'))
@QtCore.pyqtSlot()
def reset_button_clicked(self):
self._clear_input()
self._hide_tabs()
self.log_text_edit.clear()
def _is_tab_ready(self):
# Input tab
if self.tab_widget.currentIndex() == 0:
return (self.image_file_path and self.ROI_file_path) or self.dataset_path
# Settings tab
elif self.tab_widget.currentIndex() == 1:
return self._is_any_feature_selected()
# Features tab
elif self.tab_widget.currentIndex() == 2:
return True
def _hide_tabs(self):
# Hide all tabs and move on the first one
tabs_count = self.tab_widget.count() # because it is not zero based
for index in range(tabs_count):
self.tab_widget.setTabVisible(index + 1, False)
self.tab_widget.setCurrentIndex(0)
def _clear_input(self):
self.image_file_path = None
self.ROI_file_path = None
self.dataset_path = None
self.next_btn.setProperty('enabled', False)
self.label_image_path.setText('')
self.label_ROI_path.setText('')
self.label_csv_path.setText('')
def _is_any_feature_selected(self):
for feature in self.features:
feature_checkbox = self.__getattribute__('checkbox_' + feature)
if feature_checkbox.property('checked'):
return True
return False
def eventFilter(self, obj, event):
if event.type() == QtCore.QEvent.KeyPress and (event.key() == 16777217 or event.key() == 16777218):
return True # eat alt+tab or alt+shift+tab key
if event.type() in (QtCore.QEvent.MouseButtonPress, QtCore.QEvent.MouseButtonDblClick):
return True # eat mouse click
else:
# standard event processing
return super(initUI, self).eventFilter(obj, event)
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
view = initUI()
app.exec_() | 0.396302 | 0.056366 |
import os
import sys
import numpy as np
import tensorflow as tf
import os
import numpy as np
from skimage.io import imread
from skimage.transform import resize
sys.path.append('/Users/moon-il')
sys.path.append('/anaconda3/lib/python3.7/site-packages/IPython/extensions')
sys.path.append('/Users/moon-il/.ipython')
sys.path.remove('/Users/moon-il')
sys.path.remove('/anaconda3/lib/python3.7/site-packages/IPython/extensions')
sys.path.remove('/Users/moon-il/.ipython')
# 현재 traine데이터 폴더로 경로를 지정한다.
# train폴더 - drawings, porn, sexy, neutral, hentai메모장을 만들어 놓는다. 해당 메모장에 사진주소로 입력해야 된다.
subset_dir = '/Users/moon-il/Work_Space/PycharmProjects/untitled/nsfw_data_scraper-master/raw_data/train/'
filename_list = os.listdir(subset_dir)
# 현재 그 폴더에 5개의 메모장이 있다.
# 위에서 filename_list에서 DS_Store란 파일이 생겨 직접 지우기
for s in filename_list:
if '.DS_Store' in s:
filename_list.remove('.DS_Store')
break;
# 전체 사진 url, 라벨
filename_list_url = []
filename_list_label = []
# 데이터 출력
for i in filename_list:
print(i)
f = open(subset_dir + i, 'r')
lines = f.readlines()
# 각 url에서 \n없애기
data_t = []
for ii in lines:
data_t.append(ii[:-1])
data_t2 = []
for line in data_t:
try:
data_t2.append(line)
print(line)
filename_list_url.append(line)
filename_list_label.append(i)
except:
pass
if (len(data_t2) == 10000): # 각 라벨당 몇장씩 뽑을지 결정하기 -> 10을 입력한다면 -> 사진 10장씩만 뽑음 -> 총 50장 -> 40장은 train, 10장은 validation
break
# 데이터 변환
from PIL import Image
import os, glob, numpy as np
from sklearn.model_selection import train_test_split
# 파일 리스트 길이 출력
nb_classes = len(filename_list)
# 우린 64*64로 사진shape변환
image_w = 64
image_h = 64
pixels = image_h * image_w * 3
X = []
y = []
# 라벨 데이터 생성
for idx, cat in enumerate(filename_list):
# one-hot 돌리기.
label = [0 for i in range(nb_classes)]
print(label)
label[idx] = 1
print(label)
for i, f in enumerate(filename_list_url):
print(filename_list_label[i])
print(f)
if i % 100 == 0:
print(i)
try:
img = imread(filename_list_url[i]) # shape: (H, W, 3), range: [0, 255]
img = resize(img, (image_w, image_h, 3), mode='constant').astype(np.float32)
data = np.asarray(img)
X.append(data)
y.append(filename_list_label[i])
except:
pass
X = np.array(X)
y = np.array(y)
print(X)
print(y)
# 1 0 0 0 0이면 drawings
# 0 1 0 0 0이면 hentai
X_train, X_test, y_train, y_test = train_test_split(X, y)
xy = (X_train, X_test, y_train, y_test)
np.save("./numpy_data/multi_image_data.npy", xy)
# 직접 경로 수정하기
print("ok", len(y))
# 실행하기 전 해야 할 일
# 1. 현재 traine데이터 폴더로 경로를 지정한다.
# train폴더 - drawings, porn, sexy, neutral, hentai메모장을 만들어 놓는다. 해당 메모장에 사진주소로 입력해야 된다.
# 2. 20라인, 117라인에서 데이터 불러오기(subset_dir), 데이터 변환 저장하기(np.save) 경로를 해당 로컬에 맞게 수정해줘야 한다. | nsfw_trian/train_data_transfer.py | import os
import sys
import numpy as np
import tensorflow as tf
import os
import numpy as np
from skimage.io import imread
from skimage.transform import resize
sys.path.append('/Users/moon-il')
sys.path.append('/anaconda3/lib/python3.7/site-packages/IPython/extensions')
sys.path.append('/Users/moon-il/.ipython')
sys.path.remove('/Users/moon-il')
sys.path.remove('/anaconda3/lib/python3.7/site-packages/IPython/extensions')
sys.path.remove('/Users/moon-il/.ipython')
# 현재 traine데이터 폴더로 경로를 지정한다.
# train폴더 - drawings, porn, sexy, neutral, hentai메모장을 만들어 놓는다. 해당 메모장에 사진주소로 입력해야 된다.
subset_dir = '/Users/moon-il/Work_Space/PycharmProjects/untitled/nsfw_data_scraper-master/raw_data/train/'
filename_list = os.listdir(subset_dir)
# 현재 그 폴더에 5개의 메모장이 있다.
# 위에서 filename_list에서 DS_Store란 파일이 생겨 직접 지우기
for s in filename_list:
if '.DS_Store' in s:
filename_list.remove('.DS_Store')
break;
# 전체 사진 url, 라벨
filename_list_url = []
filename_list_label = []
# 데이터 출력
for i in filename_list:
print(i)
f = open(subset_dir + i, 'r')
lines = f.readlines()
# 각 url에서 \n없애기
data_t = []
for ii in lines:
data_t.append(ii[:-1])
data_t2 = []
for line in data_t:
try:
data_t2.append(line)
print(line)
filename_list_url.append(line)
filename_list_label.append(i)
except:
pass
if (len(data_t2) == 10000): # 각 라벨당 몇장씩 뽑을지 결정하기 -> 10을 입력한다면 -> 사진 10장씩만 뽑음 -> 총 50장 -> 40장은 train, 10장은 validation
break
# 데이터 변환
from PIL import Image
import os, glob, numpy as np
from sklearn.model_selection import train_test_split
# 파일 리스트 길이 출력
nb_classes = len(filename_list)
# 우린 64*64로 사진shape변환
image_w = 64
image_h = 64
pixels = image_h * image_w * 3
X = []
y = []
# 라벨 데이터 생성
for idx, cat in enumerate(filename_list):
# one-hot 돌리기.
label = [0 for i in range(nb_classes)]
print(label)
label[idx] = 1
print(label)
for i, f in enumerate(filename_list_url):
print(filename_list_label[i])
print(f)
if i % 100 == 0:
print(i)
try:
img = imread(filename_list_url[i]) # shape: (H, W, 3), range: [0, 255]
img = resize(img, (image_w, image_h, 3), mode='constant').astype(np.float32)
data = np.asarray(img)
X.append(data)
y.append(filename_list_label[i])
except:
pass
X = np.array(X)
y = np.array(y)
print(X)
print(y)
# 1 0 0 0 0이면 drawings
# 0 1 0 0 0이면 hentai
X_train, X_test, y_train, y_test = train_test_split(X, y)
xy = (X_train, X_test, y_train, y_test)
np.save("./numpy_data/multi_image_data.npy", xy)
# 직접 경로 수정하기
print("ok", len(y))
# 실행하기 전 해야 할 일
# 1. 현재 traine데이터 폴더로 경로를 지정한다.
# train폴더 - drawings, porn, sexy, neutral, hentai메모장을 만들어 놓는다. 해당 메모장에 사진주소로 입력해야 된다.
# 2. 20라인, 117라인에서 데이터 불러오기(subset_dir), 데이터 변환 저장하기(np.save) 경로를 해당 로컬에 맞게 수정해줘야 한다. | 0.098751 | 0.254115 |
__author__ = '<NAME>'
__copyright__ = 'Fraunhofer IDMT'
# imports
import torch
import torch.nn as nn
from torch.autograd import Variable
class BiGRUEncoder(nn.Module):
    """Bi-directional GRU encoder.

    Runs one GRU cell forward and one backward over a (B, T, N) sequence and
    returns a (B, T, 2*N) encoding; each direction's hidden state is summed
    with the corresponding input frame (residual) before concatenation.
    """
    def __init__(self, B, T, N):
        """
        Args :
            B : (int) Batch size
            T : (int) Length of the time-sequence.
            N : (int) Original dimensionality of the input.
        """
        super(BiGRUEncoder, self).__init__()
        self._B = B
        self._T = T
        self._N = N
        # Bi-GRU Encoder: one cell per direction
        self.gruF = nn.GRUCell(self._N, self._N)
        self.gruB = nn.GRUCell(self._N, self._N)
        # Initialize the weights
        self.initialize_encoder()

    def initialize_encoder(self):
        """
        Manual weight/bias initialization.
        """
        # Bug fix: `nn.init.orthogonal` / `xavier_normal` are deprecated (and
        # removed in recent PyTorch); the in-place `_` variants are the
        # supported API.
        nn.init.orthogonal_(self.gruF.weight_hh)
        nn.init.xavier_normal_(self.gruF.weight_ih)
        self.gruF.bias_hh.data.zero_()
        self.gruF.bias_ih.data.zero_()
        nn.init.orthogonal_(self.gruB.weight_hh)
        nn.init.xavier_normal_(self.gruB.weight_ih)
        self.gruB.bias_hh.data.zero_()
        self.gruB.bias_ih.data.zero_()
        print('Initialization of the Bi-GRU encoder done...')
        return None

    def forward(self, x_in):
        """Encode `x_in` of shape (B, T, N) into a (B, T, 2*N) tensor."""
        # `Variable` is deprecated; plain tensors (requires_grad=False by
        # default) are equivalent.
        h_enc = torch.zeros(self._B, self._T, 2 * self._N)
        # Initialization of the hidden states
        h_t_fr = torch.zeros(self._B, self._N)
        h_t_bk = torch.zeros(self._B, self._N)
        # Bug fix: `torch.has_cudnn` only reports cuDNN *build* support and can
        # be true on a machine without a usable GPU, crashing on `.cuda()`;
        # check for an actual device instead.
        if torch.cuda.is_available():
            h_enc = h_enc.cuda()
            h_t_fr = h_t_fr.cuda()
            h_t_bk = h_t_bk.cuda()
        for t in range(self._T):
            # Forward direction reads frame t; backward direction reads the
            # mirrored frame T-t-1.
            h_t_fr = self.gruF(x_in[:, t, :], h_t_fr)
            h_t_bk = self.gruB(x_in[:, self._T - t - 1, :], h_t_bk)
            h_t = torch.cat((h_t_fr + x_in[:, t, :], h_t_bk + x_in[:, self._T - t - 1, :]), dim=1)
            h_enc[:, t, :] = h_t
        return h_enc
class GRUDecoder(nn.Module):
    """Uni-directional GRU decoder mapping a (B, T, N) encoding to (B, T, N)."""
    def __init__(self, B, T, N):
        """
        Args :
            B : (int) Batch size
            T : (int) Length of the time-sequence.
            N : (int) Original dimensionality of the input.
        """
        super(GRUDecoder, self).__init__()
        self._B = B
        self._T = T
        self._N = N
        # GRU decoder cell
        self.gruDec = nn.GRUCell(self._N, self._N)
        # Initialize the weights
        self.initialize_decoder()

    def initialize_decoder(self):
        """
        Manual weight/bias initialization.
        """
        # Bug fix: `nn.init.orthogonal` / `xavier_normal` are deprecated (and
        # removed in recent PyTorch); use the in-place `_` variants.
        nn.init.orthogonal_(self.gruDec.weight_hh)
        nn.init.xavier_normal_(self.gruDec.weight_ih)
        self.gruDec.bias_hh.data.zero_()
        self.gruDec.bias_ih.data.zero_()
        print('Initialization of the GRU decoder done...')
        return None

    def forward(self, h_enc):
        """Decode `h_enc` of shape (B, T, N) step by step into (B, T, N)."""
        # `Variable` is deprecated; plain tensors are equivalent.
        h_dec = torch.zeros(self._B, self._T, self._N)
        # Initialization of the hidden state
        h_h_t = torch.zeros(self._B, self._N)
        # Bug fix: `torch.has_cudnn` can be true without a usable GPU; check
        # for an actual CUDA device instead.
        if torch.cuda.is_available():
            h_dec = h_dec.cuda()
            h_h_t = h_h_t.cuda()
        for t in range(self._T):
            # One GRU step per time frame.
            h_h_t = self.gruDec(h_enc[:, t, :], h_h_t)
            h_dec[:, t, :] = h_h_t
        return h_dec
# EOF | nn_modules/cls_grus.py | __author__ = '<NAME>'
__copyright__ = 'Fraunhofer IDMT'
# imports
import torch
import torch.nn as nn
from torch.autograd import Variable
class BiGRUEncoder(nn.Module):
""" Class for bi-directional gated recurrent units.
"""
def __init__(self, B, T, N):
"""
Args :
B : (int) Batch size
T : (int) Length of the time-sequence.
N : (int) Original dimensionallity of the input.
"""
super(BiGRUEncoder, self).__init__()
self._B = B
self._T = T
self._N = N
# Bi-GRU Encoder
self.gruF = nn.GRUCell(self._N, self._N)
self.gruB = nn.GRUCell(self._N, self._N)
# Initialize the weights
self.initialize_encoder()
def initialize_encoder(self):
"""
Manual weight/bias initialization.
"""
nn.init.orthogonal(self.gruF.weight_hh)
nn.init.xavier_normal(self.gruF.weight_ih)
self.gruF.bias_hh.data.zero_()
self.gruF.bias_ih.data.zero_()
nn.init.orthogonal(self.gruB.weight_hh)
nn.init.xavier_normal(self.gruB.weight_ih)
self.gruB.bias_hh.data.zero_()
self.gruB.bias_ih.data.zero_()
print('Initialization of the Bi-GRU encoder done...')
return None
def forward(self, x_in):
h_enc = Variable(torch.zeros(self._B, self._T, 2 * self._N), requires_grad=False)
# Initialization of the hidden states
h_t_fr = Variable(torch.zeros(self._B, self._N), requires_grad=False)
h_t_bk = Variable(torch.zeros(self._B, self._N), requires_grad=False)
if torch.has_cudnn:
h_enc = h_enc.cuda()
h_t_fr = h_t_fr.cuda()
h_t_bk = h_t_bk.cuda()
for t in range(self._T):
# Bi-GRU Encoding
h_t_fr = self.gruF((x_in[:, t, :]), h_t_fr)
h_t_bk = self.gruB((x_in[:, self._T - t - 1, :]), h_t_bk)
h_t = torch.cat((h_t_fr + x_in[:, t, :], h_t_bk + x_in[:, self._T - t - 1, :]), dim=1)
h_enc[:, t, :] = h_t
return h_enc
class GRUDecoder(nn.Module):
""" Class for GRU decoder.
"""
def __init__(self, B, T, N):
"""
Args :
B : (int) Batch size
T : (int) Length of the time-sequence.
N : (int) Original dimensionallity of the input.
"""
super(GRUDecoder, self).__init__()
self._B = B
self._T = T
self._N = N
# Bi-GRU Encoder
self.gruDec = nn.GRUCell(self._N, self._N)
# Initialize the weights
self.initialize_decoder()
def initialize_decoder(self):
"""
Manual weight/bias initialization.
"""
nn.init.orthogonal(self.gruDec.weight_hh)
nn.init.xavier_normal(self.gruDec.weight_ih)
self.gruDec.bias_hh.data.zero_()
self.gruDec.bias_ih.data.zero_()
print('Initialization of the GRU decoder done...')
return None
def forward(self, h_enc):
h_dec = Variable(torch.zeros(self._B, self._T, self._N), requires_grad=False)
# Initialization of the hidden states
h_h_t = Variable(torch.zeros(self._B, self._N), requires_grad=False)
if torch.has_cudnn:
h_dec = h_dec.cuda()
h_h_t = h_h_t.cuda()
for t in range(self._T):
# Bi-GRU Encoding
h_h_t = self.gruDec((h_enc[:, t, :]), h_h_t)
h_dec[:, t, :] = h_h_t
return h_dec
# EOF | 0.937469 | 0.42471 |
from datanator_query_python.util import mongo_util, file_util
from pymongo.collation import Collation, CollationStrength
import json
class QuerySabioCompound(mongo_util.MongoUtil):
    """Query interface for the `sabio_compound` collection in datanator."""

    def __init__(self, username=None, password=None, server=None, authSource='admin',
                 database='datanator', max_entries=float('inf'), verbose=True, collection_str='sabio_compound',
                 readPreference='nearest', replicaSet=None):
        """Connect to MongoDB and bind the sabio_compound collection.

        Bug fix: the `password` default was an unexpanded `<PASSWORD>`
        redaction placeholder (a syntax error); default to None like
        `username`.
        """
        super().__init__(MongoDB=server,
                         db=database,
                         verbose=verbose, max_entries=max_entries, username=username,
                         password=password, authSource=authSource, readPreference=readPreference,
                         replicaSet=replicaSet)
        self.file_manager = file_util.FileUtil()
        self.max_entries = max_entries
        self.verbose = verbose
        self.db = self.db_obj
        self.collection = self.db[collection_str]
        # Case-insensitive matching for name/synonym lookups.
        self.collation = Collation(locale='en', strength=CollationStrength.SECONDARY)
        self.collection_str = collection_str

    def get_id_by_name(self, names):
        """Get sabio compound id given compound name

        Args:
            names (:obj:`list` of :obj:`str`): names of the compound

        Return:
            (:obj:`list` of :obj:`int`): sabio compound ids
        """
        result = []
        name_field = 'name'
        synonym_field = 'synonyms'
        # Match either the canonical name or any synonym.
        pos_0 = {name_field: {'$in': names}}
        pos_1 = {synonym_field: {'$in': names}}
        query = {'$or': [pos_0, pos_1]}
        projection = {'_id': 1}
        docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
        for doc in docs:
            result.append(doc['_id'])
        return result

    def get_inchikey_by_name(self, names):
        """Get compound InChIKey using compound names.

        Args:
            names (:obj:`list` of :obj:`str`): Names of compounds.

        Return:
            (:obj:`list` of :obj:`str`): List of inchikeys (not in the order of the input list).
        """
        result = []
        synonym_field = 'synonyms'
        pos_0 = {'name': {'$in': names}}
        pos_1 = {synonym_field: {'$in': names}}
        query = {'$or': [pos_0, pos_1]}
        projection = {'inchi_key': 1}
        docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
        # NOTE(review): pymongo's find() returns a cursor, never None; this
        # guard is effectively dead code, kept for backward compatibility.
        if docs is None:
            return result
        else:
            for doc in docs:
                result.append(doc['inchi_key'])
            return result
from pymongo.collation import Collation, CollationStrength
import json
class QuerySabioCompound(mongo_util.MongoUtil):
def __init__(self, username=None, password=<PASSWORD>, server=None, authSource='admin',
database='datanator', max_entries=float('inf'), verbose=True, collection_str='sabio_compound',
readPreference='nearest', replicaSet=None):
super().__init__(MongoDB=server,
db=database,
verbose=verbose, max_entries=max_entries, username=username,
password=password, authSource=authSource, readPreference=readPreference,
replicaSet=replicaSet)
self.file_manager = file_util.FileUtil()
self.max_entries = max_entries
self.verbose = verbose
self.db = self.db_obj
self.collection = self.db[collection_str]
self.collation = Collation(locale='en', strength=CollationStrength.SECONDARY)
self.collection_str = collection_str
def get_id_by_name(self, names):
"""Get sabio compound id given compound name
Args:
name (:obj:`list` of :obj:`str`): names of the compound
Return:
(:obj:`list` of :obj:`int`): sabio compound ids
"""
result = []
name_field = 'name'
synonym_field = 'synonyms'
pos_0 = {name_field: {'$in': names}}
pos_1 = {synonym_field: {'$in': names}}
query = {'$or': [pos_0, pos_1]}
projection = {'_id': 1}
docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
for doc in docs:
result.append(doc['_id'])
return result
def get_inchikey_by_name(self, names):
"""Get compound InChIKey using compound names.
Args:
names (:obj:`list` of :obj:`str`): Names of compounds.
Return:
(:obj:`list` of :obj:`str`): List of inchikeys (not in the order of the input list).
"""
result = []
synonym_field = 'synonyms'
pos_0 = {'name': {'$in': names}}
pos_1 = {synonym_field: {'$in': names}}
query = {'$or': [pos_0, pos_1]}
projection = {'inchi_key': 1}
docs = self.collection.find(filter=query, projection=projection, collation=self.collation)
if docs is None:
return result
else:
for doc in docs:
result.append(doc['inchi_key'])
return result | 0.755366 | 0.219358 |
import sys
import socket
import argparse
from worker import calculate_metrix
from worker import export_to_API_dict, write_json, send_to_anodot
from sanity_check import check_format
import conf
import json
import os
import logging
# Configure the rotating server log file; size and backup limits come from conf.
conf.setup_logger('log3', os.path.join(conf.LOGS_DIRNAME, conf.LOGS_SERVER_FILENAME),
                  conf.LOG_SERVER_MAX_SIZE, conf.LOG_SERVER_MAX_BACKUPS)
log = logging.getLogger('log3')
# Keep server messages out of the root logger's handlers.
log.propagate = False
def server_listen(listen, port, n_sockets):
    """Run listen mode for the server. Handle connections sequentially.

    Each connection delivers one JSON payload whose keys follow
    conf.SERVER_API_FORMAT; metrics are computed and either pushed to Anodot
    (when conf.TOKEN is set) or written to a local JSON file.

    Args:
        listen: interface to bind ('' for all interfaces).
        port: TCP port to listen on.
        n_sockets: backlog size passed to socket.listen().
    """
    s = socket.socket()
    s.bind((listen, port))
    s.listen(n_sockets)
    log.info('INFO: Server is now running on (local ip, port) = {}. Server ready.'.format(('0.0.0.0' if listen == '' else '127.0.0.1', port)))
    while True:
        try:
            c, a = s.accept()
        except KeyboardInterrupt:
            # Bug fix: execution previously fell through after the interrupt
            # and used the unbound connection socket `c` (NameError).
            log.warning("WARNING: Server was interrupted by user")
            break
        except SystemExit:
            log.warning("WARNING: Server was interrupted")
            break
        # Read the whole payload until the client closes its end.
        data = b''
        while True:
            block = c.recv(conf.BUFFER_SIZE)
            if not block:
                break
            data += block
        c.close()
        if len(data) > 0:
            try:
                unserialized_input = json.loads(data.decode(encoding='utf-8'))
                unserialized_input = [unserialized_input[x] for x in conf.SERVER_API_FORMAT]
                input_matrix, output_matrix, label_matrix, timestamp, threshold, agent_id, n_classes = unserialized_input
                check_msg, output_matrix, input_matrix, label_matrix = check_format(output_matrix, input_matrix, label_matrix, n_classes)
                if check_msg != "":
                    log.warning("WARNING: Agent ID : {}. Format error in received buffer of size {} : {}".format(agent_id, len(output_matrix), check_msg))
                else:
                    log.info('INFO: Server received buffer of size {}. Origin Agent: {}'.format(len(output_matrix), agent_id))
                    input_metrix, output_metrix, label_metrix, timestamp = calculate_metrix(input_matrix, output_matrix, label_matrix, timestamp, threshold)
                    d = export_to_API_dict(input_metrix, output_metrix, label_metrix, timestamp)
                    if conf.TOKEN is not None:
                        send_to_anodot(d, conf.TOKEN, agent_id)
                    else:
                        write_json(d, agent_id)
            except Exception as err:
                # Top-level boundary: log malformed payloads, keep serving.
                log.warning("WARNING: Error in receiving buffer : {}".format(err))
    # Release the listening socket once the loop exits.
    s.close()
def main():
    """Parse the command line and start the MLWatcher server loop."""
    parser = argparse.ArgumentParser(description='MLWatcher server')
    parser.add_argument('--listen', required=False, default='all', choices=['all', 'localhost'], help='Listen interface : "localhost" for 127.0.0.1, "all" for 0.0.0.0. Default : all')
    parser.add_argument('--port', required=False, default=8000, help="Port where the server side is run. Default : 8000")
    parser.add_argument('--n_sockets', required=False, default=5, help="Number of sockets listening on the server side. Default : 5")
    args = parser.parse_args()
    # 'all' maps to the empty string, which socket.bind treats as INADDR_ANY.
    interface = '' if args.listen == 'all' else args.listen
    server_listen(interface, int(args.port), int(args.n_sockets))

if __name__ == "__main__":
    main()
import socket
import argparse
from worker import calculate_metrix
from worker import export_to_API_dict, write_json, send_to_anodot
from sanity_check import check_format
import conf
import json
import os
import logging
conf.setup_logger('log3', os.path.join(conf.LOGS_DIRNAME, conf.LOGS_SERVER_FILENAME),
conf.LOG_SERVER_MAX_SIZE, conf.LOG_SERVER_MAX_BACKUPS)
log = logging.getLogger('log3')
log.propagate = False
def server_listen(listen, port, n_sockets):
"Run listen mode for the server on all interfaces. Asynchronously handle inputs received."
s = socket.socket()
s.bind((listen, port))
s.listen(n_sockets)
log.info('INFO: Server is now running on (local ip, port) = {}. Server ready.'.format(('0.0.0.0' if listen == '' else '127.0.0.1', port)))
while True:
try:
c, a = s.accept()
except KeyboardInterrupt:
log.warning("WARNING: Server was interrupted by user")
except SystemExit:
log.warning("WARNING: Server was interrupted")
data = b''
while True:
block = c.recv(conf.BUFFER_SIZE)
if not block:
break
data += block
c.close()
if len(data) > 0:
try:
unserialized_input = json.loads(data.decode(encoding='utf-8'))
unserialized_input = [unserialized_input[x] for x in conf.SERVER_API_FORMAT]
input_matrix, output_matrix, label_matrix, timestamp, threshold, agent_id, n_classes = unserialized_input
check_msg, output_matrix, input_matrix, label_matrix = check_format(output_matrix, input_matrix, label_matrix, n_classes)
if check_msg != "":
log.warning("WARNING: Agent ID : {}. Format error in received buffer of size {} : {}".format(agent_id, len(output_matrix), check_msg))
else:
log.info('INFO: Server received buffer of size {}. Origin Agent: {}'.format(len(output_matrix), agent_id))
input_metrix, output_metrix, label_metrix, timestamp = calculate_metrix(input_matrix, output_matrix, label_matrix, timestamp, threshold)
d = export_to_API_dict(input_metrix, output_metrix, label_metrix, timestamp)
if conf.TOKEN is not None:
send_to_anodot(d, conf.TOKEN, agent_id)
else:
write_json(d, agent_id)
except Exception as err:
log.warning("WARNING: Error in receiving buffer : {}".format(err))
def main():
parser = argparse.ArgumentParser(description='MLWatcher server')
parser.add_argument('--listen', required=False, default='all', choices=['all', 'localhost'], help='Listen interface : "localhost" for 127.0.0.1, "all" for 0.0.0.0. Default : all')
parser.add_argument('--port', required=False, default=8000, help="Port where the server side is run. Default : 8000")
parser.add_argument('--n_sockets', required=False, default=5, help="Number of sockets listening on the server side. Default : 5")
args = parser.parse_args()
listen, port, n_sockets = args.listen, int(args.port), int(args.n_sockets)
if listen == 'all':
listen = ''
server_listen(listen, port, n_sockets)
if __name__ == "__main__":
main() | 0.170197 | 0.062331 |
__all__ = [
'__author__',
'__copyright__',
'__credits__',
'__email__',
'__license__',
'__maintainer__',
'__status__',
'__version__',
]
__author__ = "<NAME>, <NAME>, and <NAME>"
__copyright__ = 'Copyright (c) 2011-2018 Digital Bazaar, Inc.'
__credits__ = ["<NAME>", "<NAME>", "<NAME>","<NAME>"]
__email__ = "<EMAIL>"
__license__ = 'New BSD license'
__maintainer__ = "<NAME>"
__status__ = "Production"
__version__ = "1.0.1"
import os

from eve import Eve
from eve_swagger import get_swagger_blueprint
from invoke import task
def schema():
    """Return the Eve DOMAIN definition (Cerberus resource schemas).

    Bug fix: the `username` type was `str()`, which evaluates to the empty
    string `''`; Cerberus/Eve expect the type *name* `'string'`.
    """
    return {
        "test": {},
        "users": {
            'item_title': 'member',
            'schema': {
                'username': {
                    "type": 'string',
                    "minlength": 5,
                    "maxlength": 25,
                },
            }
        }
    }
def openapi_info():
    """Return the OpenAPI/Swagger metadata block for the API."""
    contact = {
        'name': 'Dwight (@denzuko) Spencer',
        'url': 'http://dwightaspencer.com',
    }
    license_info = {
        'name': 'BSD',
        'url': 'https://denzuko.github.io/LICENSE.md',
    }
    info = {
        'title': 'Example API',
        'version': "v1",
        'description': 'an API description',
        'termsOfService': 'my terms of service',
        'contact': contact,
        'license': license_info,
        'schemes': ['http', 'https'],
    }
    return info
def settings():
    """Define application settings for the Eve app.

    Bug fix: the Cache-Control value was misspelled 'max-ege=20'; the HTTP
    directive is 'max-age'.
    """
    return {
        "DEBUG": False,
        "API_VERSION": 'v1',
        "RENDERERS": ['eve.render.JSONRenderer'],
        "X_DOMAINS": ['*', 'http://editor.swagger.io'],
        "X_HEADERS": ['Content-Type', 'If-Match'],
        "CACHE_CONTROL": 'max-age=20',
        "CACHE_EXPIRES": 20,
        "RESOURCE_METHODS": ["GET", "DELETE", "POST"],
        "ITEM_METHODS": ["GET", "PUT", "DELETE"],
        "SWAGGER_INFO": openapi_info(),
        "DOMAIN": schema()
    }
@task()
def proxy(ctx):
    """ Starts oauth2-proxy in the background (GitHub provider).

    Bug fix: `os.environ.get(PORT, 8080)` referenced the undefined name
    `PORT` (NameError at call time); the environment key must be the
    string 'PORT', and `os` must be imported.
    """
    ctx.run(" ".join([
        'oauth2-proxy',
        '--provider=github',
        '--http-address', f"0.0.0.0:{os.environ.get('PORT', 8080)}",
        '--reverse-proxy']), disown=True)
@task
def migrate(ctx):
    """Placeholder task for database migrations (currently a no-op)."""
    return None
@task(default=True, pre=[proxy])
def serve(ctx):
    # Default task: build the Eve app with auth disabled, attach the swagger
    # blueprint, and serve on all interfaces (the `proxy` pre-task starts
    # oauth2-proxy in front of it).
    app = Eve(auth=None, settings=settings())
    app.register_blueprint(get_swagger_blueprint())
    app.run(host='0.0.0.0', port=3000)
'__author__',
'__copyright__',
'__credits__',
'__email__',
'__license__',
'__maintainer__',
'__status__',
'__version__',
]
__author__ = "<NAME>, <NAME>, and <NAME>"
__copyright__ = 'Copyright (c) 2011-2018 Digital Bazaar, Inc.'
__credits__ = ["<NAME>", "<NAME>", "<NAME>","<NAME>"]
__email__ = "<EMAIL>"
__license__ = 'New BSD license'
__maintainer__ = "<NAME>"
__status__ = "Production"
__version__ = "1.0.1"
from eve import Eve
from eve_swagger import get_swagger_blueprint
from invoke import task
def schema():
return {
"test": {},
"users": {
'item_title': 'member',
'schema': {
'username': {
"type": str(),
"minlength": 5,
"maxlength": 25,
},
}
}
}
def openapi_info():
return {
'title': 'Example API',
'version': "v1",
'description': 'an API description',
'termsOfService': 'my terms of service',
'contact': {
'name': 'Dwight (@denzuko) Spencer',
'url': 'http://dwightaspencer.com'
},
'license': {
'name': 'BSD',
'url': 'https://denzuko.github.io/LICENSE.md',
},
'schemes': ['http', 'https']
}
def settings():
""" Define application settings """
return {
"DEBUG": False,
"API_VERSION": 'v1',
"RENDERERS": ['eve.render.JSONRenderer'],
"X_DOMAINS": ['*', 'http://editor.swagger.io' ],
"X_HEADERS": ['Content-Type', 'If-Match'],
"CACHE_CONTROL": 'max-ege=20',
"CACHE_EXPIRES": 20,
"RESOURCE_METHODS": ["GET", "DELETE", "POST"],
"ITEM_METHODS": ["GET", "PUT", "DELETE"],
"SWAGGER_INFO": openapi_info(),
"DOMAIN": schema()
}
@task()
def proxy(ctx):
""" Starts oauth2-proxy """
ctx.run(" ".join([
'oauth2-proxy',
'--provider=github',
'--http-address', f"0.0.0.0:{os.environ.get(PORT, 8080)}",
'--reverse-proxy']), disown=True)
@task
def migrate(ctx):
pass
@task(default=True, pre=[proxy])
def serve(ctx):
app = Eve(auth=None, settings=settings())
app.register_blueprint(get_swagger_blueprint())
app.run(host='0.0.0.0', port=3000) | 0.369884 | 0.086246 |
def run_tests_multi_transfer(config):
    """Scenario suite for FA2 batched transfers.

    Covers valid batches (multiple tokens, self-transfers, repeated txs) and
    invalid ones (spending a token twice, unknown token ids, third-party
    forced transfers), checking token ownership after each scenario.

    Decomposition: the original repeated the same transfer boilerplate for
    every scenario; the two local helpers below build one tx record and run
    one batch, preserving the exact records and run() arguments.
    """
    scenario = sp.test_scenario()
    admin, [alice, bob] = get_addresses()
    scenario.h1("Tests multiple transfer")
    scenario.table_of_contents()

    def _tx(recipient, token_id):
        # One transfer record: send 1 unit of `token_id` to `recipient`.
        return sp.record(to_=recipient.address, amount=1, token_id=token_id)

    def _run_batch(contract, owner, sender, txs, valid=True):
        # Execute a single batched transfer of `txs` owned by `owner`,
        # submitted by `sender`; `valid` is the expected outcome.
        contract.transfer([
            contract.batch_transfer.item(from_=owner.address, txs=txs)
        ]).run(sender=sender, valid=valid)

    # -----------------------------------------------------
    scenario.h2("Simple transfer")
    possessors = [alice] * 2
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(bob, 1)])
    possessors[0] = bob
    possessors[1] = bob
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending 2 times the same token to itself")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(alice, 0), _tx(alice, 0)])
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending 2 different tokens to 2 different addresses")
    possessors = [alice] * 2
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(admin, 1)])
    possessors[0] = bob
    possessors[1] = admin
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending 1 token to someone else and 2 times the same token to itself")
    possessors = [alice] * 2
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 1), _tx(alice, 0), _tx(alice, 0)])
    possessors[1] = bob
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending the same token to itself and (then) someone else")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(alice, 0), _tx(bob, 0)])
    possessors[0] = bob
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending the same token to someone else and (then) itself")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(alice, 0)], valid=False)
    scenario.p("Transaction has been cancelled")
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending one existing token and 1 non-existing token")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(bob, 1000)], valid=False)
    scenario.p("Transaction has been cancelled")
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending 2 non-existing tokens to someone")
    contract = create_new_contract(config, admin, scenario, [])
    _run_batch(contract, alice, alice, [_tx(bob, 1000), _tx(bob, 1001)], valid=False)
    # -----------------------------------------------------
    scenario.h2("Sending 2 times the same token to someone")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(bob, 0)], valid=False)
    scenario.p("Transaction has been cancelled")
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Sending the same token to 2 different addresses")
    possessors = [alice]
    contract = create_new_contract(config, admin, scenario, possessors)
    _run_batch(contract, alice, alice, [_tx(bob, 0), _tx(admin, 0)], valid=False)
    scenario.p("Transaction has been cancelled")
    ownership_test(scenario, contract, possessors)
    # -----------------------------------------------------
    scenario.h2("Cannot force a transaction between two addresses")
    contract = create_new_contract(config, admin, scenario, [bob] * 2)
    _run_batch(contract, bob, alice, [_tx(admin, 0), _tx(alice, 1)], valid=False)
scenario = sp.test_scenario()
admin, [alice, bob] = get_addresses()
scenario.h1("Tests multiple transfer")
scenario.table_of_contents()
#-----------------------------------------------------
scenario.h2("Simple transfer")
possessors = [alice]*2
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=bob.address,
amount=1,
token_id=1)
])
]).run(sender=alice)
possessors[0] = bob
possessors[1] = bob
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending 2 times the same token to itself")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=alice.address,
amount=1,
token_id=0)
,
sp.record(to_=alice.address,
amount=1,
token_id=0)
])
]).run(sender=alice)
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending 2 different tokens to 2 different addresses")
possessors = [alice]*2
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=admin.address,
amount=1,
token_id=1)
])
]).run(sender=alice)
possessors[0] = bob
possessors[1] = admin
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending 1 token to someone else and 2 times the same token to itself")
possessors = [alice]*2
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=1)
,
sp.record(to_=alice.address,
amount=1,
token_id=0)
,
sp.record(to_=alice.address,
amount=1,
token_id=0)
])
]).run(sender=alice)
possessors[1] = bob
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending the same token to itself and (then) someone else")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=alice.address,
amount=1,
token_id=0)
,
sp.record(to_=bob.address,
amount=1,
token_id=0)
])
]).run(sender=alice)
possessors[0] = bob
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending the same token to someone else and (then) itself")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=alice.address,
amount=1,
token_id=0)
])
]).run(sender=alice, valid=False)
scenario.p("Transaction has been cancelled")
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending one existing token and 1 non-existing token")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=bob.address,
amount=1,
token_id=1000)
])
]).run(sender=alice, valid=False)
scenario.p("Transaction has been cancelled")
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending 2 non-existing tokens to someone")
contract = create_new_contract(config, admin, scenario, [])
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=1000)
,
sp.record(to_=bob.address,
amount=1,
token_id=1001)
])
]).run(sender=alice, valid=False)
#-----------------------------------------------------
scenario.h2("Sending 2 times the same token to someone")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=bob.address,
amount=1,
token_id=0)
])
]).run(sender=alice, valid=False)
scenario.p("Transaction has been cancelled")
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Sending the same token to 2 different addresses")
possessors = [alice]
contract = create_new_contract(config, admin, scenario, possessors)
contract.transfer([
contract.batch_transfer.item(from_=alice.address,
txs=[
sp.record(to_=bob.address,
amount=1,
token_id=0)
,
sp.record(to_=admin.address,
amount=1,
token_id=0)
])
]).run(sender=alice, valid=False)
scenario.p("Transaction has been cancelled")
ownership_test(scenario, contract, possessors)
#-----------------------------------------------------
scenario.h2("Cannot force a transaction between two addresses")
contract = create_new_contract(config, admin, scenario, [bob]*2)
contract.transfer([
contract.batch_transfer.item(from_=bob.address,
txs=[
sp.record(to_=admin.address,
amount=1,
token_id=0)
,
sp.record(to_=alice.address,
amount=1,
token_id=1)
])
]).run(sender=alice, valid=False) | 0.569494 | 0.331444 |
from unittest import TestCase
from managesf.model.yamlbkd.resources.gitacls import ACLOps
class TestACLOps(TestCase):
    """Unit tests for ACLOps.extra_validations (Gerrit ACL file checks)."""

    def test_gerrit_plugin_config(self):
        """Test that adding extra plugin config options in project.config
        does not break validation"""
        # One known group, so 'group coders' references resolve.
        new = {
            'resources': {
                'groups': {
                    'mygid': {
                        'name': 'coders',
                        'namespace': '',
                        'members': ['<EMAIL>'],
                    }
                }
            }
        }
        kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = group coders
\tlabel-Code-Review = -2..+2 group coders
[plugin "reviewers-by-blame"]
\tmaxReviewers = 2
\tignoreDrafts = true
\tignoreSubjectRegEx = WIP(.*)
""",
                  'groups': ['mygid'],
                  'name': 'myacl'}
        o = ACLOps(None, new)
        logs = o.extra_validations(**kwargs)
        # The [plugin ...] section must be tolerated: no validation messages.
        self.assertEqual(len(logs), 0)

    def test_extra_validations(self):
        # Not parseable as an INI-style file: exactly one parser error.
        kwargs = {'file': 'invalid ACLs !',
                  'groups': [],
                  'name': 'myacl'}
        o = ACLOps(None, None)
        logs = o.extra_validations(**kwargs)
        self.assertTrue(logs[0].startswith(
            "File contains no section headers."))
        self.assertEqual(len(logs), 1)
        # An empty ACL file is accepted.
        kwargs = {'file': '',
                  'groups': [],
                  'name': 'myacl'}
        o = ACLOps(None, None)
        logs = o.extra_validations(**kwargs)
        self.assertEqual(len(logs), 0)
        new = {
            'resources': {
                'groups': {
                    'mygid': {
                        'name': 'coders',
                        'namespace': '',
                        'members': ['<EMAIL>'],
                    }
                }
            }
        }
        # 'read' value missing the 'group' keyword: one error expected.
        kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = coders
""",
                  'groups': ['mygid'],
                  'name': 'myacl'}
        o = ACLOps(None, new)
        logs = o.extra_validations(**kwargs)
        self.assertEqual(len(logs), 1)
        self.assertIn('ACLs file section (access "refs/*"), key '
                      '(read) expects a group to be specified (not: coders)',
                      logs)
        # Label rule missing the 'group' keyword: one error expected.
        kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tlabel-Code-Review = -2..+2 coders
""",
                  'groups': ['mygid'],
                  'name': 'myacl'}
        o = ACLOps(None, new)
        logs = o.extra_validations(**kwargs)
        self.assertEqual(len(logs), 1)
        self.assertIn('ACLs file section (access "refs/*"), key '
                      '(label-Code-Review) expects a note rule and '
                      'a group to be specified (not: -2..+2 coders)',
                      logs)
        # Correctly formed rules referencing a known group: no errors.
        kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = group coders
\tlabel-Code-Review = -2..+2 group coders
""",
                  'groups': ['mygid'],
                  'name': 'myacl'}
        o = ACLOps(None, new)
        logs = o.extra_validations(**kwargs)
        self.assertEqual(len(logs), 0)
        # Same ACL file, but the declared group is named differently
        # ('pchitt'), so both 'coders' references are unknown: two errors.
        new = {
            'resources': {
                'groups': {
                    'mygid': {
                        'name': 'pchitt',
                        'namespace': '',
                        'members': ['<EMAIL>'],
                    }
                }
            }
        }
        o = ACLOps(None, new)
        logs = o.extra_validations(**kwargs)
        self.assertEqual(len(logs), 2)
        self.assertIn('ACLs file section (access "refs/*"), key '
                      '(read) relies on an unknown group name: coders',
                      logs)
        self.assertIn('ACLs file section (access "refs/*"), key '
                      '(label-Code-Review) relies on an unknown '
                      'group name: coders',
                      logs)
from unittest import TestCase
from managesf.model.yamlbkd.resources.gitacls import ACLOps
class TestACLOps(TestCase):
def test_gerrit_plugin_config(self):
"""Test that adding extra plugin config options in project.config
does not break validation"""
new = {
'resources': {
'groups': {
'mygid': {
'name': 'coders',
'namespace': '',
'members': ['<EMAIL>'],
}
}
}
}
kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = group coders
\tlabel-Code-Review = -2..+2 group coders
[plugin "reviewers-by-blame"]
\tmaxReviewers = 2
\tignoreDrafts = true
\tignoreSubjectRegEx = WIP(.*)
""",
'groups': ['mygid'],
'name': 'myacl'}
o = ACLOps(None, new)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 0)
def test_extra_validations(self):
kwargs = {'file': 'invalid ACLs !',
'groups': [],
'name': 'myacl'}
o = ACLOps(None, None)
logs = o.extra_validations(**kwargs)
self.assertTrue(logs[0].startswith(
"File contains no section headers."))
self.assertEqual(len(logs), 1)
kwargs = {'file': '',
'groups': [],
'name': 'myacl'}
o = ACLOps(None, None)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 0)
new = {
'resources': {
'groups': {
'mygid': {
'name': 'coders',
'namespace': '',
'members': ['<EMAIL>'],
}
}
}
}
kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = coders
""",
'groups': ['mygid'],
'name': 'myacl'}
o = ACLOps(None, new)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 1)
self.assertIn('ACLs file section (access "refs/*"), key '
'(read) expects a group to be specified (not: coders)',
logs)
kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tlabel-Code-Review = -2..+2 coders
""",
'groups': ['mygid'],
'name': 'myacl'}
o = ACLOps(None, new)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 1)
self.assertIn('ACLs file section (access "refs/*"), key '
'(label-Code-Review) expects a note rule and '
'a group to be specified (not: -2..+2 coders)',
logs)
kwargs = {'file': """[project]
\tdescription = "My awesome project"
[access "refs/*"]
\tread = group coders
\tlabel-Code-Review = -2..+2 group coders
""",
'groups': ['mygid'],
'name': 'myacl'}
o = ACLOps(None, new)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 0)
new = {
'resources': {
'groups': {
'mygid': {
'name': 'pchitt',
'namespace': '',
'members': ['<EMAIL>'],
}
}
}
}
o = ACLOps(None, new)
logs = o.extra_validations(**kwargs)
self.assertEqual(len(logs), 2)
self.assertIn('ACLs file section (access "refs/*"), key '
'(read) relies on an unknown group name: coders',
logs)
self.assertIn('ACLs file section (access "refs/*"), key '
'(label-Code-Review) relies on an unknown '
'group name: coders',
logs) | 0.693992 | 0.305542 |
from PyQt5.QtCore import QRect, QCoreApplication, QMetaObject
from PyQt5.QtWidgets import QPushButton, QMainWindow, QWidget, QLabel, QVBoxLayout, QCheckBox, QAction, QMenuBar, QMenu, QHBoxLayout, QProgressBar
from PyQt5.QtGui import QIcon
class Ui_MainWindow(QMainWindow):
def __init__(self):
super(Ui_MainWindow, self).__init__()
self.setWindowIcon(QIcon('icon.ico'))
self.setFixedSize(481, 447)
def setupUi(self):
self.centralwidget = QWidget(self)
self.verticalLayoutWidget = QWidget(self.centralwidget)
self.verticalLayoutWidget.setGeometry(QRect(20, 50, 121, 271))
self.verticalLayout = QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.checkBox = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox)
self.checkBox_2 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_2)
self.checkBox_4 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_4)
self.checkBox_3 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_3)
self.checkBox_8 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_8)
self.checkBox_5 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_5)
self.checkBox_7 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_7)
self.checkBox_6 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_6)
self.checkBox_9 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_9)
self.verticalLayoutWidget_2 = QWidget(self.centralwidget)
self.verticalLayoutWidget_2.setGeometry(QRect(170, 50, 131, 271))
self.verticalLayout_2 = QVBoxLayout(self.verticalLayoutWidget_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.checkBox_10 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_10)
self.checkBox_11 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_11)
self.checkBox_12 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_12)
self.checkBox_13 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_13)
self.checkBox_14 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_14)
self.checkBox_15 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_15)
self.checkBox_16 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_16)
self.checkBox_17 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_17)
self.checkBox_18 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_18)
self.verticalLayoutWidget_3 = QWidget(self.centralwidget)
self.verticalLayoutWidget_3.setGeometry(QRect(330, 50, 131, 271))
self.verticalLayout_3 = QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.checkBox_19 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_19)
self.checkBox_20 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_20)
self.checkBox_21 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_21)
self.checkBox_22 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_22)
self.checkBox_23 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_23)
self.checkBox_24 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_24)
self.checkBox_25 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_25)
self.checkBox_26 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_26)
self.checkBox_27 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_27)
self.label_note = QLabel(self.centralwidget)
self.label_note.setGeometry(QRect(20, 340, 350, 16))
self.horizontalLayoutWidget = QWidget(self.centralwidget)
self.horizontalLayoutWidget.setGeometry(QRect(379, 380, 81, 31))
self.horizontalLayout = QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.button_uninstall = QPushButton(self.horizontalLayoutWidget)
self.horizontalLayout.addWidget(self.button_uninstall)
self.label_info = QLabel(self.centralwidget)
self.label_info.setGeometry(QRect(20, 20, 331, 20))
self.progressbar = QProgressBar(self.centralwidget)
self.progressbar.setGeometry(QRect(20, 40, 175, 10))
self.progressbar.hide()
self.horizontalLayoutWidget_2 = QWidget(self.centralwidget)
self.horizontalLayoutWidget_2.setGeometry(QRect(20, 380, 160, 31))
self.horizontalLayout_2 = QHBoxLayout(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.button_select_all = QPushButton(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.addWidget(self.button_select_all)
self.button_deselect_all = QPushButton(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.addWidget(self.button_deselect_all)
self.setCentralWidget(self.centralwidget)
self.menubar = QMenuBar(self)
self.menubar.setGeometry(QRect(0, 0, 481, 21))
self.menuHelp = QMenu(self.menubar)
self.setMenuBar(self.menubar)
self.actionRefresh = QAction(self)
self.actionHomepage = QAction(self)
self.actionAbout = QAction(self)
self.actionQuit = QAction(self)
self.menuHelp.addAction(self.actionRefresh)
self.menuHelp.addAction(self.actionHomepage)
self.menuHelp.addAction(self.actionAbout)
self.menuHelp.addSeparator()
self.menuHelp.addAction(self.actionQuit)
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi()
QMetaObject.connectSlotsByName(self)
def retranslateUi(self):
_translate = QCoreApplication.translate
self.setWindowTitle(_translate("MainWindow", "PyDebloatX"))
self.menuHelp.setTitle(_translate("MainWindow", "&Help"))
self.actionRefresh.setText(_translate("MainWindow", "&Refresh"))
self.actionRefresh.setShortcut(_translate("MainWindow", "Ctrl+R"))
self.actionHomepage.setText(_translate("MainWindow", "&Github"))
self.actionHomepage.setShortcut(_translate("MainWindow", "Ctrl+G"))
self.actionAbout.setText(_translate("MainWindow", "&About"))
self.actionAbout.setShortcut(_translate("MainWindow", "Ctrl+A"))
self.actionQuit.setText(_translate("MainWindow", "&Quit"))
self.actionQuit.setShortcut(_translate("MainWindow", "Ctrl+Q"))
self.label_info.setText(_translate("MainWindow", "Updating list of installed apps..."))
self.checkBox.setText(_translate("MainWindow", "3D Viewer"))
self.checkBox_2.setText(_translate("MainWindow", "Alarms and Clock"))
self.checkBox_3.setText(_translate("MainWindow", "Calculator"))
self.checkBox_4.setText(_translate("MainWindow", "Calendar and Mail"))
self.checkBox_5.setText(_translate("MainWindow", "Camera"))
self.checkBox_6.setText(_translate("MainWindow", "Get Help"))
self.checkBox_7.setText(_translate("MainWindow", "Get Started"))
self.checkBox_8.setText(_translate("MainWindow", "Groove Music"))
self.checkBox_9.setText(_translate("MainWindow", "Maps"))
self.checkBox_10.setText(_translate("MainWindow", "Messaging"))
self.checkBox_11.setText(_translate("MainWindow", "Money"))
self.checkBox_12.setText(_translate("MainWindow", "Movies && TV"))
self.checkBox_13.setText(_translate("MainWindow", "News"))
self.checkBox_14.setText(_translate("MainWindow", "Office"))
self.checkBox_15.setText(_translate("MainWindow", "OneNote"))
self.checkBox_16.setText(_translate("MainWindow", "Paint 3D"))
self.checkBox_17.setText(_translate("MainWindow", "People"))
self.checkBox_18.setText(_translate("MainWindow", "Photos"))
self.checkBox_19.setText(_translate("MainWindow", "Skype"))
self.checkBox_20.setText(_translate("MainWindow", "Solitaire"))
self.checkBox_21.setText(_translate("MainWindow", "Sports"))
self.checkBox_22.setText(_translate("MainWindow", "Store"))
self.checkBox_23.setText(_translate("MainWindow", "Voice Recorder"))
self.checkBox_24.setText(_translate("MainWindow", "Weather"))
self.checkBox_25.setText(_translate("MainWindow", "Windows Feedback"))
self.checkBox_26.setText(_translate("MainWindow", "Xbox"))
self.checkBox_27.setText(_translate("MainWindow", "Your Phone"))
self.label_note.setText(_translate("MainWindow", "NOTE: Microsoft Edge and Cortana cannot be uninstalled using this GUI."))
self.button_select_all.setText(_translate("MainWindow", "Select All"))
self.button_deselect_all.setText(_translate("MainWindow", "Deselect All"))
self.button_uninstall.setText(_translate("MainWindow", "Uninstall")) | gui_main.py | from PyQt5.QtCore import QRect, QCoreApplication, QMetaObject
from PyQt5.QtWidgets import QPushButton, QMainWindow, QWidget, QLabel, QVBoxLayout, QCheckBox, QAction, QMenuBar, QMenu, QHBoxLayout, QProgressBar
from PyQt5.QtGui import QIcon
class Ui_MainWindow(QMainWindow):
def __init__(self):
super(Ui_MainWindow, self).__init__()
self.setWindowIcon(QIcon('icon.ico'))
self.setFixedSize(481, 447)
def setupUi(self):
self.centralwidget = QWidget(self)
self.verticalLayoutWidget = QWidget(self.centralwidget)
self.verticalLayoutWidget.setGeometry(QRect(20, 50, 121, 271))
self.verticalLayout = QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.checkBox = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox)
self.checkBox_2 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_2)
self.checkBox_4 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_4)
self.checkBox_3 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_3)
self.checkBox_8 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_8)
self.checkBox_5 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_5)
self.checkBox_7 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_7)
self.checkBox_6 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_6)
self.checkBox_9 = QCheckBox(self.verticalLayoutWidget)
self.verticalLayout.addWidget(self.checkBox_9)
self.verticalLayoutWidget_2 = QWidget(self.centralwidget)
self.verticalLayoutWidget_2.setGeometry(QRect(170, 50, 131, 271))
self.verticalLayout_2 = QVBoxLayout(self.verticalLayoutWidget_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.checkBox_10 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_10)
self.checkBox_11 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_11)
self.checkBox_12 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_12)
self.checkBox_13 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_13)
self.checkBox_14 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_14)
self.checkBox_15 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_15)
self.checkBox_16 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_16)
self.checkBox_17 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_17)
self.checkBox_18 = QCheckBox(self.verticalLayoutWidget_2)
self.verticalLayout_2.addWidget(self.checkBox_18)
self.verticalLayoutWidget_3 = QWidget(self.centralwidget)
self.verticalLayoutWidget_3.setGeometry(QRect(330, 50, 131, 271))
self.verticalLayout_3 = QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.checkBox_19 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_19)
self.checkBox_20 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_20)
self.checkBox_21 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_21)
self.checkBox_22 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_22)
self.checkBox_23 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_23)
self.checkBox_24 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_24)
self.checkBox_25 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_25)
self.checkBox_26 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_26)
self.checkBox_27 = QCheckBox(self.verticalLayoutWidget_3)
self.verticalLayout_3.addWidget(self.checkBox_27)
self.label_note = QLabel(self.centralwidget)
self.label_note.setGeometry(QRect(20, 340, 350, 16))
self.horizontalLayoutWidget = QWidget(self.centralwidget)
self.horizontalLayoutWidget.setGeometry(QRect(379, 380, 81, 31))
self.horizontalLayout = QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.button_uninstall = QPushButton(self.horizontalLayoutWidget)
self.horizontalLayout.addWidget(self.button_uninstall)
self.label_info = QLabel(self.centralwidget)
self.label_info.setGeometry(QRect(20, 20, 331, 20))
self.progressbar = QProgressBar(self.centralwidget)
self.progressbar.setGeometry(QRect(20, 40, 175, 10))
self.progressbar.hide()
self.horizontalLayoutWidget_2 = QWidget(self.centralwidget)
self.horizontalLayoutWidget_2.setGeometry(QRect(20, 380, 160, 31))
self.horizontalLayout_2 = QHBoxLayout(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.button_select_all = QPushButton(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.addWidget(self.button_select_all)
self.button_deselect_all = QPushButton(self.horizontalLayoutWidget_2)
self.horizontalLayout_2.addWidget(self.button_deselect_all)
self.setCentralWidget(self.centralwidget)
self.menubar = QMenuBar(self)
self.menubar.setGeometry(QRect(0, 0, 481, 21))
self.menuHelp = QMenu(self.menubar)
self.setMenuBar(self.menubar)
self.actionRefresh = QAction(self)
self.actionHomepage = QAction(self)
self.actionAbout = QAction(self)
self.actionQuit = QAction(self)
self.menuHelp.addAction(self.actionRefresh)
self.menuHelp.addAction(self.actionHomepage)
self.menuHelp.addAction(self.actionAbout)
self.menuHelp.addSeparator()
self.menuHelp.addAction(self.actionQuit)
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi()
QMetaObject.connectSlotsByName(self)
def retranslateUi(self):
_translate = QCoreApplication.translate
self.setWindowTitle(_translate("MainWindow", "PyDebloatX"))
self.menuHelp.setTitle(_translate("MainWindow", "&Help"))
self.actionRefresh.setText(_translate("MainWindow", "&Refresh"))
self.actionRefresh.setShortcut(_translate("MainWindow", "Ctrl+R"))
self.actionHomepage.setText(_translate("MainWindow", "&Github"))
self.actionHomepage.setShortcut(_translate("MainWindow", "Ctrl+G"))
self.actionAbout.setText(_translate("MainWindow", "&About"))
self.actionAbout.setShortcut(_translate("MainWindow", "Ctrl+A"))
self.actionQuit.setText(_translate("MainWindow", "&Quit"))
self.actionQuit.setShortcut(_translate("MainWindow", "Ctrl+Q"))
self.label_info.setText(_translate("MainWindow", "Updating list of installed apps..."))
self.checkBox.setText(_translate("MainWindow", "3D Viewer"))
self.checkBox_2.setText(_translate("MainWindow", "Alarms and Clock"))
self.checkBox_3.setText(_translate("MainWindow", "Calculator"))
self.checkBox_4.setText(_translate("MainWindow", "Calendar and Mail"))
self.checkBox_5.setText(_translate("MainWindow", "Camera"))
self.checkBox_6.setText(_translate("MainWindow", "Get Help"))
self.checkBox_7.setText(_translate("MainWindow", "Get Started"))
self.checkBox_8.setText(_translate("MainWindow", "Groove Music"))
self.checkBox_9.setText(_translate("MainWindow", "Maps"))
self.checkBox_10.setText(_translate("MainWindow", "Messaging"))
self.checkBox_11.setText(_translate("MainWindow", "Money"))
self.checkBox_12.setText(_translate("MainWindow", "Movies && TV"))
self.checkBox_13.setText(_translate("MainWindow", "News"))
self.checkBox_14.setText(_translate("MainWindow", "Office"))
self.checkBox_15.setText(_translate("MainWindow", "OneNote"))
self.checkBox_16.setText(_translate("MainWindow", "Paint 3D"))
self.checkBox_17.setText(_translate("MainWindow", "People"))
self.checkBox_18.setText(_translate("MainWindow", "Photos"))
self.checkBox_19.setText(_translate("MainWindow", "Skype"))
self.checkBox_20.setText(_translate("MainWindow", "Solitaire"))
self.checkBox_21.setText(_translate("MainWindow", "Sports"))
self.checkBox_22.setText(_translate("MainWindow", "Store"))
self.checkBox_23.setText(_translate("MainWindow", "Voice Recorder"))
self.checkBox_24.setText(_translate("MainWindow", "Weather"))
self.checkBox_25.setText(_translate("MainWindow", "Windows Feedback"))
self.checkBox_26.setText(_translate("MainWindow", "Xbox"))
self.checkBox_27.setText(_translate("MainWindow", "Your Phone"))
self.label_note.setText(_translate("MainWindow", "NOTE: Microsoft Edge and Cortana cannot be uninstalled using this GUI."))
self.button_select_all.setText(_translate("MainWindow", "Select All"))
self.button_deselect_all.setText(_translate("MainWindow", "Deselect All"))
self.button_uninstall.setText(_translate("MainWindow", "Uninstall")) | 0.390708 | 0.063106 |
"""Unit tests for the Bio.PDB.ResidueDepth module."""
import unittest
import warnings
from Bio.PDB import PDBParser, ResidueDepth
from Bio import MissingExternalDependencyError
from Bio.PDB.PDBExceptions import PDBConstructionWarning
from Bio._py3k import getoutput
msms_exe = None
try:
output = getoutput("msms -h")
if output.startswith("Usage : msms parameters"):
msms_exe = "msms"
except OSError:
pass
if not msms_exe:
raise MissingExternalDependencyError(
"Install MSMS if you want to use it in Biopython.")
class ResidueDepthTests(unittest.TestCase):
"""Test ResidueDepth module."""
def check_msms(self, prot_file, first_100_residues):
p = PDBParser()
with warnings.catch_warnings():
warnings.simplefilter("ignore", PDBConstructionWarning)
s = p.get_structure("X", prot_file)
model = s[0]
rd = ResidueDepth(model)
res_chain = ''
for item in rd.property_list[:100]:
res_chain = res_chain + item[0].get_resname()
self.assertEqual(res_chain, first_100_residues)
def test_ResidueDepth_2XHE(self):
self.check_msms('PDB/2XHE.pdb', 'HISMETSERLEULYSSERALAVALLYSTHRVALLEUTH'
'RASNSERLEUARGSERVALALAASPGLYGLYASPTRPL'
'YSVALLEUVALVALASPLYSPROALALEUARGMETILE'
'SERGLUCYSALAARGMETSERGLUILELEUASPLEUGL'
'YVALTHRVALVALGLUASPVALSERLYSGLNARGLYSV'
'ALLEUPROGLNPHEHISGLYVALTYRPHEILEGLUPRO'
'THRGLUGLUASNLEUASPTYRVALILEARGASPPHEAL'
'AASPARGTHRPROTHRTYRGLUALAALAHISLEU')
def test_ResidueDepth_2BEG(self):
self.check_msms('PDB/2BEG.pdb', 'LEUVALPHEPHEALAGLUASPVALGLYSERASNLYSGL'
'YALAILEILEGLYLEUMETVALGLYGLYVALVALILEA'
'LALEUVALPHEPHEALAGLUASPVALGLYSERASNLYS'
'GLYALAILEILEGLYLEUMETVALGLYGLYVALVALIL'
'EALALEUVALPHEPHEALAGLUASPVALGLYSERASNL'
'YSGLYALAILEILEGLYLEUMETVALGLYGLYVALVAL'
'ILEALALEUVALPHEPHEALAGLUASPVALGLYSERAS'
'NLYSGLYALAILEILEGLYLEUMETVALGLYGLY')
def test_ResidueDepth_1LCD(self):
self.check_msms('PDB/1LCD.pdb', 'METLYSPROVALTHRLEUTYRASPVALALAGLUTYRAL'
'AGLYVALSERTYRGLNTHRVALSERARGVALVALASNG'
'LNALASERHISVALSERALALYSTHRARGGLULYSVAL'
'GLUALAALAMETALAGLULEUASNTYRILEPROASNAR'
'G')
def test_ResidueDepth_1A8O(self):
self.check_msms('PDB/1A8O.pdb', 'MSEASPILEARGGLNGLYPROLYSGLUPROPHEARGAS'
'PTYRVALASPARGPHETYRLYSTHRLEUARGALAGLUG'
'LNALASERGLNGLUVALLYSASNTRPMSETHRGLUTHR'
'LEULEUVALGLNASNALAASNPROASPCYSLYSTHRIL'
'ELEULYSALALEUGLYPROGLYALATHRLEUGLUGLUM'
'SEMSETHRALACYSGLNGLY')
if __name__ == '__main__':
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner) | Tests/test_PDB_ResidueDepth.py | """Unit tests for the Bio.PDB.ResidueDepth module."""
import unittest
import warnings
from Bio.PDB import PDBParser, ResidueDepth
from Bio import MissingExternalDependencyError
from Bio.PDB.PDBExceptions import PDBConstructionWarning
from Bio._py3k import getoutput
msms_exe = None
try:
output = getoutput("msms -h")
if output.startswith("Usage : msms parameters"):
msms_exe = "msms"
except OSError:
pass
if not msms_exe:
raise MissingExternalDependencyError(
"Install MSMS if you want to use it in Biopython.")
class ResidueDepthTests(unittest.TestCase):
"""Test ResidueDepth module."""
def check_msms(self, prot_file, first_100_residues):
p = PDBParser()
with warnings.catch_warnings():
warnings.simplefilter("ignore", PDBConstructionWarning)
s = p.get_structure("X", prot_file)
model = s[0]
rd = ResidueDepth(model)
res_chain = ''
for item in rd.property_list[:100]:
res_chain = res_chain + item[0].get_resname()
self.assertEqual(res_chain, first_100_residues)
def test_ResidueDepth_2XHE(self):
self.check_msms('PDB/2XHE.pdb', 'HISMETSERLEULYSSERALAVALLYSTHRVALLEUTH'
'RASNSERLEUARGSERVALALAASPGLYGLYASPTRPL'
'YSVALLEUVALVALASPLYSPROALALEUARGMETILE'
'SERGLUCYSALAARGMETSERGLUILELEUASPLEUGL'
'YVALTHRVALVALGLUASPVALSERLYSGLNARGLYSV'
'ALLEUPROGLNPHEHISGLYVALTYRPHEILEGLUPRO'
'THRGLUGLUASNLEUASPTYRVALILEARGASPPHEAL'
'AASPARGTHRPROTHRTYRGLUALAALAHISLEU')
def test_ResidueDepth_2BEG(self):
self.check_msms('PDB/2BEG.pdb', 'LEUVALPHEPHEALAGLUASPVALGLYSERASNLYSGL'
'YALAILEILEGLYLEUMETVALGLYGLYVALVALILEA'
'LALEUVALPHEPHEALAGLUASPVALGLYSERASNLYS'
'GLYALAILEILEGLYLEUMETVALGLYGLYVALVALIL'
'EALALEUVALPHEPHEALAGLUASPVALGLYSERASNL'
'YSGLYALAILEILEGLYLEUMETVALGLYGLYVALVAL'
'ILEALALEUVALPHEPHEALAGLUASPVALGLYSERAS'
'NLYSGLYALAILEILEGLYLEUMETVALGLYGLY')
def test_ResidueDepth_1LCD(self):
self.check_msms('PDB/1LCD.pdb', 'METLYSPROVALTHRLEUTYRASPVALALAGLUTYRAL'
'AGLYVALSERTYRGLNTHRVALSERARGVALVALASNG'
'LNALASERHISVALSERALALYSTHRARGGLULYSVAL'
'GLUALAALAMETALAGLULEUASNTYRILEPROASNAR'
'G')
def test_ResidueDepth_1A8O(self):
self.check_msms('PDB/1A8O.pdb', 'MSEASPILEARGGLNGLYPROLYSGLUPROPHEARGAS'
'PTYRVALASPARGPHETYRLYSTHRLEUARGALAGLUG'
'LNALASERGLNGLUVALLYSASNTRPMSETHRGLUTHR'
'LEULEUVALGLNASNALAASNPROASPCYSLYSTHRIL'
'ELEULYSALALEUGLYPROGLYALATHRLEUGLUGLUM'
'SEMSETHRALACYSGLNGLY')
if __name__ == '__main__':
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner) | 0.606265 | 0.17892 |
import numpy as np
import matplotlib.pyplot as plt
x_data = np.array([35., 38., 31., 20., 22., 25., 17., 60., 8., 60.])
y_data = 2 * x_data + 50 + 5 * np.random.random(10)
bb = np.arange(0, 100, 1)
ww = np.arange(-5, 5, 0.1)
Z = np.zeros((len(bb), len(ww)))
# Create landscape
for i in range(len(bb)):
for j in range(len(ww)):
b = bb[i]
w = ww[j]
Z[j][i] = 0
for n in range(len(x_data)):
Z[j][i] += (1/2) * (w * x_data[n] + b - y_data[n]) ** 2
# Initial values for Gradient Descent
b = 0
w = 0
lr = 0.00015 # 0.00015741
iterations = 15000 # 8500
b_history = [b]
w_history = [w]
# Run Gradient Descent process
for i in range(iterations):
b_grad = 0.0
w_grad = 0.0
for n in range(len(x_data)):
loss = w * x_data[n] + b - y_data[n]
b_grad += loss
w_grad += loss * x_data[n]
b -= lr * b_grad
w -= lr * w_grad
b_history.append(b)
w_history.append(w)
# Get final values used for testing
b_final = b_history[len(b_history) - 1]
w_final = w_history[len(w_history) - 1]
# Plot figure
plt.figure(figsize = (8, 7))
# Gradient Descent process
plt.subplot(211)
plt.title('Gradient Descent', fontweight = 'bold')
plt.xlim(0, 99)
plt.xlabel('b', fontstyle = 'italic')
plt.ylim(-5, 4.9)
plt.ylabel('w', fontstyle = 'italic', rotation = 'horizontal')
plt.contourf(bb, ww, Z, 50, alpha = 0.5, cmap = plt.get_cmap('jet'))
plt.plot(b_history, w_history, 'o-', ms = 3, lw = 1.5, color = 'black')
plt.plot([b_final], [w_final], 'o-', ms=3, color='orange')
plt.annotate("b = " + str(round(b_final, 2)) + ", w = " + str(round(w_final, 2)), xy = (b_final, w_final), xytext = (b_final + 1, w_final + 0.2))
# Prediction testing
plt.subplot(212)
plt.title('Prediction vs Actual', fontweight = 'bold')
plt.xlim(0, 80)
plt.xlabel('x', fontstyle = 'italic')
plt.ylim(0, 250)
plt.ylabel('y', fontstyle = 'italic', rotation = 'horizontal')
plt.plot(x_data, y_data, 'o', ms = 3, color = 'black', label = 'Actual')
plt.plot(x_data, w * x_data + b, '+', lw = 0.5, ms = 3, color = 'red', label = 'Prediction')
plt.legend()
# Fit data to figure and display
plt.tight_layout()
plt.show() | machine-learning/linear-regression_gradient-descent.py | import numpy as np
import matplotlib.pyplot as plt
x_data = np.array([35., 38., 31., 20., 22., 25., 17., 60., 8., 60.])
y_data = 2 * x_data + 50 + 5 * np.random.random(10)
bb = np.arange(0, 100, 1)
ww = np.arange(-5, 5, 0.1)
Z = np.zeros((len(bb), len(ww)))
# Create landscape
for i in range(len(bb)):
for j in range(len(ww)):
b = bb[i]
w = ww[j]
Z[j][i] = 0
for n in range(len(x_data)):
Z[j][i] += (1/2) * (w * x_data[n] + b - y_data[n]) ** 2
# Initial values for Gradient Descent
b = 0
w = 0
lr = 0.00015 # 0.00015741
iterations = 15000 # 8500
b_history = [b]
w_history = [w]
# Run Gradient Descent process
for i in range(iterations):
b_grad = 0.0
w_grad = 0.0
for n in range(len(x_data)):
loss = w * x_data[n] + b - y_data[n]
b_grad += loss
w_grad += loss * x_data[n]
b -= lr * b_grad
w -= lr * w_grad
b_history.append(b)
w_history.append(w)
# Get final values used for testing
b_final = b_history[len(b_history) - 1]
w_final = w_history[len(w_history) - 1]
# Plot figure
plt.figure(figsize = (8, 7))
# Gradient Descent process
plt.subplot(211)
plt.title('Gradient Descent', fontweight = 'bold')
plt.xlim(0, 99)
plt.xlabel('b', fontstyle = 'italic')
plt.ylim(-5, 4.9)
plt.ylabel('w', fontstyle = 'italic', rotation = 'horizontal')
plt.contourf(bb, ww, Z, 50, alpha = 0.5, cmap = plt.get_cmap('jet'))
plt.plot(b_history, w_history, 'o-', ms = 3, lw = 1.5, color = 'black')
plt.plot([b_final], [w_final], 'o-', ms=3, color='orange')
plt.annotate("b = " + str(round(b_final, 2)) + ", w = " + str(round(w_final, 2)), xy = (b_final, w_final), xytext = (b_final + 1, w_final + 0.2))
# Prediction testing
plt.subplot(212)
plt.title('Prediction vs Actual', fontweight = 'bold')
plt.xlim(0, 80)
plt.xlabel('x', fontstyle = 'italic')
plt.ylim(0, 250)
plt.ylabel('y', fontstyle = 'italic', rotation = 'horizontal')
plt.plot(x_data, y_data, 'o', ms = 3, color = 'black', label = 'Actual')
plt.plot(x_data, w * x_data + b, '+', lw = 0.5, ms = 3, color = 'red', label = 'Prediction')
plt.legend()
# Fit data to figure and display
plt.tight_layout()
plt.show() | 0.555676 | 0.608739 |
import logging
from enum import Enum
from pathlib import Path
from typing import Callable, List, Literal, NamedTuple, Optional, Set
from pydantic import Extra
from pydantic.class_validators import root_validator, validator
from pydantic.fields import Field
from hydrolib.core.io.ini.io_models import Property, Section
from hydrolib.core.io.ini.models import DataBlockINIBasedModel, INIGeneral, INIModel
from hydrolib.core.io.ini.parser import Parser, ParserConfig
from hydrolib.core.io.ini.serializer import SerializerConfig, write_ini
from hydrolib.core.io.ini.util import get_enum_validator, get_from_subclass_defaults
logger = logging.getLogger(__name__)
class VerticalInterpolation(str, Enum):
    """String-valued enum listing the supported vertical interpolation methods."""

    linear = "linear"
    log = "log"
    block = "block"
class VerticalPositionType(str, Enum):
    """String-valued enum listing the supported vertical position types.

    The values are the literal strings used in the file format
    (presumably percentage relative to bed vs. absolute z relative to
    bed — TODO confirm against the .bc file specification).
    """

    percentage_bed = "percBed"
    z_bed = "ZBed"
class TimeInterpolation(str, Enum):
    """String-valued enum listing the supported time interpolation methods."""

    linear = "linear"
    block_from = "blockFrom"
    block_to = "blockTo"
class QuantityUnitPair(NamedTuple):
    """A (quantity, unit) pair belonging to a [Forcing] block."""

    quantity: str
    unit: str

    def _to_properties(self):
        # Yield one INI Property per field, quantity before unit.
        for key, value in (("quantity", self.quantity), ("unit", self.unit)):
            yield Property(key=key, value=value)
class ForcingBase(DataBlockINIBasedModel):
    """Base class for a [Forcing] block in a .bc file.

    Holds the forcing name, its function type and the list of
    (quantity, unit) pairs; subclasses add function-specific fields.
    """

    _header: Literal["Forcing"] = "Forcing"
    # Name identifying this forcing block (also used by _get_identifier).
    name: str = Field(alias="name")
    # Function type; normalized by _set_function and used by validate()
    # to dispatch to the matching subclass.
    function: str = Field(alias="function")
    # Serialized manually in _to_section(), hence excluded below.
    quantityunitpair: List[QuantityUnitPair]

    def _exclude_fields(self) -> Set:
        # quantityunitpair gets custom serialization in _to_section(),
        # so keep it out of the generic field serialization.
        return {"quantityunitpair"}.union(super()._exclude_fields())

    @classmethod
    def _supports_comments(cls):
        return True

    @classmethod
    def _duplicate_keys_as_list(cls):
        return True

    @root_validator(pre=True)
    def _validate_quantityunitpair(cls, values):
        """Build the quantityunitpair list from separate 'quantity' and
        'unit' input values when the pair list itself was not supplied.

        Raises:
            ValueError: when quantity or unit is missing, or when their
                counts (or scalar/list shapes) do not match.
        """
        quantityunitpairkey = "quantityunitpair"
        if values.get(quantityunitpairkey) is not None:
            return values
        quantities = values.get("quantity")
        if quantities is None:
            raise ValueError("quantity is not provided")
        units = values.get("unit")
        if units is None:
            raise ValueError("unit is not provided")
        # Single scalar quantity/unit: one pair.
        if isinstance(quantities, str) and isinstance(units, str):
            values[quantityunitpairkey] = [(quantities, units)]
            return values
        # Parallel lists: zip them into pairs.
        if isinstance(quantities, list) and isinstance(units, list):
            if not len(quantities) == len(units):
                raise ValueError(
                    "Number of quantities should be equal to number of units"
                )
            values[quantityunitpairkey] = [
                (quantity, unit) for quantity, unit in zip(quantities, units)
            ]
            return values
        # Mixed scalar/list input also falls through to here.
        raise ValueError("Number of quantities should be equal to number of units")

    @validator("function", pre=True)
    def _set_function(cls, value):
        # Normalize to the canonical casing/value declared by the
        # matching ForcingBase subclass default.
        return get_from_subclass_defaults(ForcingBase, "function", value)

    @classmethod
    def validate(cls, v):
        """Try to initialize subclass based on the `function` field.
        This field is compared to each `function` field of the derived models of `ForcingBase`.
        The derived model with an equal function type will be initialized.
        Raises:
            ValueError: When the given type is not a known structure type.
        """
        # should be replaced by discriminated unions once merged
        # https://github.com/samuelcolvin/pydantic/pull/2336
        if isinstance(v, dict):
            # Case-insensitive match of the input's function against each
            # subclass's default function value.
            for c in cls.__subclasses__():
                if (
                    c.__fields__.get("function").default.lower()
                    == v.get("function", "").lower()
                ):
                    v = c(**v)
                    break
            else:
                raise ValueError(
                    f"Function of {cls.__name__} with name={v.get('name', '')} and function={v.get('function', '')} is not recognized."
                )
        return v

    def _get_identifier(self, data: dict) -> Optional[str]:
        # Forcing blocks are identified by their name.
        return data.get("name")

    def _to_section(self) -> Section:
        # Append the quantity/unit properties after the regular fields.
        section = super()._to_section()
        for quantity in self.quantityunitpair:
            for prop in quantity._to_properties():
                section.content.append(prop)
        return section

    class Config:
        extra = Extra.ignore
class TimeSeries(ForcingBase):
function: Literal["timeseries"] = "timeseries"
timeinterpolation: TimeInterpolation = Field(alias="timeInterpolation")
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
_timeinterpolation_validator = get_enum_validator(
"timeinterpolation", enum=TimeInterpolation
)
class Harmonic(ForcingBase):
function: Literal["harmonic"] = "harmonic"
factor: float = Field(1.0, alias="factor")
class Astronomic(ForcingBase):
function: Literal["astronomic"] = "astronomic"
factor: float = Field(1.0, alias="factor")
class HarmonicCorrection(ForcingBase):
function: Literal["harmonic-correction"] = "harmonic-correction"
class AstronomicCorrection(ForcingBase):
function: Literal["astronomic-correction"] = "astronomic-correction"
class T3D(ForcingBase):
function: Literal["t3d"] = "t3d"
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
verticalpositions: List[float] = Field(alias="verticalPositions")
verticalinterpolation: VerticalInterpolation = Field(alias="verticalInterpolation")
verticalpositiontype: VerticalPositionType = Field(alias="verticalPositionType")
_verticalinterpolation_validator = get_enum_validator(
"verticalinterpolation", enum=VerticalInterpolation
)
_verticalpositiontype_validator = get_enum_validator(
"verticalpositiontype", enum=VerticalPositionType
)
class QHTable(ForcingBase):
function: Literal["qhtable"] = "qhtable"
class Constant(ForcingBase):
function: Literal["constant"] = "constant"
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
class ForcingGeneral(INIGeneral):
fileversion: str = Field("1.01", alias="fileVersion")
filetype: Literal["boundConds"] = Field("boundConds", alias="fileType")
class ForcingModel(INIModel):
general: ForcingGeneral = ForcingGeneral()
forcing: List[ForcingBase] = []
@classmethod
def _ext(cls) -> str:
return ".bc"
@classmethod
def _filename(cls) -> str:
return "boundaryconditions"
@classmethod
def _get_parser(cls) -> Callable:
return cls.parse
@classmethod
def parse(cls, filepath: Path):
# It's odd to have to disable parsing something as comments
# but also need to pass it to the *flattener*.
# This method now only supports per model settings, not per section.
parser = Parser(ParserConfig(parse_datablocks=True, parse_comments=False))
with filepath.open() as f:
for line in f:
parser.feed_line(line)
return parser.finalize().flatten(True, False)
def _serialize(self, _: dict) -> None:
# We skip the passed dict for a better one.
config = SerializerConfig(section_indent=0, property_indent=4)
write_ini(self._resolved_filepath, self._to_document(), config=config) | hydrolib/core/io/bc/models.py | import logging
from enum import Enum
from pathlib import Path
from typing import Callable, List, Literal, NamedTuple, Optional, Set
from pydantic import Extra
from pydantic.class_validators import root_validator, validator
from pydantic.fields import Field
from hydrolib.core.io.ini.io_models import Property, Section
from hydrolib.core.io.ini.models import DataBlockINIBasedModel, INIGeneral, INIModel
from hydrolib.core.io.ini.parser import Parser, ParserConfig
from hydrolib.core.io.ini.serializer import SerializerConfig, write_ini
from hydrolib.core.io.ini.util import get_enum_validator, get_from_subclass_defaults
logger = logging.getLogger(__name__)
class VerticalInterpolation(str, Enum):
linear = "linear"
log = "log"
block = "block"
class VerticalPositionType(str, Enum):
percentage_bed = "percBed"
z_bed = "ZBed"
class TimeInterpolation(str, Enum):
linear = "linear"
block_from = "blockFrom"
block_to = "blockTo"
class QuantityUnitPair(NamedTuple):
quantity: str
unit: str
def _to_properties(self):
yield Property(key="quantity", value=self.quantity)
yield Property(key="unit", value=self.unit)
class ForcingBase(DataBlockINIBasedModel):
_header: Literal["Forcing"] = "Forcing"
name: str = Field(alias="name")
function: str = Field(alias="function")
quantityunitpair: List[QuantityUnitPair]
def _exclude_fields(self) -> Set:
return {"quantityunitpair"}.union(super()._exclude_fields())
@classmethod
def _supports_comments(cls):
return True
@classmethod
def _duplicate_keys_as_list(cls):
return True
@root_validator(pre=True)
def _validate_quantityunitpair(cls, values):
quantityunitpairkey = "quantityunitpair"
if values.get(quantityunitpairkey) is not None:
return values
quantities = values.get("quantity")
if quantities is None:
raise ValueError("quantity is not provided")
units = values.get("unit")
if units is None:
raise ValueError("unit is not provided")
if isinstance(quantities, str) and isinstance(units, str):
values[quantityunitpairkey] = [(quantities, units)]
return values
if isinstance(quantities, list) and isinstance(units, list):
if not len(quantities) == len(units):
raise ValueError(
"Number of quantities should be equal to number of units"
)
values[quantityunitpairkey] = [
(quantity, unit) for quantity, unit in zip(quantities, units)
]
return values
raise ValueError("Number of quantities should be equal to number of units")
@validator("function", pre=True)
def _set_function(cls, value):
return get_from_subclass_defaults(ForcingBase, "function", value)
@classmethod
def validate(cls, v):
"""Try to iniatialize subclass based on the `function` field.
This field is compared to each `function` field of the derived models of `ForcingBase`.
The derived model with an equal function type will be initialized.
Raises:
ValueError: When the given type is not a known structure type.
"""
# should be replaced by discriminated unions once merged
# https://github.com/samuelcolvin/pydantic/pull/2336
if isinstance(v, dict):
for c in cls.__subclasses__():
if (
c.__fields__.get("function").default.lower()
== v.get("function", "").lower()
):
v = c(**v)
break
else:
raise ValueError(
f"Function of {cls.__name__} with name={v.get('name', '')} and function={v.get('function', '')} is not recognized."
)
return v
def _get_identifier(self, data: dict) -> Optional[str]:
return data.get("name")
def _to_section(self) -> Section:
section = super()._to_section()
for quantity in self.quantityunitpair:
for prop in quantity._to_properties():
section.content.append(prop)
return section
class Config:
extra = Extra.ignore
class TimeSeries(ForcingBase):
function: Literal["timeseries"] = "timeseries"
timeinterpolation: TimeInterpolation = Field(alias="timeInterpolation")
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
_timeinterpolation_validator = get_enum_validator(
"timeinterpolation", enum=TimeInterpolation
)
class Harmonic(ForcingBase):
function: Literal["harmonic"] = "harmonic"
factor: float = Field(1.0, alias="factor")
class Astronomic(ForcingBase):
function: Literal["astronomic"] = "astronomic"
factor: float = Field(1.0, alias="factor")
class HarmonicCorrection(ForcingBase):
function: Literal["harmonic-correction"] = "harmonic-correction"
class AstronomicCorrection(ForcingBase):
function: Literal["astronomic-correction"] = "astronomic-correction"
class T3D(ForcingBase):
function: Literal["t3d"] = "t3d"
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
verticalpositions: List[float] = Field(alias="verticalPositions")
verticalinterpolation: VerticalInterpolation = Field(alias="verticalInterpolation")
verticalpositiontype: VerticalPositionType = Field(alias="verticalPositionType")
_verticalinterpolation_validator = get_enum_validator(
"verticalinterpolation", enum=VerticalInterpolation
)
_verticalpositiontype_validator = get_enum_validator(
"verticalpositiontype", enum=VerticalPositionType
)
class QHTable(ForcingBase):
function: Literal["qhtable"] = "qhtable"
class Constant(ForcingBase):
function: Literal["constant"] = "constant"
offset: float = Field(0.0, alias="offset")
factor: float = Field(1.0, alias="factor")
class ForcingGeneral(INIGeneral):
fileversion: str = Field("1.01", alias="fileVersion")
filetype: Literal["boundConds"] = Field("boundConds", alias="fileType")
class ForcingModel(INIModel):
general: ForcingGeneral = ForcingGeneral()
forcing: List[ForcingBase] = []
@classmethod
def _ext(cls) -> str:
return ".bc"
@classmethod
def _filename(cls) -> str:
return "boundaryconditions"
@classmethod
def _get_parser(cls) -> Callable:
return cls.parse
@classmethod
def parse(cls, filepath: Path):
# It's odd to have to disable parsing something as comments
# but also need to pass it to the *flattener*.
# This method now only supports per model settings, not per section.
parser = Parser(ParserConfig(parse_datablocks=True, parse_comments=False))
with filepath.open() as f:
for line in f:
parser.feed_line(line)
return parser.finalize().flatten(True, False)
def _serialize(self, _: dict) -> None:
# We skip the passed dict for a better one.
config = SerializerConfig(section_indent=0, property_indent=4)
write_ini(self._resolved_filepath, self._to_document(), config=config) | 0.905971 | 0.285746 |
def Breadth_First_Traversal(s, adj):
"""
Takes the starting node for breadth first traversal and the adjoint matrix
of the graph and returns the level of each node in the graph starting from
0 for the start node.
"""
level, parent = {}, {}
level[s] = 0
parent[s] = None
i = 1
frontier = [s]
while frontier:
next = []
for u in frontier:
for v in adj[u]:
if v not in level:
level[v] = i
parent[v] = u
next.append(v)
frontier = next
i += 1
return level
if __name__ == '__main__' :
from Graph import FormGraph
g = FormGraph("Graph")
g.add_node('S')
g.add_node('A')
g.add_node('B')
g.add_node('C')
g.add_node('D')
g.add_node('E')
g.add_node('F')
g.add_node('G')
#g.add_node('H')
g.add_edge('S', 'A', 0)
g.add_edge('S', 'C', 0)
g.add_edge('A', 'B', 0)
g.add_edge('C', 'D', 0)
g.add_edge('C', 'G', 0)
g.add_edge('D', 'E', 0)
g.add_edge('D', 'F', 0)
g.add_edge('D', 'G', 0)
g.add_edge('E', 'F', 0)
g.add_edge('F', 'G', 0)
adj = g.get_adj()
lvl = Breadth_First_Traversal('S', adj)
print('Nodes: {}'.format(g.get_nodes()))
print('Edges: {}'.format(g.get_edges()))
print('Graph\n{}'.format(g))
print('Adjoint Matrix of the graph: {}'.format(adj))
print('Levels of each node after breadth-first-traversal: {}'.format(lvl))
"""
OUTPUT
Nodes: ['S', 'A', 'B', 'C', 'D', 'E', 'F', 'G']
Edges: [('S', 'A'), ('A', 'S'), ('S', 'C'), ('C', 'S'), ('A', 'B'), ('B', 'A'),\
('C', 'D'), ('D', 'C'), ('C', 'G'), ('G', 'C'), ('D', 'E'), ('E', 'D'),\
('D', 'F'), ('F', 'D'), ('D', 'G'), ('G', 'D'), ('E', 'F'), ('F', 'E'),\
('F', 'G'), ('G', 'F')]
Graph
'S' -> ['A', 'C']
'A' -> ['S', 'B']
'B' -> ['A']
'C' -> ['S', 'D', 'G']
'D' -> ['C', 'E', 'F', 'G']
'E' -> ['D', 'F']
'F' -> ['D', 'E', 'G']
'G' -> ['C', 'D', 'F']
Adjoint Matrix of the graph: {'S': ['A', 'C'], 'A': ['S', 'B'], 'B': ['A'],\
'C': ['S', 'D', 'G'], 'D': ['C', 'E', 'F', 'G'], 'E': ['D', 'F'], \
'F': ['D', 'E', 'G'], 'G': ['C', 'D', 'F']}
Levels of each node after breadth-first-traversal: {'S': 0, 'A': 1, 'C': 1, \
'B': 2, 'D': 2, 'G': 2, 'E': 3, 'F': 3}""" | 6.006 Introduction to Algorithms/Breadth_First_Traversal.py | def Breadth_First_Traversal(s, adj):
"""
Takes the starting node for breadth first traversal and the adjoint matrix
of the graph and returns the level of each node in the graph starting from
0 for the start node.
"""
level, parent = {}, {}
level[s] = 0
parent[s] = None
i = 1
frontier = [s]
while frontier:
next = []
for u in frontier:
for v in adj[u]:
if v not in level:
level[v] = i
parent[v] = u
next.append(v)
frontier = next
i += 1
return level
if __name__ == '__main__' :
from Graph import FormGraph
g = FormGraph("Graph")
g.add_node('S')
g.add_node('A')
g.add_node('B')
g.add_node('C')
g.add_node('D')
g.add_node('E')
g.add_node('F')
g.add_node('G')
#g.add_node('H')
g.add_edge('S', 'A', 0)
g.add_edge('S', 'C', 0)
g.add_edge('A', 'B', 0)
g.add_edge('C', 'D', 0)
g.add_edge('C', 'G', 0)
g.add_edge('D', 'E', 0)
g.add_edge('D', 'F', 0)
g.add_edge('D', 'G', 0)
g.add_edge('E', 'F', 0)
g.add_edge('F', 'G', 0)
adj = g.get_adj()
lvl = Breadth_First_Traversal('S', adj)
print('Nodes: {}'.format(g.get_nodes()))
print('Edges: {}'.format(g.get_edges()))
print('Graph\n{}'.format(g))
print('Adjoint Matrix of the graph: {}'.format(adj))
print('Levels of each node after breadth-first-traversal: {}'.format(lvl))
"""
OUTPUT
Nodes: ['S', 'A', 'B', 'C', 'D', 'E', 'F', 'G']
Edges: [('S', 'A'), ('A', 'S'), ('S', 'C'), ('C', 'S'), ('A', 'B'), ('B', 'A'),\
('C', 'D'), ('D', 'C'), ('C', 'G'), ('G', 'C'), ('D', 'E'), ('E', 'D'),\
('D', 'F'), ('F', 'D'), ('D', 'G'), ('G', 'D'), ('E', 'F'), ('F', 'E'),\
('F', 'G'), ('G', 'F')]
Graph
'S' -> ['A', 'C']
'A' -> ['S', 'B']
'B' -> ['A']
'C' -> ['S', 'D', 'G']
'D' -> ['C', 'E', 'F', 'G']
'E' -> ['D', 'F']
'F' -> ['D', 'E', 'G']
'G' -> ['C', 'D', 'F']
Adjoint Matrix of the graph: {'S': ['A', 'C'], 'A': ['S', 'B'], 'B': ['A'],\
'C': ['S', 'D', 'G'], 'D': ['C', 'E', 'F', 'G'], 'E': ['D', 'F'], \
'F': ['D', 'E', 'G'], 'G': ['C', 'D', 'F']}
Levels of each node after breadth-first-traversal: {'S': 0, 'A': 1, 'C': 1, \
'B': 2, 'D': 2, 'G': 2, 'E': 3, 'F': 3}""" | 0.59749 | 0.530054 |
from collections import Counter, Iterator, namedtuple
from copy import copy
from functools import wraps
from operator import methodcaller
from os.path import isfile
from pprint import pprint
from time import clock
from .base import (PYTHON3, Frame, LogErr, LogInfo, Matrix, Series,
SeriesSet, auto_plus_one, is_iter, is_seq, is_str,
map, pickle, range, zip)
from .io import (parse_addr, parse_db, parse_excel, parse_html,
parse_mysql_server, parse_sav, parse_sql, write_db,
write_html, write_sql, write_txt, write_xls)
# Public API of this module.
__all__ = ['DataSet']
# Default for the ``log`` flag of DataSet.__init__ (timing via the timer decorator).
SHOW_LOG = True
def timer(func):
    """Decorator: run *func* and report its wall-clock time via ``LogInfo``
    when the instance's ``logging`` flag is on."""
    @wraps(func)
    def _timed(self, *args, **kwrds):
        began = clock()
        result = func(self, *args, **kwrds)
        if self.logging is True:
            LogInfo('%s() in %.3fs.' % (func.__name__, clock() - began))
        return result
    return _timed
def operater(callfunc):
    """Decorator: replace the stub *callfunc* with a wrapper that applies the
    SeriesSet method of the same name to every sheet, gathering the results
    into a fresh DataSet (dict/Counter results become one sheet per key)."""
    callfunc = getattr(SeriesSet, callfunc.__name__)
    @wraps(callfunc)
    def _sheetwise(self, *args, **kwrds):
        collected = DataSet()
        for sheet_name, sheet_data in zip(self._sheets, self._data):
            outcome = callfunc(sheet_data, *args, **kwrds)
            if isinstance(outcome, (SeriesSet, Series, list, tuple)):
                collected._add(outcome, sheet_name)
            elif isinstance(outcome, (dict, Counter)):
                for sub_name, sub_ret in outcome.items():
                    collected._add(sub_ret, sub_name)
        return collected
    return _sheetwise
class DataSet(object):
    '''An easy-to-use functional data structure similar to a MySQL database.

    DataSet is one of the fundamental data structures in DaPy. It lets
    users operate on any supported data structure in a uniform, Pythonic
    way, and it can log the time spent on each operation.

    Attributes
    ----------
    data : list
        a list storing all the sheets inside.
    sheets : list
        a list storing the names of each sheet.
    types : list
        a list storing the type of each sheet.

    Examples
    --------
    >>> import DaPy as dp
    >>> data = dp.DataSet([[1, 2, 3], [2, 3, 4]])
    >>> data.tocol()
    >>> data
    sheet:sheet0
    ============
    Col_0: <1, 2>
    Col_1: <2, 3>
    Col_2: <3, 4>
    >>> data.info
    sheet:sheet0
    ============
    1.  Structure: DaPy.SeriesSet
    2. Dimensions: Ln=2 | Col=3
    3. Miss Value: 0 elements
    4.   Describe:
     Title | Miss | Min | Max | Mean | Std  |Dtype
    -------+------+-----+-----+------+------+-----
     Col_0 |  0   |  1  |  2  | 1.50 | 0.71 | list
     Col_1 |  0   |  2  |  3  | 2.50 | 0.71 | list
     Col_2 |  0   |  3  |  4  | 3.50 | 0.71 | list
    ==============================================
    '''
    # NOTE(review): 'info' appears twice in this list and several names
    # (e.g. 'pick', 'show') have no matching method in the visible code --
    # confirm before relying on it.
    __all__ = ['data', 'columns', 'sheets','info', 'add', 'append', 'append_col', 'info',
               'count', 'count_element', 'pop_miss_value', 'size', 'shape',
               'extend', 'insert', 'insert_col', 'pick', 'pop', 'pop_col',
               'normalized', 'read', 'reverse', 'replace', 'shuffles','corr',
               'sort', 'save', 'tomat', 'toframe', 'tocol', 'show', 'log']
def __init__(self, obj=None, sheet='sheet0', log=SHOW_LOG):
'''
Parameter
---------
obj : array-like (default=None)
initialized your data from a data structure, such as dict(), list()
Frame(), SeriesSet(), Matrix(), DataSet().
sheet : str (default='sheet0')
the name of first sheet inside.
log : bool (default=True)
show the time consuming for each operation
'''
self.logging = log
if obj is None:
self._data = []
self._sheets = []
self._types = []
elif (not is_iter(obj)) and not isinstance(obj, str):
raise TypeError('DataSet can not store this object.')
elif isinstance(obj, DataSet):
self._data = copy(obj._data)
self._sheets = copy(obj._sheets)
self._types = copy(obj._types)
elif isinstance(obj, (Matrix, SeriesSet, Frame)):
self._data = [obj, ]
self._sheets = [str(sheet), ]
self._types = [type(sheet), ]
elif isinstance(sheet, str):
self._data = [obj, ]
self._sheets = [str(sheet), ]
self._types = [type(obj), ]
else:
self._data = list(obj)
self._sheets = list(map(str, sheet))
self._types = list(map(type, self._data))
if len(set(self._sheets)) != len(self._data):
raise ValueError("the number of sheets' names do not enough.")
@property
def data(self):
if len(self._data) == 1:
return self._data[0]
return self._data
@property
def columns(self):
'''names of columns of each table'''
if len(self._data) > 1:
new_ = list()
for i, data in enumerate(self._data):
if hasattr(data, 'columns'):
new_.append([self._sheets[i]] + data.columns)
else:
new_.append([self._sheets[i], None])
new_title = ['sheet name']
new_title.extend(['title_%d'%i for i in range(1, len(max(new_, key=len)))])
return SeriesSet(new_, new_title)
if len(self._data) == 1:
if hasattr(self._data[0], 'columns'):
return self._data[0].columns
return None
@property
def logging(self):
return self._log
@logging.setter
def logging(self, value):
if value is not True:
self._log = False
else:
self._log = True
@property
def level(self):
return len(self._data)
@columns.setter
def columns(self, value):
for data in self._data:
if hasattr(data, 'columns'):
data.columns = value
@property
def sheets(self):
return self._sheets
@sheets.setter
def sheets(self, other):
if isinstance(other, str):
self._sheets = [self._check_sheet_new_name(other) for i in range(len(self._sheets))]
elif is_iter(other):
if len(set(other)) == len(self._sheets):
self._sheets = []
self._sheets = [self._check_sheet_new_name(item) for item in other]
else:
raise ValueError('the names size does not match the size of '+\
'sheets inside the DataSet')
else:
raise ValueError('unrecognized symbol as %s'%other)
@property
def shape(self):
temp = SeriesSet(None, ['Level', 'Sheet', 'Ln', 'Col'], nan='-')
for i, (sheet, data) in enumerate(zip(self._sheets, self._data)):
if hasattr(data, 'shape'):
temp.append([i, sheet] + list(data.shape))
else:
temp.append((i, sheet, len(data)))
return temp
@property
def info(self):
for i, data in enumerate(self._data):
print('sheet:' + self._sheets[i])
print('=' * (len(self._sheets[i]) + 6))
if isinstance(data, (Frame, SeriesSet)):
data.info
else:
print('%s has no info() function'%type(data))
return None
    def __getattr__(self, name):
        # Sheet names act like attributes: ``data.sheet0`` returns that sheet.
        if name in self._sheets:
            return self.__getitem__(name)
        # Otherwise fan the attribute access out over every sheet that has
        # the attribute (or a column of that name): try calling it as a
        # method first, and fall back to the plain attribute value when the
        # call raises TypeError (e.g. a non-callable property or a column).
        temp = DataSet()
        for sheet, data in zip(self._sheets, self._data):
            if hasattr(data, name) or\
               (hasattr(data, 'columns') and name in data.columns):
                attr = methodcaller(name)
                try:
                    temp._add(attr(data), sheet)
                except TypeError:
                    temp._add(getattr(data, name), sheet)
        assert temp.level != 0, "DataSet has no sheet `%s`'" % name
        return temp
def _check_col_ind_str(self, ind):
assert ind in self._sheets, "'%s' is not a sheet name" % ind
return self._sheets.index(ind)
def _check_col_ind_int(self, ind):
if ind < 0:
sheet += self.level - 1
assert 0 <= ind < self.level, "'%s' is not exist." % ind
return ind
def _check_sheet_new_name(self, new_name):
new_name = str(new_name)
if not new_name:
return self._check_sheet_new_name('sheet_%d' % len(self._sheets))
if new_name not in self._sheets:
return new_name
return auto_plus_one(self._sheets, new_name)
    def _check_sheet_index_slice(self, i, j):
        # Translate a [i:j] slice over sheets into a list of sheet indexes.
        if is_str(i) or is_str(j):
            if i is not None:
                i = self._check_col_ind_str(i)
            # NOTE(review): this calls the *int* checker on ``j`` even though
            # this branch is taken when ``j`` may be a string -- presumably
            # ``_check_col_ind_str`` was intended; confirm against callers.
            if j is not None:
                j = self._check_col_ind_int(j)
            # shift both ends so a name-based slice includes its end sheet;
            # NOTE(review): this crashes when ``i`` or ``j`` is None at this
            # point -- verify the intended handling of open-ended slices.
            i = self._check_col_ind_int(i) + 1
            j = self._check_col_ind_int(j) + 1
        return range(len(self._sheets))[slice(i, j)]
def _check_sheet_index(self, sheet):
'''return a list of sheet indexes'''
if sheet is None:
return range(len(self._data))
if is_str(sheet):
return [self._check_col_ind_str(sheet)]
if isinstance(sheet, slice):
return self._check_sheet_index_slice(sheet.start, sheet.stop)
if isinstance(sheet, int):
return [self._check_col_ind_int(sheet)]
if isinstance(sheet, (list, tuple)):
return [self._check_sheet_index(_) for _ in sheet]
def __getstate__(self):
toreturn = self.__dict__.copy()
for key in toreturn:
if key not in ('_data', '_sheets', '_types'):
del toreturn[key]
return toreturn
def __setstate__(self, arg):
self._data = arg['_data']
self._sheets = arg['_sheets']
self._types = arg['_types']
def __contains__(self, e):
'''__contains__(e) -> e in DataSet
Determind that weather the object is a sheet name inside.
'''
if isinstance(e, str):
return e in self._sheets
return any([e == data for data in self._data])
def __repr__(self):
if len(self._data) == 0:
return 'empty DataSet object'
reprs = ''
for i, data in enumerate(self._data):
reprs += 'sheet:' + self._sheets[i] + '\n'
reprs += '=' * (len(self._sheets[i]) + 6) + '\n'
reprs += data.__repr__() + '\n\n'
return reprs[:-2]
def __len__(self):
if len(self._data) == 1:
if hasattr(self._data[0], 'shape'):
return self._data[0].shape[0]
return len(self._data[0])
return len(self._data)
    def __getitem__(self, key):
        # With a single sheet, keys that are not sheet names are forwarded
        # to that sheet (column name, row index, ...).
        if len(self._data) == 1 and (key not in self._sheets):
            return DataSet(self._data[0][key], self._sheets[0])
        if isinstance(key, slice):
            return self.__getslice__(key.start, key.stop)
        # NOTE(review): with several sheets, a sheet-name key falls through
        # and returns None here; sheet access normally goes through
        # __getattr__ -- confirm whether a lookup branch is missing.

    def __getslice__(self, i, j):
        # slice every sheet and wrap the results in a new DataSet
        return DataSet([_[i:j] for _ in self._data], self._sheets)
def __setitem__(self, key, val):
if len(self._data) == 1 and key not in self._sheets:
self._data[0].__setitem__(key, val)
return
if is_str(key):
if isinstance(val, DataSet):
for src, title in zip(val._data, val._sheets):
self._data.append(src)
self._types.append(type(src))
new_key = '%s_%s' % (key, title)
self._sheets.append(self._check_sheet_new_name(new_key))
return
if key not in self._sheets:
self._data.append(val)
self._types.append(type(val))
self._sheets.append(self._check_sheet_new_name(key))
return
if key in self._sheets:
key = self._sheets.index(key)
self._data[key] = val
self._types[key] = val
return
if isinstance(key, int):
assert abs(key) <= len(self._data), 'set index out of range'
self._data[key] = val
self._types[key] = type(val)
    def __delslice__(self, start, stop):
        # When neither bound is a sheet name, delete the row range inside
        # every sheet; otherwise drop the named range of sheets.
        if start not in self._sheets and stop not in self._sheets:
            for data in self._data:
                del data[start: stop]
            return
        # NOTE(review): ``_slice2int`` is not defined anywhere in this
        # module's visible code -- confirm it exists (or whether
        # ``_check_sheet_index_slice`` was intended).  Also note that only
        # ``_data`` is trimmed here, leaving ``_sheets``/``_types`` out of
        # sync -- verify.
        start, stop = self._slice2int(start, stop)
        del self._data[start: stop + 1]
def __delitem__(self, key):
if isinstance(key, slice):
self.__delslice__(key.start, key.stop)
elif key in self._sheets:
index = self._sheets.index(key)
del self._sheets[index], self._data[index], self._types[index]
elif isinstance(key, tuple):
for obj in key:
self.__delitem__(obj)
else:
for data in self._data:
data.__delitem__(key)
def __iter__(self):
if len(self._data) == 1:
for item in self._data[0]:
yield item
else:
for item in self._data:
yield item
def __reversed__(self):
if len(self._data) == 1:
self._data[0].reverse()
else:
self._data.reverse()
def _add(self, item, name):
if isinstance(item, DataSet):
name = '' if not name else name + '_'
new_sheets = [self._check_sheet_new_name(name + new) \
for new in item.sheets]
self._data.extend(item._data)
self._sheets.extend(new_sheets)
self._types.extend(item._types)
else:
self._data.append(item)
self._types.append(type(item))
self._sheets.append(self._check_sheet_new_name(name))
@timer
def add(self, items, names=None):
''' add a new sheet to the current dataset
Parameter
---------
item : object
the new sheet object
name : str or None ( default=None)
the new sheet name
Example
-------
>>> import DaPy as dp
>>> data2 = dp.DataSet([[1, 1, 1], [1, 1, 1]])
>>> data2
sheet:sheet0
============
Col_0 | Col_1 | Col_2
-------+-------+-------
1 | 1 | 1
1 | 1 | 1
>>> data.add(data2)
>>> data
sheet:sheet0
============
Col_0: <1, 2>
Col_1: <2, 3>
Col_2: <3, 4>
sheet:sheet0
============
Col_0 | Col_1 | Col_2
-------+-------+-------
1 | 1 | 1
1 | 1 | 1
'''
if not is_seq(items):
items = (items,)
if not is_seq(names):
names = (names,)
for item, name in zip(items, names):
self._add(item, name)
    # --- sheet-wise delegation stubs -------------------------------------
    # Each stub below is replaced wholesale by the ``operater`` decorator:
    # the ``pass`` body never runs.  ``operater`` looks up the SeriesSet
    # method with the same name and applies it to every sheet, so the
    # signatures here serve as documentation only.
    @timer
    @operater
    def apply(self, func, col=None, axis=0, *args, **kwrds):
        pass
    @timer
    @operater
    def append_row(self, item):
        pass
    @timer
    @operater
    def append_col(self, series, variable_name=None):
        pass
    @timer
    @operater
    def corr(self, method='pearson', col=None):
        pass
    @timer
    @operater
    def count(self, value, col=None, row=None):
        pass
    @timer
    @operater
    def copy(self):
        pass
    @timer
    @operater
    def count_values(self, col=None):
        pass
    @timer
    @operater
    def set_index(self, column):
        pass
    @timer
    @operater
    def get(self, key, default):
        pass
def get_tables(self, cols=None):
key = self._check_sheet_index(cols)
title = [self._sheets[_] for _ in key]
src = [self._data[_] for _ in key]
return DataSet(src, title)
    # Sheet-wise delegation stubs (see comment above apply()): the
    # ``operater`` decorator discards these bodies and signatures.
    @timer
    @operater
    def get_best_features(self, method='variance', X=None, Y=None, top_k=1, inplace=False):
        pass
    @timer
    @operater
    def get_categories(self, cols, cut_points, group_name, boundary=(False, True), inplace=False):
        pass
    @timer
    @operater
    def get_date_label(self, cols, daytime=True,
                       weekend=True, season=True, inplace=False):
        pass
    @timer
    @operater
    def get_interactions(self, n_power=3, cols=None, inplace=False):
        pass
    @timer
    @operater
    def get_ranks(self, cols=None, duplicate='mean', inplace=False):
        pass
    @timer
    @operater
    def get_dummies(self, col=None, value=1, inplace=False):
        pass
@timer
@operater
def get_nan_instrument(cols=None, inplace=False):
pass
    # Sheet-wise delegation stubs (bodies discarded by ``operater``).
    @timer
    @operater
    def get_numeric_label(self, cols=None, inplace=False):
        pass
    @timer
    @operater
    def groupby(self, keys, func=None, apply_col=None, unapply_col=None):
        pass
    @timer
    @operater
    def insert_row(self, index, item):
        pass
    @timer
    @operater
    def insert_col(self, index, series, variable_name=None):
        pass
    @timer
    @operater
    def dropna(self, axis=0, how='any', inplace=False):
        pass
    @timer
    @operater
    def select(self, where, col=None, limit=1000):
        pass
@timer
@operater
def pop(self, index=-1, aixs=0):
pass
    # Sheet-wise delegation stubs (bodies discarded by ``operater``).
    @timer
    @operater
    def pop_row(self, index=-1):
        pass
    @timer
    @operater
    def pop_col(self, col='all'):
        pass
    @timer
    @operater
    def query(self, expression, col=None, limit=1000):
        pass
    @timer
    @operater
    def extend(self, other):
        pass
    @timer
    @operater
    def join(self, other):
        pass
    @timer
    @operater
    def normalized(self, process='NORMAL', col=None, **kwrds):
        pass
    @timer
    @operater
    def map(self, func, cols=None, inplace=False):
        pass
    @timer
    @operater
    def merge(self, other, self_key=0, other_key=0, keep_key=True, keep_same=True):
        pass
    @timer
    @operater
    def drop(self, index=-1, axis=0, inplace=False):
        pass
    @timer
    @operater
    def drop_row(self, index=-1, axis=0, inplace=False):
        pass
    @timer
    @operater
    def drop_col(self, index=-1, axis=0, inplace=False):
        pass
    @timer
    @operater
    def fillna(self, fill_with=None, col=None, method=None, limit=None):
        pass
    @timer
    def read(self, addr, dtype='col', **kwrd):
        '''Load data from a file or server and add it as new sheet(s).

        Parameters
        ----------
        addr : str
            the address of a data file, or a statement like:
            "mysql://[username]:[password]@[server_ip]:[server_port]/[database_name]/[table1]/[table2]..."
            to access a MySQL database.  If the `table` part is missing
            from that address, all records will be loaded.
        ftype : str (default=None)
            the file type of this address
            `None` -> automatically analyse the file type
            "web" -> a website address, fetched with requests.get and
                     scanned for <table> tags with bs4.BeautifulSoup.
            "html" -> a local html file
            "db" -> SQLite3 database file
            "sav" -> SPSS data file
            "xls" -> Excel data file
            "csv" -> text file with ',' as delimiter
            "txt" -> text file with '\\t' as delimiter
            "pkl" -> Python pickle file
            "sql" -> MySQL database commands file
            "mysql" -> MySQL database server
        sheet_name : str (default=None)
            the sheet name of the new table.
        miss_symbol : str or list of str (default=['?', '??', '', ' ', 'NA', 'None'])
            the missing-value symbols in the data file.
        nan : value (default=nan)
            the missing-value symbol in the new data set.
        first_line : int (default=1)
            the first line which includes data values in this file.
        title_line : int (default=0)
            the line which includes the data's column names.
            tip: if there is no title in your data, use -1 and one will
            be created automatically.
        sep : str (default=",")
            the delimiter symbol inside.
        dtypes : type name in str or dict of columns (default=None):
            DaPy automatically transfers source text into the most
            suitable data type, but some conversions are slow (e.g.
            parsing "2018-1-1" into a datetime).  Set a column to "str"
            to skip the conversion.  Supported: "int", "float", "str",
            "datetime" and "bool".
            >>> read("addr.csv", dtypes={'A_col': int, 'B_col': float})
            >>> read("addr.csv", dtypes="float")
            >>> read("addr.csv", dtypes=["float", "int"])

        Examples
        --------
        >>> import DaPy as dp
        >>> data = dp.read('your_data_file.csv')
        >>> data.read('another_data_file.xlsx')
        '''
        nan = kwrd.get('nan', float('nan'))
        sheet_name = kwrd.get('sheet_name', None)
        miss_symbol = kwrd.get('miss_symbol', set(['?', '??', '', ' ', 'NA', 'None']))
        # split the address into path / name / basename / extension;
        # the extension doubles as the default source type
        fpath, fname, fbase, ftype = parse_addr(addr)
        ftype = kwrd.get('ftype', ftype)
        assert ftype in ('web', 'html', 'htm', 'db', 'sav', 'xls', 'xlsx', 'csv', 'txt', 'pkl', 'sql', 'mysql')
        if ftype not in ('web', 'html', 'htm', 'mysql') and not isfile(addr):
            raise IOError('can not find the target file or auto analysis data source type failed')
        if sheet_name is None:
            sheet_name = fbase
        if ftype == 'db':
            # SQLite local database: one sheet per table
            try:
                import sqlite3 as sql3
            except ImportError:
                raise ImportError('DaPy uses "sqlite3" to access a database local file.')
            with sql3.connect(addr) as conn:
                cur = conn.cursor()
                for sheet, name in parse_db(cur, dtype, nan):
                    self._add(sheet, name)
        elif ftype == 'sav':
            # SPSS data file (third-party reader)
            try:
                import savReaderWriter
            except ImportError:
                raise ImportError('DaPy uses "savReaderWriter" to open a .sav file, '+\
                                  'please try command: pip install savReaderWriter.')
            with savReaderWriter.SavReader(addr) as reader:
                self._add(parse_sav(reader, dtype, nan), sheet_name)
        elif ftype == 'xls' or ftype == 'xlsx':
            first_line = kwrd.get('first_line', 1)
            title_line = kwrd.get('title_line', 0)
            for sheet, name in parse_excel(dtype, addr, first_line, title_line, nan):
                self._add(sheet, name)
        elif ftype in ('txt', 'csv'):
            # plain-text table; delimiter defaults per extension
            kwrd['sep'] = kwrd.get('sep', {'csv':',', 'txt':'\t'}[ftype])
            dtype_dic = {'COL': SeriesSet, 'SERIESSET': SeriesSet,
                         'MATRIX': Matrix, 'MAT': Matrix}
            dtype = dtype_dic.get(dtype.upper(), SeriesSet)
            self._add(dtype.from_file(addr, **kwrd), sheet_name)
        elif ftype == 'pkl':
            self._add(pickle.load(open(addr, 'rb')), sheet_name)
        elif ftype in ('html', 'htm', 'web'):
            if ftype == 'web':
                try:
                    from requests import get
                except ImportError:
                    raise ImportError('DaPy uses "reqeusts" to load a website.')
                else:
                    text = get(addr).text
            else:
                with open(addr) as doc:
                    text = doc.read()
            assert '<table' in text, 'there is no tag <table> in the html file.'
            for sheet, name in parse_html(text, dtype, miss_symbol, nan, sheet_name):
                self._add(sheet, name)
            # NOTE(review): only the html and sql branches return self; the
            # other branches implicitly return None -- confirm if intended.
            return self
        elif ftype == 'mysql':
            # address pattern: user:password@host:port/database/table...
            user, psd = fpath.split(':')
            host, port = fbase.split(':')
            try:
                import pymysql as sql
            except ImportError:
                try:
                    import MySQLdb as sql
                except ImportError:
                    raise ImportError('DaPy uses "pymysql" or "MySQLdb" libraries to access a database server.')
            with sql.connect(host=host, port=int(port), user=user, passwd=psd, db=fname[0], charset='utf8') as cur:
                for sheet, name in parse_mysql_server(cur, fname):
                    self._add(sheet, name)
        elif ftype == 'sql':
            with open(addr) as doc:
                for sheet, name in parse_sql(doc, nan):
                    self._add(sheet, name)
            return self
        else:
            # unreachable in practice: the assert above already restricts
            # ftype to the supported set
            raise ValueError('DaPy singly supports file types as'+\
                             '(xls, xlsx, csv, txt, pkl, db, sav, html, htm).')
@timer
@operater
def reshape(self, nshape):
pass
@timer
def reverse(self, axis='sheet'):
'''Reverse your data set or records.
Parameters
----------
axis : str (default='sheet')
settle down reverse sheets or records in each sheet.
Example
-------
>>> import DaPy as dp
>>> data = dp.DataSet([[1,2,3,4],
[2,3,4,5],
[3,4,5,6],
[4,5,6,7],
[5,6,7,8]])
>>> data.tocol()
>>> data.reverse()
'''
if axis.upper() == 'SHEET':
self._data.reverse()
self._sheets.reverse()
self._types.reverse()
return
if axis.upper() == 'RECORD':
for data in self._data:
if hasattr(data, 'reverse'):
data.reverse(axis)
return
raise AttributeError('axis should be "sheet" or "record"')
@timer
@operater
def replace(self, old, new, col=None, regex=False, sheet=None):
pass
@timer
@operater
def shuffle(self):
pass
@timer
@operater
def sort(self, *orders):
pass
@timer
def save(self, addr, **kwrds):
'''Save the DataSet to a file.
Parameters
----------
addr : str
the output file address.
encode : str (default='utf-8')
saving the file in such code type
ftype : str
the file type you want to save as. Use the file type in
your address as default. For example, 'data.save("test.csv")'
means save this object into .csv type. DaPy supports
following file types since V1.5.1:
.csv, .txt, .xls, .pkl, .db, .html
newline : str (default='\n')
use this simble to mark change line.
delimiter : str (default=',')
use this simble to seperate a records.
if_exists : str (default='fail')
when saving the data into a exist database file, how to face the
delimma that the sheet name has been existed in the database.
'fail' -> raise an error;
'replace' -> replace the exist table with current data;
'append' -> append these records to the exist sheet
'
'''
fpath, fname, fbase, ftype = parse_addr(addr)
encode = kwrds.get('encode', 'utf-8')
ftype = kwrds.get('ftype', ftype)
if ftype in ('csv', 'txt'):
newline = kwrds.get('newline', '\n')
delimiter = kwrds.get('delimiter', ',')
para = dict(mode='w', buffering=2048)
if PYTHON3:
para['encoding'] = encode
para['file'] = addr
else:
para['name'] = addr
for data, sheet in zip(self._data, self._sheets):
if data is None:
continue
if len(self._data) > 1:
addr = fpath + fbase + '_' + sheet + '.' + ftype
f = open(**para)
try:
write_txt(f, data, newline, delimiter)
finally:
f.close()
elif ftype in ('xls', 'xlsx'):
try:
import xlwt
except ImportError:
raise ImportError('DaPy uses xlwt library to save a `xls/xlsx` file.')
workbook = xlwt.Workbook(encoding=encode)
for sheet, data in zip(self._sheets, self._data):
if not data:
continue
worksheet = workbook.add_sheet(sheet)
write_xls(worksheet, data)
workbook.save(addr)
elif ftype == 'pkl':
pickle.dump(self, open(addr, 'wb'))
elif ftype == 'db':
import sqlite3 as sql
with sql.connect(addr) as conn:
for data, sheet in zip(self._data, self._sheets):
write_db(conn.cursor(), sheet, data, kwrds.get('if_exists', 'fail'), 'sqlite3')
elif ftype == 'html':
with open(addr, 'w') as f:
for data, sheet in zip(self._data, self._sheets):
if not data:
continue
f.write('<table border="1" class="%s">' % sheet)
write_html(f, data)
f.write('</table>')
elif ftype == 'sql':
with open(addr, 'w') as doc:
for name, sheet in zip(self._sheets, self._data):
write_sql(doc, sheet, name)
elif ftype == 'mysql':
try:
import pymysql as sql
except ImportError:
try:
import MySQLdb as sql
except ImportError:
raise ImportError('DaPy uses "pymysql" or "MySQLdb" libraries to access a database server.')
user, psd = fpath.split(':')
host, port = fbase.split(':')
with sql.connect(host=host, port=int(port), user=user, passwd=<PASSWORD>, db=fname[0], charset='utf8') as conn:
for data, sheet in zip(self._data, self._sheets):
write_db(conn, sheet, data, kwrds.get('if_exists', 'fail'), 'mysql')
else:
raise ValueError('unrecognized file type')
@timer
@operater
def todict(self):
pass
@timer
def tocol(self):
'''Transform all of the stored data structure to DaPy.SeriesSet
'''
for i, data in enumerate(self._data):
if isinstance(data, SeriesSet):
continue
try:
if hasattr(data, 'columns'):
if hasattr(data, 'miss_symbol'):
self._data[i] = SeriesSet(data, list(data.columns),
miss_value=data.miss_symbol)
else:
self._data[i] = SeriesSet(data, data.columns)
else:
self._data[i] = SeriesSet(data)
except Exception as e:
LogErr('sheet[%s] can not transform to SeriesSet, ' % self._sheets[i] +\
'because: %s' % e)
self._types[i] = SeriesSet
@timer
def tomat(self):
'''Transform all of the stored data structure to DaPy.Matrix
'''
for i, data in enumerate(self._data):
if isinstance(data, Matrix):
continue
try:
self._data[i] = Matrix(data)
except:
LogErr('sheet:%s can not transform to Matrix.'%self._sheets[i])
self._types[i] = Matrix
@timer
@operater
def tolist(self):
pass
@timer
@operater
def toarray(self):
pass
def show(self, max_lines=None, max_display=75, max_col_size=25, multi_line=True):
'''show(lines=None) -> None
See Also
--------
DaPy.SeriesSet.show
'''
for i, data in enumerate(self._data):
print('sheet:' + self._sheets[i])
print('=' * (len(self._sheets[i]) + 6))
if hasattr(data, 'show'):
data.show(max_lines, max_display, max_col_size, multi_line)
else:
pprint(data.__repr__())
if __name__ == '__main__':
from doctest import testmod
testmod() | DaPy/core/DataSet.py | from collections import Counter, Iterator, namedtuple
from copy import copy
from functools import wraps
from operator import methodcaller
from os.path import isfile
from pprint import pprint
from time import clock
from .base import (PYTHON3, Frame, LogErr, LogInfo, Matrix, Series,
SeriesSet, auto_plus_one, is_iter, is_seq, is_str,
map, pickle, range, zip)
from .io import (parse_addr, parse_db, parse_excel, parse_html,
parse_mysql_server, parse_sav, parse_sql, write_db,
write_html, write_sql, write_txt, write_xls)
__all__ = ['DataSet']
SHOW_LOG = True
def timer(func):
@wraps(func)
def timer_func(self, *args, **kwrds):
start = clock()
ret = func(self, *args, **kwrds)
if self.logging is True:
name, spent = func.__name__, clock() - start
LogInfo('%s() in %.3fs.' % (name, spent))
return ret
return timer_func
def operater(callfunc):
callfunc = getattr(SeriesSet, callfunc.__name__)
@wraps(callfunc)
def operate_func(self, *args, **kwrds):
ret_set = DataSet()
for name, sheet in zip(self._sheets, self._data):
ret = callfunc(sheet, *args, **kwrds)
if isinstance(ret, (SeriesSet, Series, list, tuple)):
ret_set._add(ret, name)
elif isinstance(ret, (dict, Counter)):
for name_, ret_ in ret.items():
ret_set._add(ret_, name_)
return ret_set
return operate_func
class DataSet(object):
'''A easy-to-use functional data structure similar to MySQL database
DataSet is one of the fundamantal data structure in DaPy.
It supports users easily to opearte any data structure in
a same way with Pythonic Syntax. Additionally, it has
logging function.
Attrbutes
---------
data : list
a list stored all the sheets inside.
sheets : list
a list stored all the names of each sheet.
types : list
the list stored all the type of each sheet.
Examples
--------
>>> import DaPy as dp
>>> data = dp.DataSet([[1, 2, 3], [2, 3, 4]])
>>> data.tocol()
>>> data
sheet:sheet0
============
Col_0: <1, 2>
Col_1: <2, 3>
Col_2: <3, 4>
>>> data.info
sheet:sheet0
============
1. Structure: DaPy.SeriesSet
2. Dimensions: Ln=2 | Col=3
3. Miss Value: 0 elements
4. Describe:
Title | Miss | Min | Max | Mean | Std |Dtype
-------+------+-----+-----+------+------+-----
Col_0 | 0 | 1 | 2 | 1.50 | 0.71 | list
Col_1 | 0 | 2 | 3 | 2.50 | 0.71 | list
Col_2 | 0 | 3 | 4 | 3.50 | 0.71 | list
==============================================
'''
__all__ = ['data', 'columns', 'sheets','info', 'add', 'append', 'append_col', 'info',
'count', 'count_element', 'pop_miss_value', 'size', 'shape',
'extend', 'insert', 'insert_col', 'pick', 'pop', 'pop_col',
'normalized', 'read', 'reverse', 'replace', 'shuffles','corr',
'sort', 'save', 'tomat', 'toframe', 'tocol', 'show', 'log']
def __init__(self, obj=None, sheet='sheet0', log=SHOW_LOG):
'''
Parameter
---------
obj : array-like (default=None)
initialized your data from a data structure, such as dict(), list()
Frame(), SeriesSet(), Matrix(), DataSet().
sheet : str (default='sheet0')
the name of first sheet inside.
log : bool (default=True)
show the time consuming for each operation
'''
self.logging = log
if obj is None:
self._data = []
self._sheets = []
self._types = []
elif (not is_iter(obj)) and not isinstance(obj, str):
raise TypeError('DataSet can not store this object.')
elif isinstance(obj, DataSet):
self._data = copy(obj._data)
self._sheets = copy(obj._sheets)
self._types = copy(obj._types)
elif isinstance(obj, (Matrix, SeriesSet, Frame)):
self._data = [obj, ]
self._sheets = [str(sheet), ]
self._types = [type(sheet), ]
elif isinstance(sheet, str):
self._data = [obj, ]
self._sheets = [str(sheet), ]
self._types = [type(obj), ]
else:
self._data = list(obj)
self._sheets = list(map(str, sheet))
self._types = list(map(type, self._data))
if len(set(self._sheets)) != len(self._data):
raise ValueError("the number of sheets' names do not enough.")
@property
def data(self):
if len(self._data) == 1:
return self._data[0]
return self._data
@property
def columns(self):
'''names of columns of each table'''
if len(self._data) > 1:
new_ = list()
for i, data in enumerate(self._data):
if hasattr(data, 'columns'):
new_.append([self._sheets[i]] + data.columns)
else:
new_.append([self._sheets[i], None])
new_title = ['sheet name']
new_title.extend(['title_%d'%i for i in range(1, len(max(new_, key=len)))])
return SeriesSet(new_, new_title)
if len(self._data) == 1:
if hasattr(self._data[0], 'columns'):
return self._data[0].columns
return None
@property
def logging(self):
return self._log
@logging.setter
def logging(self, value):
if value is not True:
self._log = False
else:
self._log = True
@property
def level(self):
return len(self._data)
@columns.setter
def columns(self, value):
for data in self._data:
if hasattr(data, 'columns'):
data.columns = value
@property
def sheets(self):
return self._sheets
@sheets.setter
def sheets(self, other):
if isinstance(other, str):
self._sheets = [self._check_sheet_new_name(other) for i in range(len(self._sheets))]
elif is_iter(other):
if len(set(other)) == len(self._sheets):
self._sheets = []
self._sheets = [self._check_sheet_new_name(item) for item in other]
else:
raise ValueError('the names size does not match the size of '+\
'sheets inside the DataSet')
else:
raise ValueError('unrecognized symbol as %s'%other)
@property
def shape(self):
temp = SeriesSet(None, ['Level', 'Sheet', 'Ln', 'Col'], nan='-')
for i, (sheet, data) in enumerate(zip(self._sheets, self._data)):
if hasattr(data, 'shape'):
temp.append([i, sheet] + list(data.shape))
else:
temp.append((i, sheet, len(data)))
return temp
@property
def info(self):
for i, data in enumerate(self._data):
print('sheet:' + self._sheets[i])
print('=' * (len(self._sheets[i]) + 6))
if isinstance(data, (Frame, SeriesSet)):
data.info
else:
print('%s has no info() function'%type(data))
return None
def __getattr__(self, name):
if name in self._sheets:
return self.__getitem__(name)
temp = DataSet()
for sheet, data in zip(self._sheets, self._data):
if hasattr(data, name) or\
(hasattr(data, 'columns') and name in data.columns):
attr = methodcaller(name)
try:
temp._add(attr(data), sheet)
except TypeError:
temp._add(getattr(data, name), sheet)
assert temp.level != 0, "DataSet has no sheet `%s`'" % name
return temp
def _check_col_ind_str(self, ind):
assert ind in self._sheets, "'%s' is not a sheet name" % ind
return self._sheets.index(ind)
def _check_col_ind_int(self, ind):
if ind < 0:
sheet += self.level - 1
assert 0 <= ind < self.level, "'%s' is not exist." % ind
return ind
def _check_sheet_new_name(self, new_name):
new_name = str(new_name)
if not new_name:
return self._check_sheet_new_name('sheet_%d' % len(self._sheets))
if new_name not in self._sheets:
return new_name
return auto_plus_one(self._sheets, new_name)
def _check_sheet_index_slice(self, i, j):
if is_str(i) or is_str(j):
if i is not None:
i = self._check_col_ind_str(i)
if j is not None:
j = self._check_col_ind_int(j)
i = self._check_col_ind_int(i) + 1
j = self._check_col_ind_int(j) + 1
return range(len(self._sheets))[slice(i, j)]
def _check_sheet_index(self, sheet):
'''return a list of sheet indexes'''
if sheet is None:
return range(len(self._data))
if is_str(sheet):
return [self._check_col_ind_str(sheet)]
if isinstance(sheet, slice):
return self._check_sheet_index_slice(sheet.start, sheet.stop)
if isinstance(sheet, int):
return [self._check_col_ind_int(sheet)]
if isinstance(sheet, (list, tuple)):
return [self._check_sheet_index(_) for _ in sheet]
def __getstate__(self):
toreturn = self.__dict__.copy()
for key in toreturn:
if key not in ('_data', '_sheets', '_types'):
del toreturn[key]
return toreturn
def __setstate__(self, arg):
self._data = arg['_data']
self._sheets = arg['_sheets']
self._types = arg['_types']
def __contains__(self, e):
'''__contains__(e) -> e in DataSet
Determind that weather the object is a sheet name inside.
'''
if isinstance(e, str):
return e in self._sheets
return any([e == data for data in self._data])
def __repr__(self):
if len(self._data) == 0:
return 'empty DataSet object'
reprs = ''
for i, data in enumerate(self._data):
reprs += 'sheet:' + self._sheets[i] + '\n'
reprs += '=' * (len(self._sheets[i]) + 6) + '\n'
reprs += data.__repr__() + '\n\n'
return reprs[:-2]
def __len__(self):
if len(self._data) == 1:
if hasattr(self._data[0], 'shape'):
return self._data[0].shape[0]
return len(self._data[0])
return len(self._data)
def __getitem__(self, key):
if len(self._data) == 1 and (key not in self._sheets):
return DataSet(self._data[0][key], self._sheets[0])
if isinstance(key, slice):
return self.__getslice__(key.start, key.stop)
def __getslice__(self, i, j):
return DataSet([_[i:j] for _ in self._data], self._sheets)
def __setitem__(self, key, val):
if len(self._data) == 1 and key not in self._sheets:
self._data[0].__setitem__(key, val)
return
if is_str(key):
if isinstance(val, DataSet):
for src, title in zip(val._data, val._sheets):
self._data.append(src)
self._types.append(type(src))
new_key = '%s_%s' % (key, title)
self._sheets.append(self._check_sheet_new_name(new_key))
return
if key not in self._sheets:
self._data.append(val)
self._types.append(type(val))
self._sheets.append(self._check_sheet_new_name(key))
return
if key in self._sheets:
key = self._sheets.index(key)
self._data[key] = val
self._types[key] = val
return
if isinstance(key, int):
assert abs(key) <= len(self._data), 'set index out of range'
self._data[key] = val
self._types[key] = type(val)
def __delslice__(self, start, stop):
if start not in self._sheets and stop not in self._sheets:
for data in self._data:
del data[start: stop]
return
start, stop = self._slice2int(start, stop)
del self._data[start: stop + 1]
def __delitem__(self, key):
if isinstance(key, slice):
self.__delslice__(key.start, key.stop)
elif key in self._sheets:
index = self._sheets.index(key)
del self._sheets[index], self._data[index], self._types[index]
elif isinstance(key, tuple):
for obj in key:
self.__delitem__(obj)
else:
for data in self._data:
data.__delitem__(key)
def __iter__(self):
if len(self._data) == 1:
for item in self._data[0]:
yield item
else:
for item in self._data:
yield item
def __reversed__(self):
if len(self._data) == 1:
self._data[0].reverse()
else:
self._data.reverse()
def _add(self, item, name):
if isinstance(item, DataSet):
name = '' if not name else name + '_'
new_sheets = [self._check_sheet_new_name(name + new) \
for new in item.sheets]
self._data.extend(item._data)
self._sheets.extend(new_sheets)
self._types.extend(item._types)
else:
self._data.append(item)
self._types.append(type(item))
self._sheets.append(self._check_sheet_new_name(name))
@timer
def add(self, items, names=None):
''' add a new sheet to the current dataset
Parameter
---------
item : object
the new sheet object
name : str or None ( default=None)
the new sheet name
Example
-------
>>> import DaPy as dp
>>> data2 = dp.DataSet([[1, 1, 1], [1, 1, 1]])
>>> data2
sheet:sheet0
============
Col_0 | Col_1 | Col_2
-------+-------+-------
1 | 1 | 1
1 | 1 | 1
>>> data.add(data2)
>>> data
sheet:sheet0
============
Col_0: <1, 2>
Col_1: <2, 3>
Col_2: <3, 4>
sheet:sheet0
============
Col_0 | Col_1 | Col_2
-------+-------+-------
1 | 1 | 1
1 | 1 | 1
'''
if not is_seq(items):
items = (items,)
if not is_seq(names):
names = (names,)
for item, name in zip(items, names):
self._add(item, name)
@timer
@operater
def apply(self, func, col=None, axis=0, *args, **kwrds):
pass
@timer
@operater
def append_row(self, item):
pass
@timer
@operater
def append_col(self, series, variable_name=None):
pass
@timer
@operater
def corr(self, method='pearson', col=None):
pass
@timer
@operater
def count(self, value, col=None, row=None):
pass
@timer
@operater
def copy(self):
pass
@timer
@operater
def count_values(self, col=None):
pass
@timer
@operater
def set_index(self, column):
pass
@timer
@operater
def get(self, key, default):
pass
def get_tables(self, cols=None):
key = self._check_sheet_index(cols)
title = [self._sheets[_] for _ in key]
src = [self._data[_] for _ in key]
return DataSet(src, title)
@timer
@operater
def get_best_features(self, method='variance', X=None, Y=None, top_k=1, inplace=False):
pass
@timer
@operater
def get_categories(self, cols, cut_points, group_name, boundary=(False, True), inplace=False):
pass
@timer
@operater
def get_date_label(self, cols, daytime=True,
weekend=True, season=True, inplace=False):
pass
@timer
@operater
def get_interactions(self, n_power=3, cols=None, inplace=False):
pass
@timer
@operater
def get_ranks(self, cols=None, duplicate='mean', inplace=False):
pass
@timer
@operater
def get_dummies(self, col=None, value=1, inplace=False):
pass
@timer
@operater
def get_nan_instrument(cols=None, inplace=False):
pass
@timer
@operater
def get_numeric_label(self, cols=None, inplace=False):
pass
@timer
@operater
def groupby(self, keys, func=None, apply_col=None, unapply_col=None):
pass
@timer
@operater
def insert_row(self, index, item):
pass
@timer
@operater
def insert_col(self, index, series, variable_name=None):
pass
@timer
@operater
def dropna(self, axis=0, how='any', inplace=False):
pass
@timer
@operater
def select(self, where, col=None, limit=1000):
pass
@timer
@operater
def pop(self, index=-1, aixs=0):
pass
@timer
@operater
def pop_row(self, index=-1):
pass
@timer
@operater
def pop_col(self, col='all'):
pass
@timer
@operater
def query(self, expression, col=None, limit=1000):
pass
@timer
@operater
def extend(self, other):
pass
@timer
@operater
def join(self, other):
pass
@timer
@operater
def normalized(self, process='NORMAL', col=None, **kwrds):
pass
@timer
@operater
def map(self, func, cols=None, inplace=False):
pass
@timer
@operater
def merge(self, other, self_key=0, other_key=0, keep_key=True, keep_same=True):
pass
@timer
@operater
def drop(self, index=-1, axis=0, inplace=False):
pass
@timer
@operater
def drop_row(self, index=-1, axis=0, inplace=False):
pass
@timer
@operater
def drop_col(self, index=-1, axis=0, inplace=False):
pass
@timer
@operater
def fillna(self, fill_with=None, col=None, method=None, limit=None):
pass
@timer
def read(self, addr, dtype='col', **kwrd):
'''This function could be used with loading data from a file and
transform it into one of DaPy data structure.
Parameters
----------
addr : str
the address of data file or a statement like:
"mysql://[username]:[password]@[server_ip]:[server_port]/[database_name]/[table1]/[table2]..."
to access a mysql database. Attention, if `table` keyword is missing
in this address, all records will be loaded.
ftype : str (default=None)
the file type of this address
`None` -> automtotally analysis the file type
"web" -> a website address, it will use requests.get to load the website
then use bs4.BeautifulSoup to find <table> tag in the file.
"html" -> a local html file
"db" -> SQLite3 database file
"sav" -> SPSS data file
"xls" -> Excel data file
"csv" -> Text file with ',' as delimeters
"txt" -> Text file with ' ' as delimeters
"pkl" -> Python pickle file
"sql" -> MySQL database commands file
"mysql" -> MySQL database Server
sheet_name : str (default=None)
the sheet name of new table.
miss_symbol : str or str in list (default=['?', '??', '', ' ', 'NA', 'None'])
the miss value symbol in this data file.
nan : value (default=nan)
the miss value symbol in your new data set.
first_line : int (default=1)
the first line which includes data values in this file.
title_line : int (default=0)
the line which includes your data's column names.
tip: if there is no title in your data, used -1 represented,
and, it will automatic create it.
sep : str (default=",")
the delimiter symbol inside.
dtypes : type name in str or dict of columns (default=None):
DaPy autometally transfers str source text into the most
suitable data type in efficiency. However, some of process costs
long time. For example, "2018-1-1" is a datetime label and
DaPy spends a long time time to transfer this label into datetime.
Thus, in some cases, you don't need it in datetime, so just set this column
type into "str" to save time. The supported data types are "int",
"float", "str", "datetime" and "bool".
use this keyword as following samples
>>> read("addr.csv", dtypes={'A_col': int, 'B_col': float})
>>> read("addr.csv", dtypes="float")
>>> read("addr.csv", dtypes=["float", "int"])
Examples
--------
>>> import DaPy as dp
>>> data = dp.read('your_data_file.csv')
>>> data.read('another_data_file.xlsx')
'''
nan = kwrd.get('nan', float('nan'))
sheet_name = kwrd.get('sheet_name', None)
miss_symbol = kwrd.get('miss_symbol', set(['?', '??', '', ' ', 'NA', 'None']))
fpath, fname, fbase, ftype = parse_addr(addr)
ftype = kwrd.get('ftype', ftype)
assert ftype in ('web', 'html', 'htm', 'db', 'sav', 'xls', 'xlsx', 'csv', 'txt', 'pkl', 'sql', 'mysql')
if ftype not in ('web', 'html', 'htm', 'mysql') and not isfile(addr):
raise IOError('can not find the target file or auto analysis data source type failed')
if sheet_name is None:
sheet_name = fbase
if ftype == 'db':
try:
import sqlite3 as sql3
except ImportError:
raise ImportError('DaPy uses "sqlite3" to access a database local file.')
with sql3.connect(addr) as conn:
cur = conn.cursor()
for sheet, name in parse_db(cur, dtype, nan):
self._add(sheet, name)
elif ftype == 'sav':
try:
import savReaderWriter
except ImportError:
raise ImportError('DaPy uses "savReaderWriter" to open a .sav file, '+\
'please try command: pip install savReaderWriter.')
with savReaderWriter.SavReader(addr) as reader:
self._add(parse_sav(reader, dtype, nan), sheet_name)
elif ftype == 'xls' or ftype == 'xlsx':
first_line = kwrd.get('first_line', 1)
title_line = kwrd.get('title_line', 0)
for sheet, name in parse_excel(dtype, addr, first_line, title_line, nan):
self._add(sheet, name)
elif ftype in ('txt', 'csv'):
kwrd['sep'] = kwrd.get('sep', {'csv':',', 'txt':'\t'}[ftype])
dtype_dic = {'COL': SeriesSet, 'SERIESSET': SeriesSet,
'MATRIX': Matrix, 'MAT': Matrix}
dtype = dtype_dic.get(dtype.upper(), SeriesSet)
self._add(dtype.from_file(addr, **kwrd), sheet_name)
elif ftype == 'pkl':
self._add(pickle.load(open(addr, 'rb')), sheet_name)
elif ftype in ('html', 'htm', 'web'):
if ftype == 'web':
try:
from requests import get
except ImportError:
raise ImportError('DaPy uses "reqeusts" to load a website.')
else:
text = get(addr).text
else:
with open(addr) as doc:
text = doc.read()
assert '<table' in text, 'there is no tag <table> in the html file.'
for sheet, name in parse_html(text, dtype, miss_symbol, nan, sheet_name):
self._add(sheet, name)
return self
elif ftype == 'mysql':
user, psd = fpath.split(':')
host, port = fbase.split(':')
try:
import pymysql as sql
except ImportError:
try:
import MySQLdb as sql
except ImportError:
raise ImportError('DaPy uses "pymysql" or "MySQLdb" libraries to access a database server.')
with sql.connect(host=host, port=int(port), user=user, passwd=psd, db=fname[0], charset='utf8') as cur:
for sheet, name in parse_mysql_server(cur, fname):
self._add(sheet, name)
elif ftype == 'sql':
with open(addr) as doc:
for sheet, name in parse_sql(doc, nan):
self._add(sheet, name)
return self
else:
raise ValueError('DaPy singly supports file types as'+\
'(xls, xlsx, csv, txt, pkl, db, sav, html, htm).')
@timer
@operater
def reshape(self, nshape):
pass
@timer
def reverse(self, axis='sheet'):
'''Reverse your data set or records.
Parameters
----------
axis : str (default='sheet')
settle down reverse sheets or records in each sheet.
Example
-------
>>> import DaPy as dp
>>> data = dp.DataSet([[1,2,3,4],
[2,3,4,5],
[3,4,5,6],
[4,5,6,7],
[5,6,7,8]])
>>> data.tocol()
>>> data.reverse()
'''
if axis.upper() == 'SHEET':
self._data.reverse()
self._sheets.reverse()
self._types.reverse()
return
if axis.upper() == 'RECORD':
for data in self._data:
if hasattr(data, 'reverse'):
data.reverse(axis)
return
raise AttributeError('axis should be "sheet" or "record"')
@timer
@operater
def replace(self, old, new, col=None, regex=False, sheet=None):
pass
@timer
@operater
def shuffle(self):
pass
@timer
@operater
def sort(self, *orders):
pass
@timer
def save(self, addr, **kwrds):
'''Save the DataSet to a file.
Parameters
----------
addr : str
the output file address.
encode : str (default='utf-8')
saving the file in such code type
ftype : str
the file type you want to save as. Use the file type in
your address as default. For example, 'data.save("test.csv")'
means save this object into .csv type. DaPy supports
following file types since V1.5.1:
.csv, .txt, .xls, .pkl, .db, .html
newline : str (default='\n')
use this simble to mark change line.
delimiter : str (default=',')
use this simble to seperate a records.
if_exists : str (default='fail')
when saving the data into a exist database file, how to face the
delimma that the sheet name has been existed in the database.
'fail' -> raise an error;
'replace' -> replace the exist table with current data;
'append' -> append these records to the exist sheet
'
'''
fpath, fname, fbase, ftype = parse_addr(addr)
encode = kwrds.get('encode', 'utf-8')
ftype = kwrds.get('ftype', ftype)
if ftype in ('csv', 'txt'):
newline = kwrds.get('newline', '\n')
delimiter = kwrds.get('delimiter', ',')
para = dict(mode='w', buffering=2048)
if PYTHON3:
para['encoding'] = encode
para['file'] = addr
else:
para['name'] = addr
for data, sheet in zip(self._data, self._sheets):
if data is None:
continue
if len(self._data) > 1:
addr = fpath + fbase + '_' + sheet + '.' + ftype
f = open(**para)
try:
write_txt(f, data, newline, delimiter)
finally:
f.close()
elif ftype in ('xls', 'xlsx'):
try:
import xlwt
except ImportError:
raise ImportError('DaPy uses xlwt library to save a `xls/xlsx` file.')
workbook = xlwt.Workbook(encoding=encode)
for sheet, data in zip(self._sheets, self._data):
if not data:
continue
worksheet = workbook.add_sheet(sheet)
write_xls(worksheet, data)
workbook.save(addr)
elif ftype == 'pkl':
pickle.dump(self, open(addr, 'wb'))
elif ftype == 'db':
import sqlite3 as sql
with sql.connect(addr) as conn:
for data, sheet in zip(self._data, self._sheets):
write_db(conn.cursor(), sheet, data, kwrds.get('if_exists', 'fail'), 'sqlite3')
elif ftype == 'html':
with open(addr, 'w') as f:
for data, sheet in zip(self._data, self._sheets):
if not data:
continue
f.write('<table border="1" class="%s">' % sheet)
write_html(f, data)
f.write('</table>')
elif ftype == 'sql':
with open(addr, 'w') as doc:
for name, sheet in zip(self._sheets, self._data):
write_sql(doc, sheet, name)
elif ftype == 'mysql':
try:
import pymysql as sql
except ImportError:
try:
import MySQLdb as sql
except ImportError:
raise ImportError('DaPy uses "pymysql" or "MySQLdb" libraries to access a database server.')
user, psd = fpath.split(':')
host, port = fbase.split(':')
with sql.connect(host=host, port=int(port), user=user, passwd=<PASSWORD>, db=fname[0], charset='utf8') as conn:
for data, sheet in zip(self._data, self._sheets):
write_db(conn, sheet, data, kwrds.get('if_exists', 'fail'), 'mysql')
else:
raise ValueError('unrecognized file type')
@timer
@operater
def todict(self):
pass
@timer
def tocol(self):
'''Transform all of the stored data structure to DaPy.SeriesSet
'''
for i, data in enumerate(self._data):
if isinstance(data, SeriesSet):
continue
try:
if hasattr(data, 'columns'):
if hasattr(data, 'miss_symbol'):
self._data[i] = SeriesSet(data, list(data.columns),
miss_value=data.miss_symbol)
else:
self._data[i] = SeriesSet(data, data.columns)
else:
self._data[i] = SeriesSet(data)
except Exception as e:
LogErr('sheet[%s] can not transform to SeriesSet, ' % self._sheets[i] +\
'because: %s' % e)
self._types[i] = SeriesSet
@timer
def tomat(self):
'''Transform all of the stored data structure to DaPy.Matrix
'''
for i, data in enumerate(self._data):
if isinstance(data, Matrix):
continue
try:
self._data[i] = Matrix(data)
except:
LogErr('sheet:%s can not transform to Matrix.'%self._sheets[i])
self._types[i] = Matrix
@timer
@operater
def tolist(self):
pass
@timer
@operater
def toarray(self):
pass
def show(self, max_lines=None, max_display=75, max_col_size=25, multi_line=True):
'''show(lines=None) -> None
See Also
--------
DaPy.SeriesSet.show
'''
for i, data in enumerate(self._data):
print('sheet:' + self._sheets[i])
print('=' * (len(self._sheets[i]) + 6))
if hasattr(data, 'show'):
data.show(max_lines, max_display, max_col_size, multi_line)
else:
pprint(data.__repr__())
if __name__ == '__main__':
from doctest import testmod
testmod() | 0.681727 | 0.266119 |
import os
import json
import constants
import shared
import saveable
import mode
from state import State
class Save(object):
__slots__ = (
'file_name',
'_mode_name',
'_mode_data',
'_shared_data',
)
def __init__(self, file_name: str, mode_name: str, mode_data, shared_data):
self.file_name = file_name
self._mode_name = mode_name
self._mode_data = mode_data
self._shared_data = shared_data
@staticmethod
def willOverwrite(file_name: str):
return os.path.exists(
os.path.join(constants.SAVE_DIRECTORY, file_name)
)
@staticmethod
def _getSaveFiles():
if not os.path.isdir(constants.SAVE_DIRECTORY):
return ()
return (
file_name
for file_name
in os.listdir(constants.SAVE_DIRECTORY)
if os.path.isfile(
os.path.join(constants.SAVE_DIRECTORY, file_name)
)
)
@classmethod
def getAllFromFiles(cls):
return tuple(
sorted(
(
save
for save
in (
cls.getFromFile(file)
for file
in cls._getSaveFiles()
)
if save
),
key=lambda s: (s.file_name.lower(), s.file_name)
)
)
@classmethod
def getFromFile(cls, file_name: str):
file_path = os.path.join(constants.SAVE_DIRECTORY, file_name)
try:
with open(file_path, 'r') as file:
save_object = json.load(file, object_hook=saveable.decodeSaveable)
return cls(file_name, save_object['mode_name'], save_object['mode_data'], save_object['shared_data'])
except (IOError, json.decoder.JSONDecodeError):
return False
@classmethod
def getFromMode(cls, file_name: str, from_mode: saveable.Saveable):
return cls(file_name, type(from_mode).__name__, from_mode.save(), shared.state.save())
def save(self):
try:
os.mkdir(constants.SAVE_DIRECTORY)
except FileExistsError:
pass
save_object = {
'mode_name': self._mode_name,
'mode_data': self._mode_data,
'shared_data': self._shared_data,
}
file_path = os.path.join(constants.SAVE_DIRECTORY, self.file_name)
try:
with open(file_path, 'w') as file:
json.dump(save_object, file, cls=saveable.SaveableJSONEncoder)
return True
except IOError:
return False
def load(self):
shared.state = State.load(self._shared_data)
mode_cls = getattr(mode, self._mode_name)
new_mode = mode_cls.load(self._mode_data)
return new_mode | src/save.py | import os
import json
import constants
import shared
import saveable
import mode
from state import State
class Save(object):
__slots__ = (
'file_name',
'_mode_name',
'_mode_data',
'_shared_data',
)
def __init__(self, file_name: str, mode_name: str, mode_data, shared_data):
self.file_name = file_name
self._mode_name = mode_name
self._mode_data = mode_data
self._shared_data = shared_data
@staticmethod
def willOverwrite(file_name: str):
return os.path.exists(
os.path.join(constants.SAVE_DIRECTORY, file_name)
)
@staticmethod
def _getSaveFiles():
if not os.path.isdir(constants.SAVE_DIRECTORY):
return ()
return (
file_name
for file_name
in os.listdir(constants.SAVE_DIRECTORY)
if os.path.isfile(
os.path.join(constants.SAVE_DIRECTORY, file_name)
)
)
@classmethod
def getAllFromFiles(cls):
return tuple(
sorted(
(
save
for save
in (
cls.getFromFile(file)
for file
in cls._getSaveFiles()
)
if save
),
key=lambda s: (s.file_name.lower(), s.file_name)
)
)
@classmethod
def getFromFile(cls, file_name: str):
file_path = os.path.join(constants.SAVE_DIRECTORY, file_name)
try:
with open(file_path, 'r') as file:
save_object = json.load(file, object_hook=saveable.decodeSaveable)
return cls(file_name, save_object['mode_name'], save_object['mode_data'], save_object['shared_data'])
except (IOError, json.decoder.JSONDecodeError):
return False
@classmethod
def getFromMode(cls, file_name: str, from_mode: saveable.Saveable):
return cls(file_name, type(from_mode).__name__, from_mode.save(), shared.state.save())
def save(self):
try:
os.mkdir(constants.SAVE_DIRECTORY)
except FileExistsError:
pass
save_object = {
'mode_name': self._mode_name,
'mode_data': self._mode_data,
'shared_data': self._shared_data,
}
file_path = os.path.join(constants.SAVE_DIRECTORY, self.file_name)
try:
with open(file_path, 'w') as file:
json.dump(save_object, file, cls=saveable.SaveableJSONEncoder)
return True
except IOError:
return False
def load(self):
shared.state = State.load(self._shared_data)
mode_cls = getattr(mode, self._mode_name)
new_mode = mode_cls.load(self._mode_data)
return new_mode | 0.329607 | 0.05301 |
import oauth
import json
import sys
import argparse
def __parse_entity_results(resp):
ents = resp['batch'].get('entityResults') or []
ret = []
for ent in ents:
ent = ent['entity']
copy = {
'key': {
'path': ent['key']['path']
},
'properties': {}
}
for k, v in ent.get('properties', {}).iteritems():
prop = v.copy()
excludeFromIndexes = prop.pop('excludeFromIndexes', False)
prop['excludeFromIndexes'] = excludeFromIndexes
if v:
copy['properties'][k] = prop
ret.append(copy)
return ret
def query(dataset, gql, namespace=None, limit=1000, startCursor=None):
url = 'https://datastore.googleapis.com/v1/projects/%s:runQuery' % (dataset)
queryString = gql
if limit:
queryString = '%s limit %i' % (gql, limit)
if startCursor:
queryString += ' offset @startCursor'
params = {
'partitionId': {
'projectId': dataset,
'namespaceId': namespace
},
'gqlQuery': {
'allowLiterals': True,
'queryString': queryString
}
}
if startCursor:
params['gqlQuery']['namedBindings'] = { 'startCursor': { 'cursor': startCursor } };
resp = oauth.oauth_req_json('POST', url, params)
ret = {}
ret['entities'] = __parse_entity_results(resp)
ret['endCursor'] = resp['batch'].get('endCursor')
return ret
def iterate(dataset, gql, namespace=None, bulkSize=1000, startCursor=None, context=None):
while True:
page = query(dataset, gql, namespace, bulkSize, startCursor)
if not page['entities']:
if context != None:
context['cursor'] = startCursor
return
startCursor = page.get('endCursor')
for ent in page['entities']:
yield ent
def print_iterate(dataset, gql, namespace=None, msg='', startCursor=None, context=None):
it = iterate(dataset, gql, namespace, startCursor=startCursor, context=context)
loaded = 0
try:
while True:
loaded += 1
if loaded % 1000 == 0:
print >> sys.stderr, 'loaded', msg, loaded
string = json.dumps(it.next(), sort_keys=True)
print string
except StopIteration:
pass
print >> sys.stderr, 'Done', msg, loaded-1
def argparse_prepare(sub):
sub.add_argument('-d', '--dataset', required=True, help='dataset')
sub.add_argument('-n', '--namespace', help='namespace')
sub.add_argument('-q', '--gql', required=True, help='gql')
def argparse_exec(args):
print_iterate(args.dataset, args.gql, args.namespace) | dsopz/reader.py | import oauth
import json
import sys
import argparse
def __parse_entity_results(resp):
ents = resp['batch'].get('entityResults') or []
ret = []
for ent in ents:
ent = ent['entity']
copy = {
'key': {
'path': ent['key']['path']
},
'properties': {}
}
for k, v in ent.get('properties', {}).iteritems():
prop = v.copy()
excludeFromIndexes = prop.pop('excludeFromIndexes', False)
prop['excludeFromIndexes'] = excludeFromIndexes
if v:
copy['properties'][k] = prop
ret.append(copy)
return ret
def query(dataset, gql, namespace=None, limit=1000, startCursor=None):
url = 'https://datastore.googleapis.com/v1/projects/%s:runQuery' % (dataset)
queryString = gql
if limit:
queryString = '%s limit %i' % (gql, limit)
if startCursor:
queryString += ' offset @startCursor'
params = {
'partitionId': {
'projectId': dataset,
'namespaceId': namespace
},
'gqlQuery': {
'allowLiterals': True,
'queryString': queryString
}
}
if startCursor:
params['gqlQuery']['namedBindings'] = { 'startCursor': { 'cursor': startCursor } };
resp = oauth.oauth_req_json('POST', url, params)
ret = {}
ret['entities'] = __parse_entity_results(resp)
ret['endCursor'] = resp['batch'].get('endCursor')
return ret
def iterate(dataset, gql, namespace=None, bulkSize=1000, startCursor=None, context=None):
while True:
page = query(dataset, gql, namespace, bulkSize, startCursor)
if not page['entities']:
if context != None:
context['cursor'] = startCursor
return
startCursor = page.get('endCursor')
for ent in page['entities']:
yield ent
def print_iterate(dataset, gql, namespace=None, msg='', startCursor=None, context=None):
it = iterate(dataset, gql, namespace, startCursor=startCursor, context=context)
loaded = 0
try:
while True:
loaded += 1
if loaded % 1000 == 0:
print >> sys.stderr, 'loaded', msg, loaded
string = json.dumps(it.next(), sort_keys=True)
print string
except StopIteration:
pass
print >> sys.stderr, 'Done', msg, loaded-1
def argparse_prepare(sub):
sub.add_argument('-d', '--dataset', required=True, help='dataset')
sub.add_argument('-n', '--namespace', help='namespace')
sub.add_argument('-q', '--gql', required=True, help='gql')
def argparse_exec(args):
print_iterate(args.dataset, args.gql, args.namespace) | 0.126947 | 0.09122 |
from __future__ import division, print_function
import os
import numpy as np
from scipy import linalg, stats
try:
import modshogun as sg
except ImportError: # new versions just call it shogun
import shogun as sg
if 'OMP_NUM_THREADS' in os.environ:
num_threads = int(os.environ['OMP_NUM_THREADS'])
else:
import multiprocessing as mp
num_threads = mp.cpu_count()
sg.get_global_parallel().set_num_threads(num_threads)
def rbf_mmd_test(X, Y, bandwidth='median', null_samples=1000,
median_samples=1000, cache_size=32):
'''
Run an MMD test using a Gaussian kernel.
Parameters
----------
X : row-instance feature array
Y : row-instance feature array
bandwidth : float or 'median'
The bandwidth of the RBF kernel (sigma).
If 'median', estimates the median pairwise distance in the
aggregate sample and uses that.
null_samples : int
How many times to sample from the null distribution.
median_samples : int
How many points to use for estimating the bandwidth.
Returns
-------
p_val : float
The obtained p value of the test.
stat : float
The test statistic.
null_samples : array of length null_samples
The samples from the null distribution.
bandwidth : float
The used kernel bandwidth
'''
if bandwidth == 'median':
from sklearn.metrics.pairwise import euclidean_distances
sub = lambda feats, n: feats[np.random.choice(
feats.shape[0], min(feats.shape[0], n), replace=False)]
Z = np.r_[sub(X, median_samples // 2), sub(Y, median_samples // 2)]
D2 = euclidean_distances(Z, squared=True)
upper = D2[np.triu_indices_from(D2, k=1)]
kernel_width = np.median(upper, overwrite_input=True)
bandwidth = np.sqrt(kernel_width / 2)
# sigma = median / sqrt(2); works better, sometimes at least
del Z, D2, upper
else:
kernel_width = 2 * bandwidth**2
mmd = sg.QuadraticTimeMMD()
mmd.set_p(sg.RealFeatures(X.T.astype(np.float64)))
mmd.set_q(sg.RealFeatures(Y.T.astype(np.float64)))
mmd.set_kernel(sg.GaussianKernel(cache_size, kernel_width))
mmd.set_num_null_samples(null_samples)
samps = mmd.sample_null()
stat = mmd.compute_statistic()
p_val = np.mean(stat <= samps)
return p_val, stat, samps, bandwidth
def linear_mmd_test(X, Y, null_samples=1000):
mmd = sg.QuadraticTimeMMD()
mmd.set_p(sg.RealFeatures(X.T.astype(np.float64)))
mmd.set_q(sg.RealFeatures(Y.T.astype(np.float64)))
mmd.set_kernel(sg.LinearKernel())
mmd.set_num_null_samples(null_samples)
samps = mmd.sample_null()
stat = mmd.compute_statistic()
p_val = np.mean(stat <= samps)
return p_val, stat, samps
def linear_hotelling_test(X, Y, reg=0):
n, p = X.shape
Z = X - Y
Z_bar = Z.mean(axis=0)
Z -= Z_bar
S = Z.T.dot(Z)
S /= (n - 1)
if reg:
S[::p + 1] += reg
# z' inv(S) z = z' inv(L L') z = z' inv(L)' inv(L) z = ||inv(L) z||^2
L = linalg.cholesky(S, lower=True, overwrite_a=True)
Linv_Z_bar = linalg.solve_triangular(L, Z_bar, lower=True, overwrite_b=True)
stat = n * Linv_Z_bar.dot(Linv_Z_bar)
p_val = stats.chi2.sf(stat, p)
return p_val, stat | two_sample/mmd_test.py | from __future__ import division, print_function
import os
import numpy as np
from scipy import linalg, stats
try:
import modshogun as sg
except ImportError: # new versions just call it shogun
import shogun as sg
if 'OMP_NUM_THREADS' in os.environ:
num_threads = int(os.environ['OMP_NUM_THREADS'])
else:
import multiprocessing as mp
num_threads = mp.cpu_count()
sg.get_global_parallel().set_num_threads(num_threads)
def rbf_mmd_test(X, Y, bandwidth='median', null_samples=1000,
median_samples=1000, cache_size=32):
'''
Run an MMD test using a Gaussian kernel.
Parameters
----------
X : row-instance feature array
Y : row-instance feature array
bandwidth : float or 'median'
The bandwidth of the RBF kernel (sigma).
If 'median', estimates the median pairwise distance in the
aggregate sample and uses that.
null_samples : int
How many times to sample from the null distribution.
median_samples : int
How many points to use for estimating the bandwidth.
Returns
-------
p_val : float
The obtained p value of the test.
stat : float
The test statistic.
null_samples : array of length null_samples
The samples from the null distribution.
bandwidth : float
The used kernel bandwidth
'''
if bandwidth == 'median':
from sklearn.metrics.pairwise import euclidean_distances
sub = lambda feats, n: feats[np.random.choice(
feats.shape[0], min(feats.shape[0], n), replace=False)]
Z = np.r_[sub(X, median_samples // 2), sub(Y, median_samples // 2)]
D2 = euclidean_distances(Z, squared=True)
upper = D2[np.triu_indices_from(D2, k=1)]
kernel_width = np.median(upper, overwrite_input=True)
bandwidth = np.sqrt(kernel_width / 2)
# sigma = median / sqrt(2); works better, sometimes at least
del Z, D2, upper
else:
kernel_width = 2 * bandwidth**2
mmd = sg.QuadraticTimeMMD()
mmd.set_p(sg.RealFeatures(X.T.astype(np.float64)))
mmd.set_q(sg.RealFeatures(Y.T.astype(np.float64)))
mmd.set_kernel(sg.GaussianKernel(cache_size, kernel_width))
mmd.set_num_null_samples(null_samples)
samps = mmd.sample_null()
stat = mmd.compute_statistic()
p_val = np.mean(stat <= samps)
return p_val, stat, samps, bandwidth
def linear_mmd_test(X, Y, null_samples=1000):
mmd = sg.QuadraticTimeMMD()
mmd.set_p(sg.RealFeatures(X.T.astype(np.float64)))
mmd.set_q(sg.RealFeatures(Y.T.astype(np.float64)))
mmd.set_kernel(sg.LinearKernel())
mmd.set_num_null_samples(null_samples)
samps = mmd.sample_null()
stat = mmd.compute_statistic()
p_val = np.mean(stat <= samps)
return p_val, stat, samps
def linear_hotelling_test(X, Y, reg=0):
n, p = X.shape
Z = X - Y
Z_bar = Z.mean(axis=0)
Z -= Z_bar
S = Z.T.dot(Z)
S /= (n - 1)
if reg:
S[::p + 1] += reg
# z' inv(S) z = z' inv(L L') z = z' inv(L)' inv(L) z = ||inv(L) z||^2
L = linalg.cholesky(S, lower=True, overwrite_a=True)
Linv_Z_bar = linalg.solve_triangular(L, Z_bar, lower=True, overwrite_b=True)
stat = n * Linv_Z_bar.dot(Linv_Z_bar)
p_val = stats.chi2.sf(stat, p)
return p_val, stat | 0.800614 | 0.592254 |
import pickle
from datetime import datetime as DT
from loguru import logger
from copy import deepcopy
import pandas as pd
from theano import shared
from pymc_models import PyMCModel
from pymc_models import hs_regression
from sklearn.preprocessing import PolynomialFeatures
def run_model():
# load datasets
with open('../PickleJar/DataSets/AphiTrainTestSplitDataSets.pkl', 'rb') as fb:
datadict = pickle.load(fb)
X_s_train = datadict['x_train_s']
y_train = datadict['y_train']
X_s_test = datadict['x_test_s']
y_test = datadict['y_test']
poly_tranf = PolynomialFeatures(interaction_only=True, include_bias=False)
X_s_train_w_int = pd.DataFrame(poly_tranf.fit_transform(X_s_train),
columns=poly_tranf.get_feature_names(input_features=
X_s_train.columns),
index=X_s_train.index)
X_s_test_w_int = pd.DataFrame(poly_tranf.fit_transform(X_s_test),
columns=poly_tranf.get_feature_names(input_features=
X_s_train.columns),
index=X_s_test.index)
bands = [411, 443, 489, 510, 555, 670]
# create band-keyed dictionary to contain models
model_dict=dict.fromkeys(bands)
# create theano shared variable
X_shared = shared(X_s_train_w_int.values)
y_shared = shared(y_train['log10_aphy%d' % bands[0]].values)
# Fitting aphi411 model:
# Instantiate PyMC3 model with bnn likelihood
for band in bands:
logger.info("processing aphi{band}", band=band)
X_shared.set_value(X_s_train_w_int.values)
y_shared.set_value(y_train['log10_aphy%d' % band].values)
hshoe_wi_ = PyMCModel(hs_regression, X_shared, y_shared )
hshoe_wi_.model.name = 'hshoe_wi_aphy%d' %band
hshoe_wi_.fit(n_samples=2000, cores=4, chains=4, tune=10000,
nuts_kwargs=dict(target_accept=0.95))
ppc_train_ = hshoe_wi_.predict(likelihood_name='likelihood')
waic_train = hshoe_wi_.get_waic()
loo_train = hshoe_wi_.get_loo()
model = deepcopy(hshoe_wi_.model)
trace = deepcopy(hshoe_wi_.trace_)
run_dict = dict(model=model, trace=trace,
ppc_train=ppc_train_, loo_train=loo_train, waic_train=waic_train)
X_shared.set_value(X_s_test_w_int.values)
y_shared.set_value(y_test['log10_aphy%d' % band].values)
model_test = deepcopy(hshoe_wi_.model)
ppc_test_ = hshoe_wi_.predict(likelihood_name='likelihood')
waic_test = hshoe_wi_.get_waic()
loo_test = hshoe_wi_.get_loo()
run_dict.update(dict(model_test=model_test, ppc_test=ppc_test_,
waic_test=waic_test, loo_test=loo_test))
model_dict[band] = run_dict
with open('../PickleJar/Results/hshoe_wi_model_dict_%s.pkl' %DT.now(), 'wb') as fb:
pickle.dump(model_dict, fb, protocol=pickle.HIGHEST_PROTOCOL)
if __name__ == "__main__":
logger.add("linreg_wi_{time}.log")
run_model()
logger.info("done!") | Code/scripts/Old/run_linreg_w_interactions.py |
import pickle
from datetime import datetime as DT
from loguru import logger
from copy import deepcopy
import pandas as pd
from theano import shared
from pymc_models import PyMCModel
from pymc_models import hs_regression
from sklearn.preprocessing import PolynomialFeatures
def run_model():
# load datasets
with open('../PickleJar/DataSets/AphiTrainTestSplitDataSets.pkl', 'rb') as fb:
datadict = pickle.load(fb)
X_s_train = datadict['x_train_s']
y_train = datadict['y_train']
X_s_test = datadict['x_test_s']
y_test = datadict['y_test']
poly_tranf = PolynomialFeatures(interaction_only=True, include_bias=False)
X_s_train_w_int = pd.DataFrame(poly_tranf.fit_transform(X_s_train),
columns=poly_tranf.get_feature_names(input_features=
X_s_train.columns),
index=X_s_train.index)
X_s_test_w_int = pd.DataFrame(poly_tranf.fit_transform(X_s_test),
columns=poly_tranf.get_feature_names(input_features=
X_s_train.columns),
index=X_s_test.index)
bands = [411, 443, 489, 510, 555, 670]
# create band-keyed dictionary to contain models
model_dict=dict.fromkeys(bands)
# create theano shared variable
X_shared = shared(X_s_train_w_int.values)
y_shared = shared(y_train['log10_aphy%d' % bands[0]].values)
# Fitting aphi411 model:
# Instantiate PyMC3 model with bnn likelihood
for band in bands:
logger.info("processing aphi{band}", band=band)
X_shared.set_value(X_s_train_w_int.values)
y_shared.set_value(y_train['log10_aphy%d' % band].values)
hshoe_wi_ = PyMCModel(hs_regression, X_shared, y_shared )
hshoe_wi_.model.name = 'hshoe_wi_aphy%d' %band
hshoe_wi_.fit(n_samples=2000, cores=4, chains=4, tune=10000,
nuts_kwargs=dict(target_accept=0.95))
ppc_train_ = hshoe_wi_.predict(likelihood_name='likelihood')
waic_train = hshoe_wi_.get_waic()
loo_train = hshoe_wi_.get_loo()
model = deepcopy(hshoe_wi_.model)
trace = deepcopy(hshoe_wi_.trace_)
run_dict = dict(model=model, trace=trace,
ppc_train=ppc_train_, loo_train=loo_train, waic_train=waic_train)
X_shared.set_value(X_s_test_w_int.values)
y_shared.set_value(y_test['log10_aphy%d' % band].values)
model_test = deepcopy(hshoe_wi_.model)
ppc_test_ = hshoe_wi_.predict(likelihood_name='likelihood')
waic_test = hshoe_wi_.get_waic()
loo_test = hshoe_wi_.get_loo()
run_dict.update(dict(model_test=model_test, ppc_test=ppc_test_,
waic_test=waic_test, loo_test=loo_test))
model_dict[band] = run_dict
with open('../PickleJar/Results/hshoe_wi_model_dict_%s.pkl' %DT.now(), 'wb') as fb:
pickle.dump(model_dict, fb, protocol=pickle.HIGHEST_PROTOCOL)
if __name__ == "__main__":
logger.add("linreg_wi_{time}.log")
run_model()
logger.info("done!") | 0.62601 | 0.265407 |
import numpy as np
import gudhi as gd
from numpy.lib.stride_tricks import as_strided
import tensorflow as tf
from tensorflow.python.framework import ops
import timeit
def compute_dgm(f, card, hom_dim):
"""
Computes the persistence diagram of an image.
:param f: image
:param card: maximum number of bars kept
:param hom_dim: dimension of homology
:return: persistence diagram, critical pixels
"""
dgm = np.zeros([card, 2], dtype=np.float32)
cof = np.zeros([card, 2], dtype=np.int32)
cc = gd.CubicalComplex(dimensions=f.shape, top_dimensional_cells=f.ravel())
cc.compute_persistence()
# Return zero arrays if no finite bars
num_bars = len(cc.persistence_intervals_in_dimension(hom_dim))
if ((hom_dim == 0) and (num_bars == 1)) or ((hom_dim > 0) and (num_bars == 0)):
return dgm, cof
# These are all the critical pixels
all_cof = cc.cofaces_of_persistence_pairs()[0][hom_dim]
# Generate the persistence diagram
birth_times, death_times = f.flat[all_cof[:, 0]], f.flat[all_cof[:, 1]]
# Return at most param:card bars
min_card = min(len(birth_times), card)
dgm[:min_card, 0], dgm[:min_card, 1] = birth_times[:min_card], death_times[:min_card]
cof[:min_card, :] = all_cof[:min_card, :]
return dgm, cof
def compute_dgm_grad(grad_dgm, cof, f):
"""
Uses grad_dgm to compute birth/death critical pixels
:param grad_dgm: gradient wrt dgm
:param cof: critical pixels
:param f: input image
:return: gradient of births/deaths wrt f
"""
grad_f_births = np.zeros(f.shape, dtype=np.float32)
grad_f_deaths = np.zeros(f.shape, dtype=np.float32)
# Identify which rows correspond to a persistence dot.
is_nonzero = cof.any(axis=1)
if not np.any(is_nonzero):
return grad_f_births, grad_f_deaths
# Filter by relevant rows
cof_nonzero = cof[is_nonzero, :]
grad_dgm_nonzero = grad_dgm[is_nonzero, :]
# Add gradient at appropriate places.
np.add.at(grad_f_births.ravel(), cof_nonzero[:, 0].ravel(), grad_dgm_nonzero[:, 0].ravel())
np.add.at(grad_f_deaths.ravel(), cof_nonzero[:, 1].ravel(), grad_dgm_nonzero[:, 1].ravel())
return grad_f_births, grad_f_deaths
def compute_thresh_dgm(f, card, hom_dim, pers_region=None):
"""
Computes thresholded persistent homology of an image.
:param f: input image
:param card: max cardinality of persistence diagram
:param hom_dim: degree of homology
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: persistence diagram and associated critical pixels
"""
dgm = np.zeros([card, 2], dtype=np.float32)
cof = np.zeros([card, 2], dtype=np.int32)
cc = gd.CubicalComplex(dimensions=f.shape, top_dimensional_cells=f.ravel())
cc.compute_persistence()
# Return zero arrays if no finite bars
num_bars = len(cc.persistence_intervals_in_dimension(hom_dim))
if ((hom_dim == 0) and (num_bars == 1)) or ((hom_dim > 0) and (num_bars == 0)):
return dgm, cof
# These are all the critical pixels
all_cof = cc.cofaces_of_persistence_pairs()[0][hom_dim]
# Generate the persistence diagram
birth_times, death_times = f.flat[all_cof[:, 0]], f.flat[all_cof[:, 1]]
# Threshold by persistence region if one was provided
if pers_region is not None:
lifetimes = death_times - birth_times
rel_ind = (pers_region[0] < birth_times) & (birth_times < pers_region[1]) & \
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3])
birth_times, death_times, all_cof = birth_times[rel_ind], death_times[rel_ind], all_cof[rel_ind, :]
min_card = min(len(birth_times), card)
dgm[:min_card, 0], dgm[:min_card, 1] = birth_times[:min_card], death_times[:min_card]
cof[:min_card, :] = all_cof[:min_card, :]
return dgm, cof
def compute_spawn_sw(grad_dgm, dgm, f, card,
hom_dim, kernel_size, pool_mode, noise, samples, M,
pers_region=None):
bsm = np.zeros(f.shape, dtype='float32')
dsm = np.zeros(f.shape, dtype='float32')
# Find nonzero rows of dgm
dgm_up_nonzero = dgm.any(axis=1)
if not np.any(dgm_up_nonzero):
return bsm, dsm
dgm_up = dgm[dgm_up_nonzero, :]
grad_dgm_up = grad_dgm[dgm_up_nonzero, :]
# Project nonzero rows of dgm to diagonal
dgm_up_proj = np.column_stack(((dgm_up[:, 0] + dgm_up[:, 1]) / 2, (dgm_up[:, 0] + dgm_up[:, 1]) / 2))
# For each random sample, compute fuzzy sliced-Wasserstein pairing
for t in range(samples):
g = f + np.random.uniform(-noise, noise, size=f.shape)
x_down, switch = spool(g, kernel_size, pool_mode)
# Compute persistence diagram and critical pixels.
dgm_down, cof_down = compute_thresh_dgm(x_down, card, hom_dim, pers_region)
bsm_down, dsm_down = np.zeros(x_down.shape), np.zeros(x_down.shape) # Initialize low-res smears.
# Get nonzero rows of dgm_down
dgm_down_nonzero = dgm_down.any(axis=1)
if not np.any(dgm_down_nonzero): # Skip iteration if downsampled image has no persistent homology.
continue
dgm_down = dgm_down[dgm_down_nonzero, :]
cof_down = cof_down[dgm_down_nonzero, :]
# Project nonzero rows of downsampled dgm onto diagonal
dgm_down_proj = np.column_stack(((dgm_down[:, 0] + dgm_down[:, 1]) / 2, (dgm_down[:, 0] + dgm_down[:, 1]) / 2))
theta = -np.pi / 2
for i in range(M):
theta_vec = np.array([np.cos(theta), np.sin(theta)])
# Symmetrize the pair dgm_up and dgm_down
V1 = np.concatenate([np.dot(dgm_up, theta_vec), np.dot(dgm_down_proj, theta_vec)])
V2 = np.concatenate([np.dot(dgm_down, theta_vec), np.dot(dgm_up_proj, theta_vec)])
V1_sort = V1.argsort()
V2_sort = V2.argsort()
for j in range(len(V1)):
dot1 = V1_sort[j]
dot2 = V2_sort[j]
# Check if pair happened between non-diagonal points
if (dot1 < dgm_up.shape[0]) and (dot2 < dgm_down.shape[0]):
bsm_down.ravel()[cof_down[dot2, 0]] += (grad_dgm_up[dot1, 0] / M)
dsm_down.ravel()[cof_down[dot2, 1]] += (grad_dgm_up[dot1, 1] / M)
theta += np.pi / M
bsm += unspool(bsm_down, kernel_size, switch)
dsm += unspool(dsm_down, kernel_size, switch)
bsm, dsm = bsm / samples, dsm / samples
return bsm, dsm
def robustness_test(f, eps, n, pers_region, p, hom_dim):
num_eps = len(eps)
pers_avgs = np.zeros(num_eps)
pers_mins = np.zeros(num_eps)
pers_maxs = np.zeros(num_eps)
for t in range(num_eps):
S = np.zeros(n)
for i in range(n):
g = f + np.random.uniform(low=-eps[t], high=eps[t], size=np.shape(f))
g = np.clip(g, 0, 255)
dgm = compute_dgm(g, 10000, hom_dim)[0]
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = (pers_region[0] < dgm[:, 0]) & (dgm[:, 0] < pers_region[1]) & \
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3])
S[i] = np.linalg.norm(lifetimes[idx], p)
pers_avgs[t] = np.average(S)
pers_mins[t] = np.min(S)
pers_maxs[t] = np.max(S)
return pers_avgs, pers_mins, pers_maxs
def spool(f, kernel_size, pool_mode):
"""
Stochastically pools an image.
:param f: image
:param kernel_size: integer kernel size
:param pool_mode: 'max', 'min', 'uniform', 'simplex'
:return: downsampled image, switch for unspooling
"""
# Set stride to kernel size
stride = kernel_size
# Check that pool_mode is valid
assert pool_mode in ['max', 'min', 'uniform', 'simplex']
# Reshape image according to kernel size and stride
assert ~((f.shape[0] - kernel_size) % stride or (f.shape[1] - kernel_size) % stride), \
'Chosen kernel and stride misses some of the image.'
downsample_shape = ((f.shape[0] - kernel_size) // stride + 1, (f.shape[1] - kernel_size) // stride + 1)
f_window = as_strided(f,
shape=downsample_shape + (kernel_size, kernel_size),
strides=(stride * f.strides[0], stride * f.strides[1]) + f.strides)
# Reshape f_window so each row corresponds to a window.
f_window = f_window.reshape(-1, kernel_size ** 2)
# Choose switch according to pool_mode
if pool_mode == 'max':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]), f_window.argmax(1)] = 1
if pool_mode == 'min':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]), f_window.argmin(1)] = 1
if pool_mode == 'uniform':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]),
np.random.randint(0, switch.shape[1], switch.shape[0])] = 1
if pool_mode == 'simplex':
switch = np.random.uniform(0, 1, f_window.shape).astype('float32')
switch = switch / switch.sum(axis=1)[:, None]
# Get corresponding values and reshape to downsampled image size.
f_down = np.sum(f_window * switch, axis=1).reshape(downsample_shape)
return f_down, switch
def unspool(f, kernel_size, switch):
"""
Deterministically un-pools an image using a switch.
:param f: image
:param kernel_size: kernel_size used in spool()
:param switch: switch output by spool()
:return: upscaled image
"""
stride = kernel_size
# Initialize upsampled image.
f_up = np.zeros(((f.shape[0] - 1) * stride + kernel_size, (f.shape[1] - 1) * stride + kernel_size),
dtype=np.float32)
f_window = as_strided(f_up,
shape=f.shape + (kernel_size, kernel_size),
strides=(stride * f_up.strides[0], stride * f_up.strides[1]) + f_up.strides)
f_window[:, :, :, :] = (switch * f.ravel()[:, None]).reshape(f.shape + (kernel_size, kernel_size))
return f_up
# py_func() and Cubical() are modified from GUDHI tutorials here: https://github.com/GUDHI/TDA-tutorial
def py_func(func, inp, Tout, stateful=True, name=None, grad=None):
"""
Wraps Python function as TensorFlow op
:param func: Python function
:param inp: inputs to func
:param Tout: types of func's outputs
:param stateful:
:param name:
:param grad: TensorFlow function computing gradient of func
:return: TensorFlow wrapper of func
"""
rnd_name = "PyFuncGrad" + str(np.random.randint(0, 1e+8))
tf.RegisterGradient(rnd_name)(grad)
g = tf.get_default_graph()
with g.gradient_override_map({"PyFunc": rnd_name}):
return tf.py_func(func, inp, Tout, stateful=stateful, name=name)
def Spool(x, kernel_size, pool_mode, name=None):
"""
TF op that stochastically pools an image.
:param x: image
:param kernel_size: integer kernel size
:param pool_mode: 'max', 'min', 'uniform', 'simplex'
:param name:
:return: TF operation
"""
# Define override gradient
def _Spool(op, grad_xdown, grad_switch):
switch = op.outputs[1]
grad_x = tf.py_func(lambda y, z: unspool(y, kernel_size, z),
[grad_xdown, switch],
[tf.float32])[0]
return grad_x
# Create the operation
with ops.op_scope([x], name, "Spool") as name:
return py_func(lambda y: spool(y, kernel_size, pool_mode),
[x],
[tf.float32, tf.float32],
name=name,
grad=_Spool)
def Cubical(x, card, hom_dim, update_func, name=None):
"""
TF op that computes the persistence diagram of an image.
:param x: image
:param card: maximum number of bars kept
:param hom_dim: dimension of homology
:param update_func: update_func(grad_dgm, dgm, cof, x) gives the direction of update
:param name:
:return: TF operation
"""
# Define override gradient
def _Cubical(op, grad_dgm, grad_cof):
dgm, cof = op.outputs[0], op.outputs[1]
x = op.inputs[0]
grad_x = tf.py_func(lambda a, b, c, d: update_func(a, b, c, d),
[grad_dgm, dgm, cof, x],
[tf.float32])[0]
return grad_x
# Create the operation
with ops.op_scope([x], name, "Cubical") as name:
return py_func(lambda y: compute_dgm(y, card, hom_dim),
[x],
[tf.float32, tf.int32],
name=name,
grad=_Cubical)
def UniformNoise(x, eps):
"""
TF op that adds Uniform noise to an image.
:param x: image
:param eps: amount of noise
:return: TF operation
"""
noise = tf.random_uniform(shape=tf.shape(x), minval=-eps, maxval=eps, dtype=tf.float32)
return x + noise
def SqPersInRegion(dgm, pers_region):
"""
TF op that computes the sum of squared persistence in a region.
:param dgm: persistence diagram
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: TF operation
"""
birthtimes = dgm[:, 0]
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = tf.where((pers_region[0] < birthtimes) & (birthtimes < pers_region[1]) &
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3]))
return tf.reduce_sum(tf.square(tf.gather(dgm[:, 1], idx) - tf.gather(dgm[:, 0], idx)))
def AbsPersInRegion(dgm, pers_region):
"""
TF op that computes the sum of absolute persistence in a region.
:param dgm: persistence diagram
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: TF operation
"""
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = tf.where((pers_region[0] < dgm[:, 0]) & (dgm[:, 0] < pers_region[1]) &
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3]) &
tf.logical_not(tf.equal(dgm[:, 0], dgm[:, 1])))
return tf.reduce_sum(tf.abs(tf.gather(dgm[:, 1], idx) - tf.gather(dgm[:, 0], idx)))
def TopBackprop(f, TopLoss, a, lr, steps):
# Initialize TensorFlow
tf.reset_default_graph()
# Create input variable initialized with image values
x = tf.get_variable("X", initializer=np.array(f), trainable=True)
# Compute persistence
bad_top = TopLoss(x)
# Compute loss
loss = a * bad_top + (1 - a) * tf.losses.mean_squared_error(f, x)
# Optimization
opt = tf.train.AdamOptimizer(learning_rate=lr)
# Train it
train = opt.minimize(loss)
# Training!
init = tf.global_variables_initializer()
start_time = timeit.default_timer()
with tf.Session() as sess:
sess.run(init)
for epoch in range(steps):
sess.run(train)
print('Computation Time = ' + str(timeit.default_timer() - start_time))
return sess.run(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "X")[0]) | topnet.py | import numpy as np
import gudhi as gd
from numpy.lib.stride_tricks import as_strided
import tensorflow as tf
from tensorflow.python.framework import ops
import timeit
def compute_dgm(f, card, hom_dim):
"""
Computes the persistence diagram of an image.
:param f: image
:param card: maximum number of bars kept
:param hom_dim: dimension of homology
:return: persistence diagram, critical pixels
"""
dgm = np.zeros([card, 2], dtype=np.float32)
cof = np.zeros([card, 2], dtype=np.int32)
cc = gd.CubicalComplex(dimensions=f.shape, top_dimensional_cells=f.ravel())
cc.compute_persistence()
# Return zero arrays if no finite bars
num_bars = len(cc.persistence_intervals_in_dimension(hom_dim))
if ((hom_dim == 0) and (num_bars == 1)) or ((hom_dim > 0) and (num_bars == 0)):
return dgm, cof
# These are all the critical pixels
all_cof = cc.cofaces_of_persistence_pairs()[0][hom_dim]
# Generate the persistence diagram
birth_times, death_times = f.flat[all_cof[:, 0]], f.flat[all_cof[:, 1]]
# Return at most param:card bars
min_card = min(len(birth_times), card)
dgm[:min_card, 0], dgm[:min_card, 1] = birth_times[:min_card], death_times[:min_card]
cof[:min_card, :] = all_cof[:min_card, :]
return dgm, cof
def compute_dgm_grad(grad_dgm, cof, f):
"""
Uses grad_dgm to compute birth/death critical pixels
:param grad_dgm: gradient wrt dgm
:param cof: critical pixels
:param f: input image
:return: gradient of births/deaths wrt f
"""
grad_f_births = np.zeros(f.shape, dtype=np.float32)
grad_f_deaths = np.zeros(f.shape, dtype=np.float32)
# Identify which rows correspond to a persistence dot.
is_nonzero = cof.any(axis=1)
if not np.any(is_nonzero):
return grad_f_births, grad_f_deaths
# Filter by relevant rows
cof_nonzero = cof[is_nonzero, :]
grad_dgm_nonzero = grad_dgm[is_nonzero, :]
# Add gradient at appropriate places.
np.add.at(grad_f_births.ravel(), cof_nonzero[:, 0].ravel(), grad_dgm_nonzero[:, 0].ravel())
np.add.at(grad_f_deaths.ravel(), cof_nonzero[:, 1].ravel(), grad_dgm_nonzero[:, 1].ravel())
return grad_f_births, grad_f_deaths
def compute_thresh_dgm(f, card, hom_dim, pers_region=None):
"""
Computes thresholded persistent homology of an image.
:param f: input image
:param card: max cardinality of persistence diagram
:param hom_dim: degree of homology
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: persistence diagram and associated critical pixels
"""
dgm = np.zeros([card, 2], dtype=np.float32)
cof = np.zeros([card, 2], dtype=np.int32)
cc = gd.CubicalComplex(dimensions=f.shape, top_dimensional_cells=f.ravel())
cc.compute_persistence()
# Return zero arrays if no finite bars
num_bars = len(cc.persistence_intervals_in_dimension(hom_dim))
if ((hom_dim == 0) and (num_bars == 1)) or ((hom_dim > 0) and (num_bars == 0)):
return dgm, cof
# These are all the critical pixels
all_cof = cc.cofaces_of_persistence_pairs()[0][hom_dim]
# Generate the persistence diagram
birth_times, death_times = f.flat[all_cof[:, 0]], f.flat[all_cof[:, 1]]
# Threshold by persistence region if one was provided
if pers_region is not None:
lifetimes = death_times - birth_times
rel_ind = (pers_region[0] < birth_times) & (birth_times < pers_region[1]) & \
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3])
birth_times, death_times, all_cof = birth_times[rel_ind], death_times[rel_ind], all_cof[rel_ind, :]
min_card = min(len(birth_times), card)
dgm[:min_card, 0], dgm[:min_card, 1] = birth_times[:min_card], death_times[:min_card]
cof[:min_card, :] = all_cof[:min_card, :]
return dgm, cof
def compute_spawn_sw(grad_dgm, dgm, f, card,
hom_dim, kernel_size, pool_mode, noise, samples, M,
pers_region=None):
bsm = np.zeros(f.shape, dtype='float32')
dsm = np.zeros(f.shape, dtype='float32')
# Find nonzero rows of dgm
dgm_up_nonzero = dgm.any(axis=1)
if not np.any(dgm_up_nonzero):
return bsm, dsm
dgm_up = dgm[dgm_up_nonzero, :]
grad_dgm_up = grad_dgm[dgm_up_nonzero, :]
# Project nonzero rows of dgm to diagonal
dgm_up_proj = np.column_stack(((dgm_up[:, 0] + dgm_up[:, 1]) / 2, (dgm_up[:, 0] + dgm_up[:, 1]) / 2))
# For each random sample, compute fuzzy sliced-Wasserstein pairing
for t in range(samples):
g = f + np.random.uniform(-noise, noise, size=f.shape)
x_down, switch = spool(g, kernel_size, pool_mode)
# Compute persistence diagram and critical pixels.
dgm_down, cof_down = compute_thresh_dgm(x_down, card, hom_dim, pers_region)
bsm_down, dsm_down = np.zeros(x_down.shape), np.zeros(x_down.shape) # Initialize low-res smears.
# Get nonzero rows of dgm_down
dgm_down_nonzero = dgm_down.any(axis=1)
if not np.any(dgm_down_nonzero): # Skip iteration if downsampled image has no persistent homology.
continue
dgm_down = dgm_down[dgm_down_nonzero, :]
cof_down = cof_down[dgm_down_nonzero, :]
# Project nonzero rows of downsampled dgm onto diagonal
dgm_down_proj = np.column_stack(((dgm_down[:, 0] + dgm_down[:, 1]) / 2, (dgm_down[:, 0] + dgm_down[:, 1]) / 2))
theta = -np.pi / 2
for i in range(M):
theta_vec = np.array([np.cos(theta), np.sin(theta)])
# Symmetrize the pair dgm_up and dgm_down
V1 = np.concatenate([np.dot(dgm_up, theta_vec), np.dot(dgm_down_proj, theta_vec)])
V2 = np.concatenate([np.dot(dgm_down, theta_vec), np.dot(dgm_up_proj, theta_vec)])
V1_sort = V1.argsort()
V2_sort = V2.argsort()
for j in range(len(V1)):
dot1 = V1_sort[j]
dot2 = V2_sort[j]
# Check if pair happened between non-diagonal points
if (dot1 < dgm_up.shape[0]) and (dot2 < dgm_down.shape[0]):
bsm_down.ravel()[cof_down[dot2, 0]] += (grad_dgm_up[dot1, 0] / M)
dsm_down.ravel()[cof_down[dot2, 1]] += (grad_dgm_up[dot1, 1] / M)
theta += np.pi / M
bsm += unspool(bsm_down, kernel_size, switch)
dsm += unspool(dsm_down, kernel_size, switch)
bsm, dsm = bsm / samples, dsm / samples
return bsm, dsm
def robustness_test(f, eps, n, pers_region, p, hom_dim):
num_eps = len(eps)
pers_avgs = np.zeros(num_eps)
pers_mins = np.zeros(num_eps)
pers_maxs = np.zeros(num_eps)
for t in range(num_eps):
S = np.zeros(n)
for i in range(n):
g = f + np.random.uniform(low=-eps[t], high=eps[t], size=np.shape(f))
g = np.clip(g, 0, 255)
dgm = compute_dgm(g, 10000, hom_dim)[0]
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = (pers_region[0] < dgm[:, 0]) & (dgm[:, 0] < pers_region[1]) & \
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3])
S[i] = np.linalg.norm(lifetimes[idx], p)
pers_avgs[t] = np.average(S)
pers_mins[t] = np.min(S)
pers_maxs[t] = np.max(S)
return pers_avgs, pers_mins, pers_maxs
def spool(f, kernel_size, pool_mode):
"""
Stochastically pools an image.
:param f: image
:param kernel_size: integer kernel size
:param pool_mode: 'max', 'min', 'uniform', 'simplex'
:return: downsampled image, switch for unspooling
"""
# Set stride to kernel size
stride = kernel_size
# Check that pool_mode is valid
assert pool_mode in ['max', 'min', 'uniform', 'simplex']
# Reshape image according to kernel size and stride
assert ~((f.shape[0] - kernel_size) % stride or (f.shape[1] - kernel_size) % stride), \
'Chosen kernel and stride misses some of the image.'
downsample_shape = ((f.shape[0] - kernel_size) // stride + 1, (f.shape[1] - kernel_size) // stride + 1)
f_window = as_strided(f,
shape=downsample_shape + (kernel_size, kernel_size),
strides=(stride * f.strides[0], stride * f.strides[1]) + f.strides)
# Reshape f_window so each row corresponds to a window.
f_window = f_window.reshape(-1, kernel_size ** 2)
# Choose switch according to pool_mode
if pool_mode == 'max':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]), f_window.argmax(1)] = 1
if pool_mode == 'min':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]), f_window.argmin(1)] = 1
if pool_mode == 'uniform':
switch = np.zeros(f_window.shape, dtype=np.float32)
switch[np.arange(switch.shape[0]),
np.random.randint(0, switch.shape[1], switch.shape[0])] = 1
if pool_mode == 'simplex':
switch = np.random.uniform(0, 1, f_window.shape).astype('float32')
switch = switch / switch.sum(axis=1)[:, None]
# Get corresponding values and reshape to downsampled image size.
f_down = np.sum(f_window * switch, axis=1).reshape(downsample_shape)
return f_down, switch
def unspool(f, kernel_size, switch):
"""
Deterministically un-pools an image using a switch.
:param f: image
:param kernel_size: kernel_size used in spool()
:param switch: switch output by spool()
:return: upscaled image
"""
stride = kernel_size
# Initialize upsampled image.
f_up = np.zeros(((f.shape[0] - 1) * stride + kernel_size, (f.shape[1] - 1) * stride + kernel_size),
dtype=np.float32)
f_window = as_strided(f_up,
shape=f.shape + (kernel_size, kernel_size),
strides=(stride * f_up.strides[0], stride * f_up.strides[1]) + f_up.strides)
f_window[:, :, :, :] = (switch * f.ravel()[:, None]).reshape(f.shape + (kernel_size, kernel_size))
return f_up
# py_func() and Cubical() are modified from GUDHI tutorials here: https://github.com/GUDHI/TDA-tutorial
def py_func(func, inp, Tout, stateful=True, name=None, grad=None):
"""
Wraps Python function as TensorFlow op
:param func: Python function
:param inp: inputs to func
:param Tout: types of func's outputs
:param stateful:
:param name:
:param grad: TensorFlow function computing gradient of func
:return: TensorFlow wrapper of func
"""
rnd_name = "PyFuncGrad" + str(np.random.randint(0, 1e+8))
tf.RegisterGradient(rnd_name)(grad)
g = tf.get_default_graph()
with g.gradient_override_map({"PyFunc": rnd_name}):
return tf.py_func(func, inp, Tout, stateful=stateful, name=name)
def Spool(x, kernel_size, pool_mode, name=None):
"""
TF op that stochastically pools an image.
:param x: image
:param kernel_size: integer kernel size
:param pool_mode: 'max', 'min', 'uniform', 'simplex'
:param name:
:return: TF operation
"""
# Define override gradient
def _Spool(op, grad_xdown, grad_switch):
switch = op.outputs[1]
grad_x = tf.py_func(lambda y, z: unspool(y, kernel_size, z),
[grad_xdown, switch],
[tf.float32])[0]
return grad_x
# Create the operation
with ops.op_scope([x], name, "Spool") as name:
return py_func(lambda y: spool(y, kernel_size, pool_mode),
[x],
[tf.float32, tf.float32],
name=name,
grad=_Spool)
def Cubical(x, card, hom_dim, update_func, name=None):
"""
TF op that computes the persistence diagram of an image.
:param x: image
:param card: maximum number of bars kept
:param hom_dim: dimension of homology
:param update_func: update_func(grad_dgm, dgm, cof, x) gives the direction of update
:param name:
:return: TF operation
"""
# Define override gradient
def _Cubical(op, grad_dgm, grad_cof):
dgm, cof = op.outputs[0], op.outputs[1]
x = op.inputs[0]
grad_x = tf.py_func(lambda a, b, c, d: update_func(a, b, c, d),
[grad_dgm, dgm, cof, x],
[tf.float32])[0]
return grad_x
# Create the operation
with ops.op_scope([x], name, "Cubical") as name:
return py_func(lambda y: compute_dgm(y, card, hom_dim),
[x],
[tf.float32, tf.int32],
name=name,
grad=_Cubical)
def UniformNoise(x, eps):
"""
TF op that adds Uniform noise to an image.
:param x: image
:param eps: amount of noise
:return: TF operation
"""
noise = tf.random_uniform(shape=tf.shape(x), minval=-eps, maxval=eps, dtype=tf.float32)
return x + noise
def SqPersInRegion(dgm, pers_region):
"""
TF op that computes the sum of squared persistence in a region.
:param dgm: persistence diagram
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: TF operation
"""
birthtimes = dgm[:, 0]
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = tf.where((pers_region[0] < birthtimes) & (birthtimes < pers_region[1]) &
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3]))
return tf.reduce_sum(tf.square(tf.gather(dgm[:, 1], idx) - tf.gather(dgm[:, 0], idx)))
def AbsPersInRegion(dgm, pers_region):
"""
TF op that computes the sum of absolute persistence in a region.
:param dgm: persistence diagram
:param pers_region: np.array([birth_low, birth_high, lifetime_low, lifetime_high])
:return: TF operation
"""
lifetimes = dgm[:, 1] - dgm[:, 0]
idx = tf.where((pers_region[0] < dgm[:, 0]) & (dgm[:, 0] < pers_region[1]) &
(pers_region[2] < lifetimes) & (lifetimes < pers_region[3]) &
tf.logical_not(tf.equal(dgm[:, 0], dgm[:, 1])))
return tf.reduce_sum(tf.abs(tf.gather(dgm[:, 1], idx) - tf.gather(dgm[:, 0], idx)))
def TopBackprop(f, TopLoss, a, lr, steps):
# Initialize TensorFlow
tf.reset_default_graph()
# Create input variable initialized with image values
x = tf.get_variable("X", initializer=np.array(f), trainable=True)
# Compute persistence
bad_top = TopLoss(x)
# Compute loss
loss = a * bad_top + (1 - a) * tf.losses.mean_squared_error(f, x)
# Optimization
opt = tf.train.AdamOptimizer(learning_rate=lr)
# Train it
train = opt.minimize(loss)
# Training!
init = tf.global_variables_initializer()
start_time = timeit.default_timer()
with tf.Session() as sess:
sess.run(init)
for epoch in range(steps):
sess.run(train)
print('Computation Time = ' + str(timeit.default_timer() - start_time))
return sess.run(tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, "X")[0]) | 0.817356 | 0.562837 |
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from django_tables2 import RequestConfig
from django.apps import apps
from .tables import ConvertedPTable, convertPData, ConvertedGTable, convertGData, convertCData, convertCarData
from .models import Skaterseasons, Goalieseasons, Season, Player
# Create your views here.
class IndexView(generic.ListView):
model = Player
template_name = 'timemachine/index.html'
def goaliestats(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
target = Season.objects.get(pk=request.GET['targetSeason'])
season1 = Season.objects.order_by('seasonid')
season2 = Season.objects.order_by('seasonid')
# prepare data
baseData = Goalieseasons.objects.filter(seasonid=str(request.GET['baseSeason']))
data = convertGData(baseData, base, target)
if 'include' in request.GET:
baseData = Goalieseasons.objects.filter(seasonid=str(request.GET['targetSeason']))
data += (convertGData(baseData, target, target))
table = ConvertedGTable(data, order_by='-w')
table.paginate(page=request.GET.get('page', 1), per_page=30)
RequestConfig(request, paginate={'per_page': 30}).configure(table)
return render(request, 'timemachine/goaliestats.html', {'base': base, 'target': target, 'season1': season1, 'season2': season2, 'table': table})
def skaterstats(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
target = Season.objects.get(pk=request.GET['targetSeason'])
season1 = Season.objects.order_by('seasonid')
season2 = Season.objects.order_by('seasonid')
# prepare data
baseData = Skaterseasons.objects.filter(seasonid=str(request.GET['baseSeason']))
data = convertPData(baseData, base, target)
if 'include' in request.GET:
baseData = Skaterseasons.objects.filter(seasonid=str(request.GET['targetSeason']))
data += (convertPData(baseData, target, target))
table = ConvertedPTable(data, order_by='-p')
RequestConfig(request, paginate={'per_page': 35}).configure(table)
return render(request, 'timemachine/skaterstats.html', {'base': base, 'target': target, 'season1': season1, 'season2': season2, 'table': table})
def comparator(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
season1 = Season.objects.order_by('seasonid')
players1 = Player.objects.order_by('playername')
players2 = Player.objects.order_by('playername')
# prepare data
player1Data = Skaterseasons.objects.filter(playerid=str(request.GET['player1']))
player2Data = Skaterseasons.objects.filter(playerid=str(request.GET['player2']))
player1 = Player.objects.get(pk=(request.GET['player1']))
player2 = Player.objects.get(pk=(request.GET['player2']))
if 'seasons' in request.GET:
data = convertCData(base, player1Data, player2Data)
else:
data = convertCarData(base, player1Data, player2Data)
table = ConvertedPTable(data, order_by='-p')
RequestConfig(request, paginate={'per_page': 35}).configure(table)
return render(request, 'timemachine/comparator.html', {'base': base, 'season1': season1, 'players1': players1, 'players2': players2, 'player1': player1, 'player2': player2, 'table': table}) | timemachine/views.py | from django.shortcuts import render, get_object_or_404
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.views import generic
from django_tables2 import RequestConfig
from django.apps import apps
from .tables import ConvertedPTable, convertPData, ConvertedGTable, convertGData, convertCData, convertCarData
from .models import Skaterseasons, Goalieseasons, Season, Player
# Create your views here.
class IndexView(generic.ListView):
model = Player
template_name = 'timemachine/index.html'
def goaliestats(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
target = Season.objects.get(pk=request.GET['targetSeason'])
season1 = Season.objects.order_by('seasonid')
season2 = Season.objects.order_by('seasonid')
# prepare data
baseData = Goalieseasons.objects.filter(seasonid=str(request.GET['baseSeason']))
data = convertGData(baseData, base, target)
if 'include' in request.GET:
baseData = Goalieseasons.objects.filter(seasonid=str(request.GET['targetSeason']))
data += (convertGData(baseData, target, target))
table = ConvertedGTable(data, order_by='-w')
table.paginate(page=request.GET.get('page', 1), per_page=30)
RequestConfig(request, paginate={'per_page': 30}).configure(table)
return render(request, 'timemachine/goaliestats.html', {'base': base, 'target': target, 'season1': season1, 'season2': season2, 'table': table})
def skaterstats(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
target = Season.objects.get(pk=request.GET['targetSeason'])
season1 = Season.objects.order_by('seasonid')
season2 = Season.objects.order_by('seasonid')
# prepare data
baseData = Skaterseasons.objects.filter(seasonid=str(request.GET['baseSeason']))
data = convertPData(baseData, base, target)
if 'include' in request.GET:
baseData = Skaterseasons.objects.filter(seasonid=str(request.GET['targetSeason']))
data += (convertPData(baseData, target, target))
table = ConvertedPTable(data, order_by='-p')
RequestConfig(request, paginate={'per_page': 35}).configure(table)
return render(request, 'timemachine/skaterstats.html', {'base': base, 'target': target, 'season1': season1, 'season2': season2, 'table': table})
def comparator(request):
# set menus
base = Season.objects.get(pk=request.GET['baseSeason'])
season1 = Season.objects.order_by('seasonid')
players1 = Player.objects.order_by('playername')
players2 = Player.objects.order_by('playername')
# prepare data
player1Data = Skaterseasons.objects.filter(playerid=str(request.GET['player1']))
player2Data = Skaterseasons.objects.filter(playerid=str(request.GET['player2']))
player1 = Player.objects.get(pk=(request.GET['player1']))
player2 = Player.objects.get(pk=(request.GET['player2']))
if 'seasons' in request.GET:
data = convertCData(base, player1Data, player2Data)
else:
data = convertCarData(base, player1Data, player2Data)
table = ConvertedPTable(data, order_by='-p')
RequestConfig(request, paginate={'per_page': 35}).configure(table)
return render(request, 'timemachine/comparator.html', {'base': base, 'season1': season1, 'players1': players1, 'players2': players2, 'player1': player1, 'player2': player2, 'table': table}) | 0.243283 | 0.087876 |
import os
import ml_collections
import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras import optimizers, models
from .sampling import GaussianSampler
from .networks import build_encoder, build_generator, build_discriminator
from ..losses import (
generator_loss,
kl_divergence_loss,
DiscriminatorLoss,
FeatureMatchingLoss,
VGGFeatureMatchingLoss,
)
from ..metrics import KID
class GauGAN(Model):
def __init__(
self,
image_size: int,
num_classes: int,
batch_size: int,
hyperparameters: ml_collections.ConfigDict,
**kwargs,
):
super().__init__(**kwargs)
self.image_size = image_size
self.latent_dim = hyperparameters.latent_dimention
self.batch_size = batch_size
self.num_classes = num_classes
self.image_shape = (image_size, image_size, 3)
self.mask_shape = (image_size, image_size, num_classes)
self.feature_loss_coeff = hyperparameters.feature_loss_coefficient
self.vgg_feature_loss_coeff = hyperparameters.vgg_feature_loss_coefficient
self.kl_divergence_loss_coeff = hyperparameters.kl_divergence_loss_coefficient
self.discriminator = build_discriminator(
self.image_shape,
downsample_factor=hyperparameters.discriminator_downsample_factor,
alpha=hyperparameters.alpha,
dropout=hyperparameters.dropout,
)
self.generator = build_generator(
self.mask_shape, latent_dim=self.latent_dim, alpha=hyperparameters.alpha
)
self.encoder = build_encoder(
self.image_shape,
encoder_downsample_factor=hyperparameters.encoder_downsample_factor,
latent_dim=self.latent_dim,
alpha=hyperparameters.alpha,
dropout=hyperparameters.dropout,
)
self.sampler = GaussianSampler(batch_size, self.latent_dim)
self.patch_size, self.combined_model = self.build_combined_generator()
self.disc_loss_tracker = tf.keras.metrics.Mean(name="disc_loss")
self.gen_loss_tracker = tf.keras.metrics.Mean(name="gen_loss")
self.feat_loss_tracker = tf.keras.metrics.Mean(name="feat_loss")
self.vgg_loss_tracker = tf.keras.metrics.Mean(name="vgg_loss")
self.kl_loss_tracker = tf.keras.metrics.Mean(name="kl_loss")
self.kid = KID(image_size)
@property
def metrics(self):
return [
self.disc_loss_tracker,
self.gen_loss_tracker,
self.feat_loss_tracker,
self.vgg_loss_tracker,
self.kl_loss_tracker,
self.kid,
]
def build_combined_generator(self):
# This method builds a model that takes as inputs the following:
# latent vector, one-hot encoded segmentation label map, and
# a segmentation map. It then (i) generates an image with the generator,
# (ii) passes the generated images and segmentation map to the discriminator.
# Finally, the model produces the following outputs: (a) discriminator outputs,
# (b) generated image.
# We will be using this model to simplify the implementation.
self.discriminator.trainable = False
mask_input = Input(shape=self.mask_shape, name="mask")
image_input = Input(shape=self.image_shape, name="image")
latent_input = Input(shape=(self.latent_dim), name="latent")
generated_image = self.generator([latent_input, mask_input])
discriminator_output = self.discriminator([image_input, generated_image])
patch_size = discriminator_output[-1].shape[1]
combined_model = Model(
[latent_input, mask_input, image_input],
[discriminator_output, generated_image],
)
return patch_size, combined_model
def compile(self, gen_lr: float = 1e-4, disc_lr: float = 4e-4, **kwargs):
super().compile(**kwargs)
self.generator_optimizer = optimizers.Adam(gen_lr, beta_1=0.0, beta_2=0.999)
self.discriminator_optimizer = optimizers.Adam(
disc_lr, beta_1=0.0, beta_2=0.999
)
self.discriminator_loss = DiscriminatorLoss()
self.feature_matching_loss = FeatureMatchingLoss()
self.vgg_loss = VGGFeatureMatchingLoss()
def train_discriminator(self, latent_vector, segmentation_map, real_image, labels):
fake_images = self.generator([latent_vector, labels])
with tf.GradientTape() as gradient_tape:
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, real_image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_loss = 0.5 * (loss_fake + loss_real)
self.discriminator.trainable = True
gradients = gradient_tape.gradient(
total_loss, self.discriminator.trainable_variables
)
self.discriminator_optimizer.apply_gradients(
zip(gradients, self.discriminator.trainable_variables)
)
return total_loss
def train_generator(
self, latent_vector, segmentation_map, labels, image, mean, variance
):
# Generator learns through the signal provided by the discriminator. During
# backpropagation, we only update the generator parameters.
self.discriminator.trainable = False
with tf.GradientTape() as tape:
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
# Compute generator losses.
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_loss = g_loss + kl_loss + vgg_loss + feature_loss
all_trainable_variables = (
self.combined_model.trainable_variables + self.encoder.trainable_variables
)
gradients = tape.gradient(total_loss, all_trainable_variables,)
self.generator_optimizer.apply_gradients(
zip(gradients, all_trainable_variables,)
)
return total_loss, feature_loss, vgg_loss, kl_loss
def train_step(self, data):
segmentation_map, image, labels = data
mean, variance = self.encoder(image)
latent_vector = self.sampler([mean, variance])
discriminator_loss = self.train_discriminator(
latent_vector, segmentation_map, image, labels
)
(generator_loss, feature_loss, vgg_loss, kl_loss) = self.train_generator(
latent_vector, segmentation_map, labels, image, mean, variance
)
# Report progress.
self.disc_loss_tracker.update_state(discriminator_loss)
self.gen_loss_tracker.update_state(generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics[:-1]}
return results
def test_step(self, data):
segmentation_map, image, labels = data
# Obtain the learned moments of the real image distribution.
mean, variance = self.encoder(image)
# Sample a latent from the distribution defined by the learned moments.
latent_vector = self.sampler([mean, variance])
# Generate the fake images,
fake_images = self.generator([latent_vector, labels])
# Calculate the losses.
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_discriminator_loss = 0.5 * (loss_fake + loss_real)
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_generator_loss = g_loss + kl_loss + vgg_loss + feature_loss
# Report progress.
self.kid.update_state(image, fake_images)
self.disc_loss_tracker.update_state(total_discriminator_loss)
self.gen_loss_tracker.update_state(total_generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics}
return results
def call(self, inputs):
latent_vectors, labels = inputs
return self.generator([latent_vectors, labels])
def save(
self,
filepath,
overwrite=True,
include_optimizer=True,
save_format=None,
signatures=None,
options=None,
save_traces=True,
):
self.generator.save(
os.path.join(filepath, "generator"),
overwrite=overwrite,
include_optimizer=include_optimizer,
save_format=save_format,
signatures=signatures,
options=options,
save_traces=save_traces,
)
self.discriminator.save(
os.path.join(filepath, "discriminator"),
overwrite=overwrite,
include_optimizer=include_optimizer,
save_format=save_format,
signatures=signatures,
options=options,
save_traces=save_traces,
)
def load(self, generator_filepath: str, discriminator_filepath: str):
self.generator = models.load_model(generator_filepath)
self.discriminator = models.load_model(discriminator_filepath)
def save_weights(self, filepath, overwrite=True, save_format=None, options=None):
self.generator.save_weights(
os.path.join(filepath, "generator-checkpoints"),
overwrite=overwrite,
save_format=save_format,
options=options,
)
self.discriminator.save_weights(
os.path.join(filepath, "discriminator-checkpoints"),
overwrite=overwrite,
save_format=save_format,
options=options,
)
def load_weights(self, filepath, by_name=False, skip_mismatch=False, options=None):
self.generator.load_weights(
os.path.join(filepath, "generator-checkpoints"),
by_name=by_name,
skip_mismatch=skip_mismatch,
options=options,
)
self.discriminator.load_weights(
os.path.join(filepath, "discriminator-checkpoints"),
by_name=by_name,
skip_mismatch=skip_mismatch,
options=options,
) | gaugan/models/gaugan.py | import os
import ml_collections
import tensorflow as tf
from tensorflow.keras import Input, Model
from tensorflow.keras import optimizers, models
from .sampling import GaussianSampler
from .networks import build_encoder, build_generator, build_discriminator
from ..losses import (
generator_loss,
kl_divergence_loss,
DiscriminatorLoss,
FeatureMatchingLoss,
VGGFeatureMatchingLoss,
)
from ..metrics import KID
class GauGAN(Model):
def __init__(
self,
image_size: int,
num_classes: int,
batch_size: int,
hyperparameters: ml_collections.ConfigDict,
**kwargs,
):
super().__init__(**kwargs)
self.image_size = image_size
self.latent_dim = hyperparameters.latent_dimention
self.batch_size = batch_size
self.num_classes = num_classes
self.image_shape = (image_size, image_size, 3)
self.mask_shape = (image_size, image_size, num_classes)
self.feature_loss_coeff = hyperparameters.feature_loss_coefficient
self.vgg_feature_loss_coeff = hyperparameters.vgg_feature_loss_coefficient
self.kl_divergence_loss_coeff = hyperparameters.kl_divergence_loss_coefficient
self.discriminator = build_discriminator(
self.image_shape,
downsample_factor=hyperparameters.discriminator_downsample_factor,
alpha=hyperparameters.alpha,
dropout=hyperparameters.dropout,
)
self.generator = build_generator(
self.mask_shape, latent_dim=self.latent_dim, alpha=hyperparameters.alpha
)
self.encoder = build_encoder(
self.image_shape,
encoder_downsample_factor=hyperparameters.encoder_downsample_factor,
latent_dim=self.latent_dim,
alpha=hyperparameters.alpha,
dropout=hyperparameters.dropout,
)
self.sampler = GaussianSampler(batch_size, self.latent_dim)
self.patch_size, self.combined_model = self.build_combined_generator()
self.disc_loss_tracker = tf.keras.metrics.Mean(name="disc_loss")
self.gen_loss_tracker = tf.keras.metrics.Mean(name="gen_loss")
self.feat_loss_tracker = tf.keras.metrics.Mean(name="feat_loss")
self.vgg_loss_tracker = tf.keras.metrics.Mean(name="vgg_loss")
self.kl_loss_tracker = tf.keras.metrics.Mean(name="kl_loss")
self.kid = KID(image_size)
@property
def metrics(self):
return [
self.disc_loss_tracker,
self.gen_loss_tracker,
self.feat_loss_tracker,
self.vgg_loss_tracker,
self.kl_loss_tracker,
self.kid,
]
def build_combined_generator(self):
# This method builds a model that takes as inputs the following:
# latent vector, one-hot encoded segmentation label map, and
# a segmentation map. It then (i) generates an image with the generator,
# (ii) passes the generated images and segmentation map to the discriminator.
# Finally, the model produces the following outputs: (a) discriminator outputs,
# (b) generated image.
# We will be using this model to simplify the implementation.
self.discriminator.trainable = False
mask_input = Input(shape=self.mask_shape, name="mask")
image_input = Input(shape=self.image_shape, name="image")
latent_input = Input(shape=(self.latent_dim), name="latent")
generated_image = self.generator([latent_input, mask_input])
discriminator_output = self.discriminator([image_input, generated_image])
patch_size = discriminator_output[-1].shape[1]
combined_model = Model(
[latent_input, mask_input, image_input],
[discriminator_output, generated_image],
)
return patch_size, combined_model
def compile(self, gen_lr: float = 1e-4, disc_lr: float = 4e-4, **kwargs):
super().compile(**kwargs)
self.generator_optimizer = optimizers.Adam(gen_lr, beta_1=0.0, beta_2=0.999)
self.discriminator_optimizer = optimizers.Adam(
disc_lr, beta_1=0.0, beta_2=0.999
)
self.discriminator_loss = DiscriminatorLoss()
self.feature_matching_loss = FeatureMatchingLoss()
self.vgg_loss = VGGFeatureMatchingLoss()
def train_discriminator(self, latent_vector, segmentation_map, real_image, labels):
fake_images = self.generator([latent_vector, labels])
with tf.GradientTape() as gradient_tape:
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, real_image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_loss = 0.5 * (loss_fake + loss_real)
self.discriminator.trainable = True
gradients = gradient_tape.gradient(
total_loss, self.discriminator.trainable_variables
)
self.discriminator_optimizer.apply_gradients(
zip(gradients, self.discriminator.trainable_variables)
)
return total_loss
def train_generator(
self, latent_vector, segmentation_map, labels, image, mean, variance
):
# Generator learns through the signal provided by the discriminator. During
# backpropagation, we only update the generator parameters.
self.discriminator.trainable = False
with tf.GradientTape() as tape:
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
# Compute generator losses.
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_loss = g_loss + kl_loss + vgg_loss + feature_loss
all_trainable_variables = (
self.combined_model.trainable_variables + self.encoder.trainable_variables
)
gradients = tape.gradient(total_loss, all_trainable_variables,)
self.generator_optimizer.apply_gradients(
zip(gradients, all_trainable_variables,)
)
return total_loss, feature_loss, vgg_loss, kl_loss
def train_step(self, data):
segmentation_map, image, labels = data
mean, variance = self.encoder(image)
latent_vector = self.sampler([mean, variance])
discriminator_loss = self.train_discriminator(
latent_vector, segmentation_map, image, labels
)
(generator_loss, feature_loss, vgg_loss, kl_loss) = self.train_generator(
latent_vector, segmentation_map, labels, image, mean, variance
)
# Report progress.
self.disc_loss_tracker.update_state(discriminator_loss)
self.gen_loss_tracker.update_state(generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics[:-1]}
return results
def test_step(self, data):
segmentation_map, image, labels = data
# Obtain the learned moments of the real image distribution.
mean, variance = self.encoder(image)
# Sample a latent from the distribution defined by the learned moments.
latent_vector = self.sampler([mean, variance])
# Generate the fake images,
fake_images = self.generator([latent_vector, labels])
# Calculate the losses.
pred_fake = self.discriminator([segmentation_map, fake_images])[-1]
pred_real = self.discriminator([segmentation_map, image])[-1]
loss_fake = self.discriminator_loss(pred_fake, False)
loss_real = self.discriminator_loss(pred_real, True)
total_discriminator_loss = 0.5 * (loss_fake + loss_real)
real_d_output = self.discriminator([segmentation_map, image])
fake_d_output, fake_image = self.combined_model(
[latent_vector, labels, segmentation_map]
)
pred = fake_d_output[-1]
g_loss = generator_loss(pred)
kl_loss = self.kl_divergence_loss_coeff * kl_divergence_loss(mean, variance)
vgg_loss = self.vgg_feature_loss_coeff * self.vgg_loss(image, fake_image)
feature_loss = self.feature_loss_coeff * self.feature_matching_loss(
real_d_output, fake_d_output
)
total_generator_loss = g_loss + kl_loss + vgg_loss + feature_loss
# Report progress.
self.kid.update_state(image, fake_images)
self.disc_loss_tracker.update_state(total_discriminator_loss)
self.gen_loss_tracker.update_state(total_generator_loss)
self.feat_loss_tracker.update_state(feature_loss)
self.vgg_loss_tracker.update_state(vgg_loss)
self.kl_loss_tracker.update_state(kl_loss)
results = {m.name: m.result() for m in self.metrics}
return results
def call(self, inputs):
latent_vectors, labels = inputs
return self.generator([latent_vectors, labels])
def save(
self,
filepath,
overwrite=True,
include_optimizer=True,
save_format=None,
signatures=None,
options=None,
save_traces=True,
):
self.generator.save(
os.path.join(filepath, "generator"),
overwrite=overwrite,
include_optimizer=include_optimizer,
save_format=save_format,
signatures=signatures,
options=options,
save_traces=save_traces,
)
self.discriminator.save(
os.path.join(filepath, "discriminator"),
overwrite=overwrite,
include_optimizer=include_optimizer,
save_format=save_format,
signatures=signatures,
options=options,
save_traces=save_traces,
)
def load(self, generator_filepath: str, discriminator_filepath: str):
self.generator = models.load_model(generator_filepath)
self.discriminator = models.load_model(discriminator_filepath)
def save_weights(self, filepath, overwrite=True, save_format=None, options=None):
self.generator.save_weights(
os.path.join(filepath, "generator-checkpoints"),
overwrite=overwrite,
save_format=save_format,
options=options,
)
self.discriminator.save_weights(
os.path.join(filepath, "discriminator-checkpoints"),
overwrite=overwrite,
save_format=save_format,
options=options,
)
def load_weights(self, filepath, by_name=False, skip_mismatch=False, options=None):
self.generator.load_weights(
os.path.join(filepath, "generator-checkpoints"),
by_name=by_name,
skip_mismatch=skip_mismatch,
options=options,
)
self.discriminator.load_weights(
os.path.join(filepath, "discriminator-checkpoints"),
by_name=by_name,
skip_mismatch=skip_mismatch,
options=options,
) | 0.911827 | 0.335596 |
import json
import os
import pickle
import gin
import numpy as np
from keras.models import model_from_json
from keras.preprocessing.sequence import pad_sequences
from pipelines import utils as sc
from pipelines.text_processors import strip_accents
@gin.configurable
class CategoryModel:
def __init__(self, model_folder: str, cutoff: float = 0.9):
self.model = self.load_model(model_folder)
self.encoders = self.load_encoders(model_folder)
self.category_cutoff=cutoff
def load_model(self, model_path: str):
model_json = os.path.join(model_path, "model_arc.json")
model_weights = [os.path.join(model_path, x) for x in os.listdir(model_path) if 'weights' in x]
with open(model_json, 'r') as dt:
model_arc = json.load(dt)
model = model_from_json(json.dumps(model_arc))
model.load_weights(model_weights[0])
sc.message("Category model loaded!")
return model
def load_encoders(self, model_folder: str):
"""
Load encoders from pickle.
:return: list of dicts with name and the encoder object
"""
encoders_path = os.path.join(model_folder, "encoders")
if not os.path.isdir(encoders_path):
raise Exception("Encoders folder was not found!")
encoders = [os.path.join(encoders_path, encoder_file) for encoder_file in os.listdir(encoders_path)]
unpickled_encoders = []
for label_path in encoders:
if label_path.endswith('.pckl'):
with open(label_path, 'rb') as dt:
unpickled_encoders.append({'name': os.path.split(label_path)[-1], 'encoder': pickle.load(dt)})
if len(unpickled_encoders) != 3:
raise Exception("There are missing encoders. Please check model/encoders folder!")
return unpickled_encoders
def get_category(self, text_input: str):
# Tokenize text_input
tokenizer, cat_encoder = None, None
for label in self.encoders:
if "category" in label["name"]:
cat_encoder = label["encoder"]
elif "tokenizer" in label["name"]:
tokenizer = label["encoder"]
padding = self.model.get_layer('text_input').input_shape[1]
model_input = self.tokenize_input(text_input, tokenizer, padding)
_, cat_pred = self.model.predict_on_batch(model_input)
category = self.format_category(cat_pred, cat_encoder)
return category
@staticmethod
def tokenize_input(text_input, tokenizer, padding):
sequence = tokenizer.texts_to_sequences([text_input])
sequence = pad_sequences(sequence, maxlen=padding, padding='post').tolist()
return np.array(sequence)
def format_category(self, category_prediction, encoder):
category = []
sufficient_prediction = [x for x in category_prediction[0] if x > self.category_cutoff]
if len(sufficient_prediction) == 0:
category.append("variados")
else:
category = encoder.inverse_transform(category_prediction, 0)
return strip_accents(category[0]) | pipelines/clients.py | import json
import os
import pickle
import gin
import numpy as np
from keras.models import model_from_json
from keras.preprocessing.sequence import pad_sequences
from pipelines import utils as sc
from pipelines.text_processors import strip_accents
@gin.configurable
class CategoryModel:
def __init__(self, model_folder: str, cutoff: float = 0.9):
self.model = self.load_model(model_folder)
self.encoders = self.load_encoders(model_folder)
self.category_cutoff=cutoff
def load_model(self, model_path: str):
model_json = os.path.join(model_path, "model_arc.json")
model_weights = [os.path.join(model_path, x) for x in os.listdir(model_path) if 'weights' in x]
with open(model_json, 'r') as dt:
model_arc = json.load(dt)
model = model_from_json(json.dumps(model_arc))
model.load_weights(model_weights[0])
sc.message("Category model loaded!")
return model
def load_encoders(self, model_folder: str):
"""
Load encoders from pickle.
:return: list of dicts with name and the encoder object
"""
encoders_path = os.path.join(model_folder, "encoders")
if not os.path.isdir(encoders_path):
raise Exception("Encoders folder was not found!")
encoders = [os.path.join(encoders_path, encoder_file) for encoder_file in os.listdir(encoders_path)]
unpickled_encoders = []
for label_path in encoders:
if label_path.endswith('.pckl'):
with open(label_path, 'rb') as dt:
unpickled_encoders.append({'name': os.path.split(label_path)[-1], 'encoder': pickle.load(dt)})
if len(unpickled_encoders) != 3:
raise Exception("There are missing encoders. Please check model/encoders folder!")
return unpickled_encoders
def get_category(self, text_input: str):
# Tokenize text_input
tokenizer, cat_encoder = None, None
for label in self.encoders:
if "category" in label["name"]:
cat_encoder = label["encoder"]
elif "tokenizer" in label["name"]:
tokenizer = label["encoder"]
padding = self.model.get_layer('text_input').input_shape[1]
model_input = self.tokenize_input(text_input, tokenizer, padding)
_, cat_pred = self.model.predict_on_batch(model_input)
category = self.format_category(cat_pred, cat_encoder)
return category
@staticmethod
def tokenize_input(text_input, tokenizer, padding):
sequence = tokenizer.texts_to_sequences([text_input])
sequence = pad_sequences(sequence, maxlen=padding, padding='post').tolist()
return np.array(sequence)
def format_category(self, category_prediction, encoder):
category = []
sufficient_prediction = [x for x in category_prediction[0] if x > self.category_cutoff]
if len(sufficient_prediction) == 0:
category.append("variados")
else:
category = encoder.inverse_transform(category_prediction, 0)
return strip_accents(category[0]) | 0.71602 | 0.141786 |
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Device'
db.create_table(u'pesteh_device', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.Member'], null=True)),
('token', self.gf('django.db.models.fields.CharField')(unique=True, max_length=127)),
))
db.send_create_signal(u'pesteh', ['Device'])
# Adding model 'Message'
db.create_table(u'pesteh_message', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('message_id', self.gf('django.db.models.fields.CharField')(max_length=63, null=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2015, 5, 24, 0, 0))),
('body', self.gf('django.db.models.fields.TextField')()),
('type', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.Member'])),
))
db.send_create_signal(u'pesteh', ['Message'])
# Adding M2M table for field devices on 'Message'
m2m_table_name = db.shorten_name(u'pesteh_message_devices')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('message', models.ForeignKey(orm[u'pesteh.message'], null=False)),
('device', models.ForeignKey(orm[u'pesteh.device'], null=False))
))
db.create_unique(m2m_table_name, ['message_id', 'device_id'])
def backwards(self, orm):
# Deleting model 'Device'
db.delete_table(u'pesteh_device')
# Deleting model 'Message'
db.delete_table(u'pesteh_message')
# Removing M2M table for field devices on 'Message'
db.delete_table(db.shorten_name(u'pesteh_message_devices'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.baby': {
'Meta': {'object_name': 'Baby'},
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'birth_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['base.City']"}),
'doctor': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'father_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'hospital': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'mother_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'pregnancy': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'relatives': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['base.Member']", 'through': u"orm['base.BabyFamilyMembers']", 'symmetrical': 'False'}),
'show_in_search': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'weight': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'base.babyfamilymembers': {
'Meta': {'object_name': 'BabyFamilyMembers'},
'baby': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Baby']"}),
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Member']"}),
'relationship': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Relationship']"})
},
u'base.city': {
'Meta': {'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.member': {
'Meta': {'object_name': 'Member'},
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['base.City']"}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'member_default_page'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['network.Page']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '63', 'blank': 'True'}),
'password': ('<PASSWORD>', [], {'max_length': '128'}),
'settings': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.MemberSettings']", 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'base.membersettings': {
'Meta': {'object_name': 'MemberSettings'},
'feed_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'search_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'base.relationship': {
'Meta': {'object_name': 'Relationship'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_female': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1000'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'network.page': {
'Meta': {'object_name': 'Page'},
'baby': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'page'", 'unique': 'True', 'to': u"orm['base.Baby']"}),
'blocked': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blocked'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.Member']"}),
'cover_photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'pages_liked'", 'to': u"orm['base.Member']", 'through': u"orm['network.Page_likes']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['base.Member']"}),
'setting': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'})
},
u'network.page_likes': {
'Meta': {'object_name': 'Page_likes'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']"}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['network.Page']"})
},
u'pesteh.device': {
'Meta': {'object_name': 'Device'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '127'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']", 'null': 'True'})
},
u'pesteh.message': {
'Meta': {'object_name': 'Message'},
'body': ('django.db.models.fields.TextField', [], {}),
'devices': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['pesteh.Device']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 5, 24, 0, 0)'}),
'type': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']"})
}
}
complete_apps = ['pesteh'] | south_migrations/0001_initial.py | from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Device'
db.create_table(u'pesteh_device', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.Member'], null=True)),
('token', self.gf('django.db.models.fields.CharField')(unique=True, max_length=127)),
))
db.send_create_signal(u'pesteh', ['Device'])
# Adding model 'Message'
db.create_table(u'pesteh_message', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('message_id', self.gf('django.db.models.fields.CharField')(max_length=63, null=True)),
('timestamp', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime(2015, 5, 24, 0, 0))),
('body', self.gf('django.db.models.fields.TextField')()),
('type', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['base.Member'])),
))
db.send_create_signal(u'pesteh', ['Message'])
# Adding M2M table for field devices on 'Message'
m2m_table_name = db.shorten_name(u'pesteh_message_devices')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('message', models.ForeignKey(orm[u'pesteh.message'], null=False)),
('device', models.ForeignKey(orm[u'pesteh.device'], null=False))
))
db.create_unique(m2m_table_name, ['message_id', 'device_id'])
def backwards(self, orm):
# Deleting model 'Device'
db.delete_table(u'pesteh_device')
# Deleting model 'Message'
db.delete_table(u'pesteh_message')
# Removing M2M table for field devices on 'Message'
db.delete_table(db.shorten_name(u'pesteh_message_devices'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.baby': {
'Meta': {'object_name': 'Baby'},
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'birth_time': ('django.db.models.fields.TimeField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['base.City']"}),
'doctor': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'father_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {}),
'height': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'hospital': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'mother_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'pregnancy': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'relatives': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['base.Member']", 'through': u"orm['base.BabyFamilyMembers']", 'symmetrical': 'False'}),
'show_in_search': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'weight': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'base.babyfamilymembers': {
'Meta': {'object_name': 'BabyFamilyMembers'},
'baby': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Baby']"}),
'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Member']"}),
'relationship': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'+'", 'to': u"orm['base.Relationship']"})
},
u'base.city': {
'Meta': {'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.member': {
'Meta': {'object_name': 'Member'},
'birth_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': u"orm['base.City']"}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_page': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'member_default_page'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['network.Page']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gender': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'nickname': ('django.db.models.fields.CharField', [], {'max_length': '63', 'blank': 'True'}),
'password': ('<PASSWORD>', [], {'max_length': '128'}),
'settings': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.MemberSettings']", 'null': 'True', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'base.membersettings': {
'Meta': {'object_name': 'MemberSettings'},
'feed_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'search_help': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'base.relationship': {
'Meta': {'object_name': 'Relationship'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_female': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'order': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1000'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'network.page': {
'Meta': {'object_name': 'Page'},
'baby': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'page'", 'unique': 'True', 'to': u"orm['base.Baby']"}),
'blocked': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blocked'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.Member']"}),
'cover_photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'likes': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'pages_liked'", 'to': u"orm['base.Member']", 'through': u"orm['network.Page_likes']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'pages'", 'to': u"orm['base.Member']"}),
'setting': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'blank': 'True'})
},
u'network.page_likes': {
'Meta': {'object_name': 'Page_likes'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']"}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['network.Page']"})
},
u'pesteh.device': {
'Meta': {'object_name': 'Device'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'token': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '127'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']", 'null': 'True'})
},
u'pesteh.message': {
'Meta': {'object_name': 'Message'},
'body': ('django.db.models.fields.TextField', [], {}),
'devices': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['pesteh.Device']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_id': ('django.db.models.fields.CharField', [], {'max_length': '63', 'null': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2015, 5, 24, 0, 0)'}),
'type': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.Member']"})
}
}
complete_apps = ['pesteh'] | 0.498291 | 0.126677 |
import sys
import time
from codalab.lib.formatting import pretty_json, ratio_str
def open_line(s, f=sys.stderr):
print >> f, '\r\033[K%s' % s,
def clear_line(f=sys.stderr):
print >> f, '\r\033[K',
def pretty_print_json(obj, f=sys.stdout):
f.write(pretty_json(obj))
f.write('\n')
f.flush()
class FileTransferProgress(object):
"""
Formats and displays progress on operations involving transferring bytes.
Should be used as a context manager:
with FileTransferProgress('Uploading ', total_bytes) as progress:
while 1:
...
progress.update(num_bytes)
...
"""
def __init__(self, prefix, bytes_total=None, f=sys.stderr):
"""
:param prefix: Message to prepend the progress text.
:param bytes_total: Number of bytes total to transfer, or None if unknown
:param f: Destination file for progress messages.
"""
self.prefix = prefix
self.bytes_total = bytes_total
self.f = f
@staticmethod
def format_size(num_bytes):
# Simply formats number of mebibytes
return "%.2fMiB" % (num_bytes / 1024.0 / 1024.0)
def __enter__(self):
self.start_time = time.time()
return self
def update(self, bytes_done):
"""
Update progress display.
:param bytes_done: Number of bytes transferred
:returns True: To resume connections for breakable
operations like uploads
"""
self.f.write('\r')
self.f.write(self.prefix)
if self.bytes_total is None:
self.f.write(self.format_size(bytes_done))
else:
self.f.write(ratio_str(self.format_size, bytes_done, self.bytes_total))
speed = float(bytes_done) / (time.time() - self.start_time)
self.f.write(' [%s/sec]' % self.format_size(speed))
self.f.write(' \t\t\t')
self.f.flush()
return True
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.write('\n') | codalab/lib/print_util.py | import sys
import time
from codalab.lib.formatting import pretty_json, ratio_str
def open_line(s, f=sys.stderr):
print >> f, '\r\033[K%s' % s,
def clear_line(f=sys.stderr):
print >> f, '\r\033[K',
def pretty_print_json(obj, f=sys.stdout):
f.write(pretty_json(obj))
f.write('\n')
f.flush()
class FileTransferProgress(object):
"""
Formats and displays progress on operations involving transferring bytes.
Should be used as a context manager:
with FileTransferProgress('Uploading ', total_bytes) as progress:
while 1:
...
progress.update(num_bytes)
...
"""
def __init__(self, prefix, bytes_total=None, f=sys.stderr):
"""
:param prefix: Message to prepend the progress text.
:param bytes_total: Number of bytes total to transfer, or None if unknown
:param f: Destination file for progress messages.
"""
self.prefix = prefix
self.bytes_total = bytes_total
self.f = f
@staticmethod
def format_size(num_bytes):
# Simply formats number of mebibytes
return "%.2fMiB" % (num_bytes / 1024.0 / 1024.0)
def __enter__(self):
self.start_time = time.time()
return self
def update(self, bytes_done):
"""
Update progress display.
:param bytes_done: Number of bytes transferred
:returns True: To resume connections for breakable
operations like uploads
"""
self.f.write('\r')
self.f.write(self.prefix)
if self.bytes_total is None:
self.f.write(self.format_size(bytes_done))
else:
self.f.write(ratio_str(self.format_size, bytes_done, self.bytes_total))
speed = float(bytes_done) / (time.time() - self.start_time)
self.f.write(' [%s/sec]' % self.format_size(speed))
self.f.write(' \t\t\t')
self.f.flush()
return True
def __exit__(self, exc_type, exc_val, exc_tb):
self.f.write('\n') | 0.480966 | 0.139396 |
# no functions
# classes
class ColorDialog(object):
""" ColorDialog() """
def SetDialogTabs(self, value):
""" SetDialogTabs(self: ColorDialog, value: ColorTabs) """
pass
def ShowDialog(self):
""" ShowDialog(self: ColorDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: ColorDialog) -> Nullable[bool] """
pass
Color = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Color(self: ColorDialog) -> Color
Set: Color(self: ColorDialog) = value
"""
IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IncludeByBlockByLayer(self: ColorDialog) -> bool
Set: IncludeByBlockByLayer(self: ColorDialog) = value
"""
ColorTabs = None
class Menu(object):
# no doc
MenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MenuItems(self: Menu) -> MenuItemCollection
"""
class ContextMenuExtension(Menu):
""" ContextMenuExtension() """
def Dispose(self):
""" Dispose(self: ContextMenuExtension) """
pass
def __enter__(self, *args): #cannot find CLR method
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args): #cannot find CLR method
""" __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
pass
Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Title(self: ContextMenuExtension) -> str
Set: Title(self: ContextMenuExtension) = value
"""
Popup = None
class DefaultPane(Enum):
""" enum DefaultPane, values: All (24), CursorCoordinates (1), DynamicInput (18), DynamicUcs (19), Float (12), Grid (3), LayoutIcon (22), LayoutModelIcons (20), LayoutMoreIcon (23), LineWeight (7), Model (10), ModelIcon (21), ModeMacro (0), ObjectSnap (11), ObjectTrack (6), Ortho (4), Paper (9), PaperModel (8), Polar (5), Snap (2), Spacer (14), Table (13), ViewportMaximize (16), ViewportMaximizeNext (17), ViewportMaximizePrevious (15) """
All = None
CursorCoordinates = None
DynamicInput = None
DynamicUcs = None
Float = None
Grid = None
LayoutIcon = None
LayoutModelIcons = None
LayoutMoreIcon = None
LineWeight = None
Model = None
ModelIcon = None
ModeMacro = None
ObjectSnap = None
ObjectTrack = None
Ortho = None
Paper = None
PaperModel = None
Polar = None
Snap = None
Spacer = None
Table = None
value__ = None
ViewportMaximize = None
ViewportMaximizeNext = None
ViewportMaximizePrevious = None
class DockSides(Enum):
""" enum (flags) DockSides, values: Bottom (32768), Left (4096), None (0), Right (16384), Top (8192) """
Bottom = None
Left = None
None = None
Right = None
Top = None
value__ = None
class Window(DisposableWrapper):
# no doc
def Close(self):
""" Close(self: Window) """
pass
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def Focus(self):
""" Focus(self: Window) -> bool """
pass
@staticmethod
def FromHandle(handle):
""" FromHandle(handle: IntPtr) -> Window """
pass
@staticmethod
def GetDeviceIndependentScale(hWnd):
""" GetDeviceIndependentScale(hWnd: IntPtr) -> Vector """
pass
def GetWndPtr(self, *args): #cannot find CLR method
""" GetWndPtr(self: Window) -> IntPtr """
pass
@staticmethod # known case of __new__
def __new__(self, *args): #cannot find CLR constructor
""" __new__(cls: type, ptr: IntPtr, autoDelete: bool) """
pass
DeviceIndependentLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DeviceIndependentLocation(self: Window) -> Point
Set: DeviceIndependentLocation(self: Window) = value
"""
DeviceIndependentSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DeviceIndependentSize(self: Window) -> Size
Set: DeviceIndependentSize(self: Window) = value
"""
Handle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Handle(self: Window) -> IntPtr
"""
Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Text(self: Window) -> str
Set: Text(self: Window) = value
"""
UnmanagedWindow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UnmanagedWindow(self: Window) -> IntPtr
"""
Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Visible(self: Window) -> bool
Set: Visible(self: Window) = value
"""
WindowState = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: WindowState(self: Window) -> State
Set: WindowState(self: Window) = value
"""
State = None
class DocumentWindow(Window):
# no doc
def Activate(self):
""" Activate(self: DocumentWindow) """
pass
def Close(self):
""" Close(self: DocumentWindow) """
pass
def Dispose(self):
""" Dispose(self: DocumentWindow, A_0: bool) """
pass
@staticmethod # known case of __new__
def __new__(self, *args): #cannot find CLR constructor
""" __new__(cls: type, docWindow: AcApDocWindow*) """
pass
CanClose = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CanClose(self: DocumentWindow) -> bool
"""
CanUpdate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CanUpdate(self: DocumentWindow) -> bool
"""
Document = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Document(self: DocumentWindow) -> object
"""
Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Title(self: DocumentWindow) -> str
Set: Title(self: DocumentWindow) = value
"""
DocumentWindowLoaded = None
DocumentWindowUpdated = None
class DocumentWindowLoadedEventHandler(MulticastDelegate):
""" DocumentWindowLoadedEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: DocumentWindowLoadedEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: DocumentWindowLoadedEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: DocumentWindowLoadedEventHandler, sender: object, e: EventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class DocumentWindowUpdatedEventHandler(MulticastDelegate):
""" DocumentWindowUpdatedEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: DocumentWindowUpdatedEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: DocumentWindowUpdatedEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: DocumentWindowUpdatedEventHandler, sender: object, e: EventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class DrawingDocumentWindow(DocumentWindow):
# no doc
def Dispose(self):
""" Dispose(self: DocumentWindow, A_0: bool) """
pass
Document = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Document(self: DrawingDocumentWindow) -> Document
"""
class DropTarget(object):
# no doc
def OnDragEnter(self, e):
""" OnDragEnter(self: DropTarget, e: DragEventArgs) """
pass
def OnDragLeave(self):
""" OnDragLeave(self: DropTarget) """
pass
def OnDragOver(self, e):
""" OnDragOver(self: DropTarget, e: DragEventArgs) """
pass
def OnDrop(self, e):
""" OnDrop(self: DropTarget, e: DragEventArgs) """
pass
class IconType(Enum):
""" enum IconType, values: Critical (2), Information (1), None (0), Warning (3) """
Critical = None
Information = None
None = None
value__ = None
Warning = None
class InfoCenter(object):
""" InfoCenter() """
def InfoToolbarSizeChanged(self, bExpand):
""" InfoToolbarSizeChanged(self: InfoCenter, bExpand: bool) """
pass
def InvokeToolbarMoveEvent(self):
""" InvokeToolbarMoveEvent(self: InfoCenter) """
pass
def InvokeToolbarResizeEvent(self, width):
""" InvokeToolbarResizeEvent(self: InfoCenter, width: int) """
pass
def LaunchSubAwareModule(self, resReqid, strCourseId, strModuleId):
""" LaunchSubAwareModule(self: InfoCenter, resReqid: Int16, strCourseId: str, strModuleId: str) """
pass
Host = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Host(self: InfoCenter) -> HwndSource
Set: Host(self: InfoCenter) = value
"""
KeepFocus = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: KeepFocus(self: InfoCenter) -> bool
Set: KeepFocus(self: InfoCenter) = value
"""
SubAwareClientInfo = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: SubAwareClientInfo(self: InfoCenter) -> str
"""
UPIXMLData = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: UPIXMLData(self: InfoCenter) -> str
"""
Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Visible(self: InfoCenter) -> bool
Set: Visible(self: InfoCenter) = value
"""
mInitWidth = None
m_pToolbarMoveDelegate = None
m_pToolbarResizeDelegate = None
class InfoToolbarMoveDelegate(MulticastDelegate):
""" InfoToolbarMoveDelegate(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, callback, obj):
""" BeginInvoke(self: InfoToolbarMoveDelegate, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: InfoToolbarMoveDelegate, result: IAsyncResult) """
pass
def Invoke(self):
""" Invoke(self: InfoToolbarMoveDelegate) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class InfoToolbarResizeTo(MulticastDelegate):
""" InfoToolbarResizeTo(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, width, callback, obj):
""" BeginInvoke(self: InfoToolbarResizeTo, width: int, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: InfoToolbarResizeTo, result: IAsyncResult) """
pass
def Invoke(self, width):
""" Invoke(self: InfoToolbarResizeTo, width: int) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class LayerTransparencyDialog(object):
""" LayerTransparencyDialog() """
def ShowDialog(self):
""" ShowDialog(self: LayerTransparencyDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: LayerTransparencyDialog) -> Nullable[bool] """
pass
Percent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Percent(self: LayerTransparencyDialog) -> UInt32
Set: Percent(self: LayerTransparencyDialog) = value
"""
class LinetypeDialog(object):
""" LinetypeDialog() """
def ShowDialog(self):
""" ShowDialog(self: LinetypeDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: LinetypeDialog) -> Nullable[bool] """
pass
IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IncludeByBlockByLayer(self: LinetypeDialog) -> bool
Set: IncludeByBlockByLayer(self: LinetypeDialog) = value
"""
Linetype = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Linetype(self: LinetypeDialog) -> ObjectId
Set: Linetype(self: LinetypeDialog) = value
"""
class LineWeightDialog(object):
""" LineWeightDialog() """
def ShowDialog(self):
""" ShowDialog(self: LineWeightDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: LineWeightDialog) -> Nullable[bool] """
pass
IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IncludeByBlockByLayer(self: LineWeightDialog) -> bool
Set: IncludeByBlockByLayer(self: LineWeightDialog) = value
"""
LineWeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LineWeight(self: LineWeightDialog) -> LineWeight
Set: LineWeight(self: LineWeightDialog) = value
"""
class MenuItem(Menu):
"""
MenuItem(value: str, icon: Icon)
MenuItem(value: str)
"""
@staticmethod # known case of __new__
def __new__(self, value, icon=None):
"""
__new__(cls: type, value: str, icon: Icon)
__new__(cls: type, value: str)
"""
pass
Checked = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Checked(self: MenuItem) -> bool
Set: Checked(self: MenuItem) = value
"""
Enabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Enabled(self: MenuItem) -> bool
Set: Enabled(self: MenuItem) = value
"""
Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Icon(self: MenuItem) -> Icon
Set: Icon(self: MenuItem) = value
"""
Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Text(self: MenuItem) -> str
Set: Text(self: MenuItem) = value
"""
Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Visible(self: MenuItem) -> bool
Set: Visible(self: MenuItem) = value
"""
Click = None
class MenuItemCollection(object):
""" MenuItemCollection(owner: Menu) """
def Add(self, value):
""" Add(self: MenuItemCollection, value: MenuItem) -> int """
pass
def Clear(self):
""" Clear(self: MenuItemCollection) """
pass
def Contains(self, value):
""" Contains(self: MenuItemCollection, value: MenuItem) -> bool """
pass
def CopyTo(self, array, index):
""" CopyTo(self: MenuItemCollection, array: Array[MenuItem], index: int) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: MenuItemCollection) -> IEnumerator[IMenuItem] """
pass
def IndexOf(self, value):
""" IndexOf(self: MenuItemCollection, value: MenuItem) -> int """
pass
def Insert(self, index, value):
""" Insert(self: MenuItemCollection, index: int, value: MenuItem) """
pass
def Remove(self, value):
""" Remove(self: MenuItemCollection, value: MenuItem) """
pass
def RemoveAt(self, index):
""" RemoveAt(self: MenuItemCollection, index: int) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__(self: IList, value: object) -> bool """
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, owner):
""" __new__(cls: type, owner: Menu) """
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: MenuItemCollection) -> int
"""
IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsFixedSize(self: MenuItemCollection) -> bool
"""
IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsReadOnly(self: MenuItemCollection) -> bool
"""
class OpenFileDialog(object):
""" OpenFileDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
def GetFilenames(self):
""" GetFilenames(self: OpenFileDialog) -> Array[str] """
pass
def ShowDialog(self):
""" ShowDialog(self: OpenFileDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: OpenFileDialog) -> Nullable[bool] """
pass
@staticmethod # known case of __new__
def __new__(self, title, defaultName, extension, dialogName, flags):
""" __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
pass
Filename = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Filename(self: OpenFileDialog) -> str
"""
OpenFileDialogFlags = None
class OpenFileOrFolderDialog(object):
""" OpenFileOrFolderDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
def ShowDialog(self):
""" ShowDialog(self: OpenFileOrFolderDialog) -> DialogResult """
pass
@staticmethod # known case of __new__
def __new__(self, title, defaultName, extension, dialogName, flags):
""" __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
pass
FileOrFoldername = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: FileOrFoldername(self: OpenFileOrFolderDialog) -> str
"""
class Palette(object):
# no doc
Name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Name(self: Palette) -> str
Set: Name(self: Palette) = value
"""
PaletteSet = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PaletteSet(self: Palette) -> PaletteSet
"""
class PaletteActivatedEventArgs(EventArgs):
""" PaletteActivatedEventArgs(activated: Palette, deactivated: Palette) """
@staticmethod # known case of __new__
def __new__(self, activated, deactivated):
""" __new__(cls: type, activated: Palette, deactivated: Palette) """
pass
Activated = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Activated(self: PaletteActivatedEventArgs) -> Palette
"""
Deactivated = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Deactivated(self: PaletteActivatedEventArgs) -> Palette
"""
class PaletteActivatedEventHandler(MulticastDelegate):
""" PaletteActivatedEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: PaletteActivatedEventHandler, sender: object, e: PaletteActivatedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: PaletteActivatedEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: PaletteActivatedEventHandler, sender: object, e: PaletteActivatedEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class PaletteAddContextMenuEventArgs(EventArgs):
""" PaletteAddContextMenuEventArgs(menuitems: List[MenuItem], removeMenuItems: List[int], nHitFlag: int, nRightClkTab: int) """
@staticmethod # known case of __new__
def __new__(self, menuitems, removeMenuItems, nHitFlag, nRightClkTab):
""" __new__(cls: type, menuitems: List[MenuItem], removeMenuItems: List[int], nHitFlag: int, nRightClkTab: int) """
pass
HitFlag = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: HitFlag(self: PaletteAddContextMenuEventArgs) -> int
"""
MenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MenuItems(self: PaletteAddContextMenuEventArgs) -> List[MenuItem]
"""
RemoveMenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RemoveMenuItems(self: PaletteAddContextMenuEventArgs) -> List[int]
"""
RightClickTab = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RightClickTab(self: PaletteAddContextMenuEventArgs) -> int
"""
class PaletteAddContextMenuEventHandler(MulticastDelegate):
""" PaletteAddContextMenuEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: PaletteAddContextMenuEventHandler, sender: object, e: PaletteAddContextMenuEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: PaletteAddContextMenuEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: PaletteAddContextMenuEventHandler, sender: object, e: PaletteAddContextMenuEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class PaletteEnterSizeMoveEventArgs(EventArgs):
""" PaletteEnterSizeMoveEventArgs(bEnterSizeMove: bool) """
@staticmethod # known case of __new__
def __new__(self, bEnterSizeMove):
""" __new__(cls: type, bEnterSizeMove: bool) """
pass
EnterSizeMove = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: EnterSizeMove(self: PaletteEnterSizeMoveEventArgs) -> bool
"""
class PaletteEnterSizeMoveEventHandler(MulticastDelegate):
""" PaletteEnterSizeMoveEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: PaletteEnterSizeMoveEventHandler, sender: object, e: PaletteEnterSizeMoveEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: PaletteEnterSizeMoveEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: PaletteEnterSizeMoveEventHandler, sender: object, e: PaletteEnterSizeMoveEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class PalettePersistEventArgs(EventArgs):
""" PalettePersistEventArgs(configurationSection: IConfigurationSection) """
@staticmethod # known case of __new__
def __new__(self, configurationSection):
""" __new__(cls: type, configurationSection: IConfigurationSection) """
pass
ConfigurationSection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: ConfigurationSection(self: PalettePersistEventArgs) -> IConfigurationSection
"""
class PalettePersistEventHandler(MulticastDelegate):
""" PalettePersistEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: PalettePersistEventHandler, sender: object, e: PalettePersistEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: PalettePersistEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: PalettePersistEventHandler, sender: object, e: PalettePersistEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class PaletteSet(Window):
"""
PaletteSet(name: str, cmd: str, toolID: Guid)
PaletteSet(name: str, toolID: Guid)
PaletteSet(name: str)
"""
def Activate(self, index):
""" Activate(self: PaletteSet, index: int) """
pass
def Add(self, name, *__args):
"""
Add(self: PaletteSet, name: str, control: Control) -> Palette
Add(self: PaletteSet, name: str, htmlPage: Uri) -> Palette
"""
pass
def AddVisual(self, name, control, bResizeContentToPaletteSize=None):
"""
AddVisual(self: PaletteSet, name: str, control: Visual) -> Palette
AddVisual(self: PaletteSet, name: str, control: Visual, bResizeContentToPaletteSize: bool) -> Palette
"""
pass
def AddVisualBrowser(self, name, control, htmlPage, scriptableName, bShowBrowserFirst):
""" AddVisualBrowser(self: PaletteSet, name: str, control: Visual, htmlPage: str, scriptableName: str, bShowBrowserFirst: bool) -> Palette """
pass
def CopyTo(self, array, index):
""" CopyTo(self: PaletteSet, array: Array[Palette], index: int) """
pass
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def EnableTransparency(self, value):
""" EnableTransparency(self: PaletteSet, value: bool) -> bool """
pass
def FloatControl(self, *__args):
""" FloatControl(self: PaletteSet, pointOnScreen: Point)FloatControl(self: PaletteSet, value: Rect) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: PaletteSet) -> IEnumerator """
pass
def GetThemedIcon(self, bBigIcon):
""" GetThemedIcon(self: PaletteSet, bBigIcon: bool) -> Icon """
pass
def InitializeFloatingPosition(self, value):
""" InitializeFloatingPosition(self: PaletteSet, value: Rect) """
pass
def RecalculateDockSiteLayout(self):
""" RecalculateDockSiteLayout(self: PaletteSet) """
pass
def Remove(self, index):
""" Remove(self: PaletteSet, index: int) """
pass
def SetThemedIcon(self, value, theme):
""" SetThemedIcon(self: PaletteSet, value: Icon, theme: ColorThemeEnum) """
pass
def switchVisualBrowser(self, index, bVisual):
""" switchVisualBrowser(self: PaletteSet, index: int, bVisual: bool) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+yx.__add__(y) <==> x+y """
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
@staticmethod # known case of __new__
def __new__(self, name, *__args):
"""
__new__(cls: type, name: str, cmd: str, toolID: Guid)
__new__(cls: type, name: str, toolID: Guid)
__new__(cls: type, name: str)
"""
pass
Anchored = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Anchored(self: PaletteSet) -> bool
"""
AutoRollUp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: AutoRollUp(self: PaletteSet) -> bool
Set: AutoRollUp(self: PaletteSet) = value
"""
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: PaletteSet) -> int
"""
DarkThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DarkThemedIcon(self: PaletteSet) -> Icon
Set: DarkThemedIcon(self: PaletteSet) = value
"""
DeviceIndependentLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DeviceIndependentLocation(self: PaletteSet) -> Point
Set: DeviceIndependentLocation(self: PaletteSet) = value
"""
DeviceIndependentMinimumSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DeviceIndependentMinimumSize(self: PaletteSet) -> Size
Set: DeviceIndependentMinimumSize(self: PaletteSet) = value
"""
DeviceIndependentSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DeviceIndependentSize(self: PaletteSet) -> Size
Set: DeviceIndependentSize(self: PaletteSet) = value
"""
Dock = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Dock(self: PaletteSet) -> DockSides
Set: Dock(self: PaletteSet) = value
"""
DockEnabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DockEnabled(self: PaletteSet) -> DockSides
Set: DockEnabled(self: PaletteSet) = value
"""
Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Icon(self: PaletteSet) -> Icon
Set: Icon(self: PaletteSet) = value
"""
KeepFocus = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: KeepFocus(self: PaletteSet) -> bool
Set: KeepFocus(self: PaletteSet) = value
"""
LargeDarkThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LargeDarkThemedIcon(self: PaletteSet) -> Icon
Set: LargeDarkThemedIcon(self: PaletteSet) = value
"""
LargeLightThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LargeLightThemedIcon(self: PaletteSet) -> Icon
Set: LargeLightThemedIcon(self: PaletteSet) = value
"""
LightThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: LightThemedIcon(self: PaletteSet) -> Icon
Set: LightThemedIcon(self: PaletteSet) = value
"""
Location = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Location(self: PaletteSet) -> Point
Set: Location(self: PaletteSet) = value
"""
MinimumSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MinimumSize(self: PaletteSet) -> Size
Set: MinimumSize(self: PaletteSet) = value
"""
Name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Name(self: PaletteSet) -> str
Set: Name(self: PaletteSet) = value
"""
Opacity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Opacity(self: PaletteSet) -> int
Set: Opacity(self: PaletteSet) = value
"""
PaletteSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PaletteSize(self: PaletteSet) -> Size
"""
RolledUp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: RolledUp(self: PaletteSet) -> bool
Set: RolledUp(self: PaletteSet) = value
"""
Size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Size(self: PaletteSet) -> Size
Set: Size(self: PaletteSet) = value
"""
Style = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Style(self: PaletteSet) -> PaletteSetStyles
Set: Style(self: PaletteSet) = value
"""
TitleBarLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TitleBarLocation(self: PaletteSet) -> PaletteSetTitleBarLocation
Set: TitleBarLocation(self: PaletteSet) = value
"""
Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Visible(self: PaletteSet) -> bool
Set: Visible(self: PaletteSet) = value
"""
Focused = None
Help = None
Load = None
PaletteActivated = None
PaletteAddContextMenu = None
PaletteSetDestroy = None
PaletteSetEnterSizeMove = None
PaletteSetHostMoved = None
PaletteSetMoved = None
PaletteSetShowDockBar = None
PaletteSetTitleBarLocationChange = None
Save = None
Saving = None
SizeChanged = None
StateChanged = None
# Stub classes for the PaletteSet event-handler delegates, their EventArgs
# payloads, and the related enums. Each delegate mirrors the standard .NET
# delegate surface (BeginInvoke/EndInvoke/Invoke); bodies are empty stubs.
class PaletteSetDestroyEventHandler(MulticastDelegate):
    """ PaletteSetDestroyEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetDestroyEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetDestroyEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetDestroyEventHandler, sender: object, e: EventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetDockSite(object):
    """ PaletteSetDockSite() """
    def CanDock(self, mousePosition):
        """ CanDock(self: PaletteSetDockSite, mousePosition: Point) -> Nullable[Rect] """
        pass
    def Dock(self, paletteset):
        """ Dock(self: PaletteSetDockSite, paletteset: PaletteSet) -> bool """
        pass
    def Initialize(self, paletteset, desiredSize, dockSyle):
        # NOTE(review): 'dockSyle' spelling comes from the generated .NET
        # signature; kept verbatim so the stub matches the real parameter name.
        """ Initialize(self: PaletteSetDockSite, paletteset: PaletteSet, desiredSize: Size, dockSyle: int) """
        pass
    def Uninitialize(self):
        """ Uninitialize(self: PaletteSetDockSite) """
        pass

class PaletteSetFocusedEventArgs(EventArgs):
    """ PaletteSetFocusedEventArgs() """

class PaletteSetFocusedEventHandler(MulticastDelegate):
    """ PaletteSetFocusedEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetFocusedEventHandler, sender: object, e: PaletteSetFocusedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetFocusedEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetFocusedEventHandler, sender: object, e: PaletteSetFocusedEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetHelpEventArgs(EventArgs):
    """ PaletteSetHelpEventArgs() """

class PaletteSetHelpEventHandler(MulticastDelegate):
    """ PaletteSetHelpEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetHelpEventHandler, sender: object, e: PaletteSetHelpEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetHelpEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetHelpEventHandler, sender: object, e: PaletteSetHelpEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetMoveEventArgs(EventArgs):
    """ PaletteSetMoveEventArgs(x: int, y: int) """
    @staticmethod # known case of __new__
    def __new__(self, x, y):
        """ __new__(cls: type, x: int, y: int) """
        pass
    X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: X(self: PaletteSetMoveEventArgs) -> int
    """
    # NOTE(review): lowercase 'y' (vs uppercase 'X') is what the generator
    # emitted; verify against the actual managed API before "fixing" the case.
    y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: y(self: PaletteSetMoveEventArgs) -> int
    """

class PaletteSetMoveEventHandler(MulticastDelegate):
    """ PaletteSetMoveEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetMoveEventHandler, sender: object, e: PaletteSetMoveEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetMoveEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetMoveEventHandler, sender: object, e: PaletteSetMoveEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetShowDockBarEventHandler(MulticastDelegate):
    """ PaletteSetShowDockBarEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetShowDockBarEventHandler, sender: object, e: PaletteShowDockBarEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetShowDockBarEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetShowDockBarEventHandler, sender: object, e: PaletteShowDockBarEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetSizeEventArgs(EventArgs):
    """ PaletteSetSizeEventArgs(cx: int, cy: int, dx: float, dy: float) """
    @staticmethod # known case of __new__
    def __new__(self, cx, cy, dx, dy):
        """ __new__(cls: type, cx: int, cy: int, dx: float, dy: float) """
        pass
    DeviceIndependentHeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: DeviceIndependentHeight(self: PaletteSetSizeEventArgs) -> float
    """
    DeviceIndependentWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: DeviceIndependentWidth(self: PaletteSetSizeEventArgs) -> float
    """
    Height = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Height(self: PaletteSetSizeEventArgs) -> int
    """
    Width = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Width(self: PaletteSetSizeEventArgs) -> int
    """

class PaletteSetSizeEventHandler(MulticastDelegate):
    """ PaletteSetSizeEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetSizeEventHandler, sender: object, e: PaletteSetSizeEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetSizeEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetSizeEventHandler, sender: object, e: PaletteSetSizeEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetStateEventArgs(EventArgs):
    """ PaletteSetStateEventArgs(state: StateEventIndex) """
    @staticmethod # known case of __new__
    def __new__(self, state):
        """ __new__(cls: type, state: StateEventIndex) """
        pass
    NewState = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: NewState(self: PaletteSetStateEventArgs) -> StateEventIndex
    """

class PaletteSetStateEventHandler(MulticastDelegate):
    """ PaletteSetStateEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetStateEventHandler, sender: object, e: PaletteSetStateEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetStateEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetStateEventHandler, sender: object, e: PaletteSetStateEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteSetStyles(Enum):
    # Flags enum stub: the numeric values are documented in the docstring;
    # the generator assigns None placeholders for each member.
    """ enum (flags) PaletteSetStyles, values: NameEditable (16), Notify (1024), NoTitleBar (32768), PauseAutoRollupForChildModalDialog (65536), ShowAutoHideButton (2), ShowCloseButton (8), ShowPropertiesMenu (4), ShowTabForSingle (64), SingleColDock (4096), SingleRowDock (512), SingleRowNoVertResize (2048), Snappable (32), UsePaletteNameAsTitleForSingle (128) """
    NameEditable = None
    Notify = None
    NoTitleBar = None
    PauseAutoRollupForChildModalDialog = None
    ShowAutoHideButton = None
    ShowCloseButton = None
    ShowPropertiesMenu = None
    ShowTabForSingle = None
    SingleColDock = None
    SingleRowDock = None
    SingleRowNoVertResize = None
    Snappable = None
    UsePaletteNameAsTitleForSingle = None
    value__ = None

class PaletteSetTitleBarLocation(Enum):
    """ enum PaletteSetTitleBarLocation, values: Left (0), Right (1) """
    Left = None
    Right = None
    value__ = None

class PaletteSetTitleBarLocationChangeEventHandler(MulticastDelegate):
    """ PaletteSetTitleBarLocationChangeEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: PaletteSetTitleBarLocationChangeEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: PaletteSetTitleBarLocationChangeEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: PaletteSetTitleBarLocationChangeEventHandler, sender: object, e: EventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class PaletteShowDockBarEventArgs(EventArgs):
    """ PaletteShowDockBarEventArgs(bShowDockBar: bool) """
    @staticmethod # known case of __new__
    def __new__(self, bShowDockBar):
        """ __new__(cls: type, bShowDockBar: bool) """
        pass
    ShowDockBar = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: ShowDockBar(self: PaletteShowDockBarEventArgs) -> bool
    """
# Status-bar stubs: StatusBarItem base, the Pane item type, the PaneCollection
# IList-style container, and the PaneStyles flags enum.
class StatusBarItem(DisposableWrapper):
    # no doc
    def DisplayContextMenu(self, menu, p):
        """ DisplayContextMenu(self: StatusBarItem, menu: ContextMenu, p: Point) """
        pass
    def Dispose(self):
        """ Dispose(self: DisposableWrapper, A_0: bool) """
        pass
    def PointToClient(self, p):
        """ PointToClient(self: StatusBarItem, p: Point) -> Point """
        pass
    def PointToScreen(self, p):
        """ PointToScreen(self: StatusBarItem, p: Point) -> Point """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *args): #cannot find CLR constructor
        """
        __new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
        __new__(cls: type)
        """
        pass
    Enabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Enabled(self: StatusBarItem) -> bool
    Set: Enabled(self: StatusBarItem) = value
    """
    Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Icon(self: StatusBarItem) -> Icon
    Set: Icon(self: StatusBarItem) = value
    """
    ToolTipText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: ToolTipText(self: StatusBarItem) -> str
    Set: ToolTipText(self: StatusBarItem) = value
    """
    Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Visible(self: StatusBarItem) -> bool
    Set: Visible(self: StatusBarItem) = value
    """
    # Event placeholders assigned by the runtime wrapper.
    Deleted = None
    MouseDown = None

class Pane(StatusBarItem):
    """ Pane() """
    def Dispose(self):
        """ Dispose(self: DisposableWrapper, A_0: bool) """
        pass
    @staticmethod # known case of __new__
    def __new__(self):
        """
        __new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
        __new__(cls: type)
        """
        pass
    MaximumWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: MaximumWidth(self: Pane) -> int
    Set: MaximumWidth(self: Pane) = value
    """
    MinimumWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: MinimumWidth(self: Pane) -> int
    Set: MinimumWidth(self: Pane) = value
    """
    Style = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Style(self: Pane) -> PaneStyles
    Set: Style(self: Pane) = value
    """
    Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Text(self: Pane) -> str
    Set: Text(self: Pane) = value
    """

class PaneCollection(object):
    # no doc
    # IList-style container of Pane objects (see docstrings for signatures).
    def Add(self, value):
        """ Add(self: PaneCollection, value: Pane) -> int """
        pass
    def Clear(self):
        """ Clear(self: PaneCollection) """
        pass
    def Contains(self, value):
        """ Contains(self: PaneCollection, value: Pane) -> bool """
        pass
    def CopyTo(self, array, index):
        """ CopyTo(self: PaneCollection, array: Array[Pane], index: int) """
        pass
    def GetEnumerator(self):
        """ GetEnumerator(self: PaneCollection) -> IEnumerator """
        pass
    def IndexOf(self, value):
        """ IndexOf(self: PaneCollection, value: Pane) -> int """
        pass
    def Insert(self, index, value):
        """ Insert(self: PaneCollection, index: int, value: Pane) """
        pass
    def Remove(self, value):
        """ Remove(self: PaneCollection, value: Pane) """
        pass
    def RemoveAt(self, index):
        """ RemoveAt(self: PaneCollection, index: int) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__(self: IList, value: object) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
    def __setitem__(self, *args): #cannot find CLR method
        """ x.__setitem__(i, y) <==> x[i]= """
        pass
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Count(self: PaneCollection) -> int
    Set: Count(self: PaneCollection) = value
    """
    IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IsFixedSize(self: PaneCollection) -> bool
    """
    IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IsReadOnly(self: PaneCollection) -> bool
    """

class PaneStyles(Enum):
    """ enum (flags) PaneStyles, values: Command (16), NoBorders (1), Normal (8), PopOut (2), PopUp (32), Stretch (4) """
    Command = None
    NoBorders = None
    Normal = None
    PopOut = None
    PopUp = None
    Stretch = None
    value__ = None
# Dialog and status-bar related stubs: plot-style picker, save-file dialog,
# the StateEventIndex enum, the StatusBar aggregate, and its mouse event types.
class PlotStyleDialog(object):
    """ PlotStyleDialog() """
    def ShowDialog(self):
        """ ShowDialog(self: PlotStyleDialog) -> DialogResult """
        pass
    def ShowModal(self):
        """ ShowModal(self: PlotStyleDialog) -> Nullable[bool] """
        pass
    IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IncludeByBlockByLayer(self: PlotStyleDialog) -> bool
    Set: IncludeByBlockByLayer(self: PlotStyleDialog) = value
    """
    PlotStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: PlotStyle(self: PlotStyleDialog) -> str
    Set: PlotStyle(self: PlotStyleDialog) = value
    """

class SaveFileDialog(object):
    """ SaveFileDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: SaveFileDialogFlags) """
    def ShowDialog(self):
        """ ShowDialog(self: SaveFileDialog) -> DialogResult """
        pass
    def ShowModal(self):
        """ ShowModal(self: SaveFileDialog) -> Nullable[bool] """
        pass
    @staticmethod # known case of __new__
    def __new__(self, title, defaultName, extension, dialogName, flags):
        """ __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: SaveFileDialogFlags) """
        pass
    Filename = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Filename(self: SaveFileDialog) -> str
    """
    # Placeholder for the nested SaveFileDialogFlags type.
    SaveFileDialogFlags = None

class StateEventIndex(Enum):
    """ enum StateEventIndex, values: Hide (0), Show (1), ThemeChange (4) """
    Hide = None
    Show = None
    ThemeChange = None
    value__ = None

class StatusBar(object):
    # no doc
    def CloseBubbleWindows(self):
        """ CloseBubbleWindows(self: StatusBar) """
        pass
    def GetDefaultPane(self, pane):
        """ GetDefaultPane(self: StatusBar, pane: DefaultPane) -> Pane """
        pass
    def RemoveDefaultPane(self, pane):
        """ RemoveDefaultPane(self: StatusBar, pane: DefaultPane) """
        pass
    def Update(self):
        """ Update(self: StatusBar) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *args): #cannot find CLR constructor
        """ __new__(cls: type, impObj: AcApStatusBar*) """
        pass
    Panes = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Panes(self: StatusBar) -> PaneCollection
    """
    TrayItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: TrayItems(self: StatusBar) -> TrayItemCollection
    """
    Window = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Window(self: StatusBar) -> Window
    """

class StatusBarMouseDownEventArgs(EventArgs):
    # no doc
    Button = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Button(self: StatusBarMouseDownEventArgs) -> MouseButtons
    """
    DoubleClick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: DoubleClick(self: StatusBarMouseDownEventArgs) -> bool
    """
    X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: X(self: StatusBarMouseDownEventArgs) -> int
    """
    Y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Y(self: StatusBarMouseDownEventArgs) -> int
    """

class StatusBarMouseDownEventHandler(MulticastDelegate):
    """ StatusBarMouseDownEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: StatusBarMouseDownEventHandler, sender: object, e: StatusBarMouseDownEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: StatusBarMouseDownEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: StatusBarMouseDownEventHandler, sender: object, e: StatusBarMouseDownEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass
# Tray-item stubs: the TrayItem status-bar entry, its bubble-window
# notification types, the close-reason enum, the IList-style collection,
# and the Visuals constant holder.
class TrayItem(StatusBarItem):
    """ TrayItem() """
    def CloseBubbleWindows(self):
        """ CloseBubbleWindows(self: TrayItem) """
        pass
    def Dispose(self):
        """ Dispose(self: DisposableWrapper, A_0: bool) """
        pass
    def ShowBubbleWindow(self, bubble):
        """ ShowBubbleWindow(self: TrayItem, bubble: TrayItemBubbleWindow) """
        pass
    @staticmethod # known case of __new__
    def __new__(self):
        """
        __new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
        __new__(cls: type)
        """
        pass

class TrayItemBubbleWindow(DisposableWrapper):
    """ TrayItemBubbleWindow() """
    def Dispose(self):
        """ Dispose(self: DisposableWrapper, A_0: bool) """
        pass
    HyperLink = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: HyperLink(self: TrayItemBubbleWindow) -> str
    Set: HyperLink(self: TrayItemBubbleWindow) = value
    """
    HyperText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: HyperText(self: TrayItemBubbleWindow) -> str
    Set: HyperText(self: TrayItemBubbleWindow) = value
    """
    IconType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IconType(self: TrayItemBubbleWindow) -> IconType
    Set: IconType(self: TrayItemBubbleWindow) = value
    """
    Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Text(self: TrayItemBubbleWindow) -> str
    Set: Text(self: TrayItemBubbleWindow) = value
    """
    Text2 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Text2(self: TrayItemBubbleWindow) -> str
    Set: Text2(self: TrayItemBubbleWindow) = value
    """
    Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Title(self: TrayItemBubbleWindow) -> str
    Set: Title(self: TrayItemBubbleWindow) = value
    """
    # Event placeholder assigned by the runtime wrapper.
    Closed = None

class TrayItemBubbleWindowClosedEventArgs(EventArgs):
    # no doc
    CloseReason = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: CloseReason(self: TrayItemBubbleWindowClosedEventArgs) -> TrayItemBubbleWindowCloseReason
    """

class TrayItemBubbleWindowClosedEventHandler(MulticastDelegate):
    """ TrayItemBubbleWindowClosedEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: TrayItemBubbleWindowClosedEventHandler, sender: object, e: TrayItemBubbleWindowClosedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: TrayItemBubbleWindowClosedEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: TrayItemBubbleWindowClosedEventHandler, sender: object, e: TrayItemBubbleWindowClosedEventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class TrayItemBubbleWindowCloseReason(Enum):
    """ enum TrayItemBubbleWindowCloseReason, values: ClosedByUser (3), DocumentDeactivated (7), FailedToCreate (0), HyperlinkClicked (5), NoIcons (1), NoNotifications (2), TimedOut (4) """
    ClosedByUser = None
    DocumentDeactivated = None
    FailedToCreate = None
    HyperlinkClicked = None
    NoIcons = None
    NoNotifications = None
    TimedOut = None
    value__ = None

class TrayItemCollection(object):
    # no doc
    # IList-style container of TrayItem objects (see docstrings for signatures).
    def Add(self, value):
        """ Add(self: TrayItemCollection, value: TrayItem) -> int """
        pass
    def Clear(self):
        """ Clear(self: TrayItemCollection) """
        pass
    def Contains(self, value):
        """ Contains(self: TrayItemCollection, value: TrayItem) -> bool """
        pass
    def CopyTo(self, array, index):
        """ CopyTo(self: TrayItemCollection, array: Array[TrayItem], index: int) """
        pass
    def GetEnumerator(self):
        """ GetEnumerator(self: TrayItemCollection) -> IEnumerator """
        pass
    def IndexOf(self, value):
        """ IndexOf(self: TrayItemCollection, value: TrayItem) -> int """
        pass
    def Insert(self, index, value):
        """ Insert(self: TrayItemCollection, index: int, value: TrayItem) """
        pass
    def Remove(self, value):
        """ Remove(self: TrayItemCollection, value: TrayItem) """
        pass
    def RemoveAt(self, index):
        """ RemoveAt(self: TrayItemCollection, index: int) """
        pass
    def __add__(self, *args): #cannot find CLR method
        """ x.__add__(y) <==> x+y """
        pass
    def __contains__(self, *args): #cannot find CLR method
        """ __contains__(self: IList, value: object) -> bool """
        pass
    def __getitem__(self, *args): #cannot find CLR method
        """ x.__getitem__(y) <==> x[y] """
        pass
    def __iter__(self, *args): #cannot find CLR method
        """ __iter__(self: IEnumerable) -> object """
        pass
    def __len__(self, *args): #cannot find CLR method
        """ x.__len__() <==> len(x) """
        pass
    def __setitem__(self, *args): #cannot find CLR method
        """ x.__setitem__(i, y) <==> x[i]= """
        pass
    Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Count(self: TrayItemCollection) -> int
    Set: Count(self: TrayItemCollection) = value
    """
    IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IsFixedSize(self: TrayItemCollection) -> bool
    """
    IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IsReadOnly(self: TrayItemCollection) -> bool
    """

class Visuals(object):
    # no doc
    # Bitmap/icon placeholders populated by the runtime wrapper.
    ApplicationIcon = None
    PickSetBitmap = None
# WindowExtension: static helper methods over Window (generated from a .NET
# extension-method class, hence the explicit __all__ inside the class body).
class WindowExtension(object):
    # no doc
    @staticmethod
    def GetIcon(window):
        """ GetIcon(window: Window) -> Icon """
        pass
    @staticmethod
    def GetLocation(window):
        """ GetLocation(window: Window) -> Point """
        pass
    @staticmethod
    def GetSize(window):
        """ GetSize(window: Window) -> Size """
        pass
    @staticmethod
    def SetIcon(window, value):
        """ SetIcon(window: Window, value: Icon) """
        pass
    @staticmethod
    def SetLocation(window, value):
        """ SetLocation(window: Window, value: Point) """
        pass
    @staticmethod
    def SetSize(window, value):
        """ SetSize(window: Window, value: Size) """
        pass
    __all__ = [
        'GetIcon',
        'GetLocation',
        'GetSize',
        'SetIcon',
        'SetLocation',
        'SetSize',
    ]

class WPFDocumentWindow(DocumentWindow):
    """ WPFDocumentWindow(wpfVisual: Visual) """
    def Dispose(self):
        """ Dispose(self: DocumentWindow, A_0: bool) """
        pass
    def OnActivate(self, *args): #cannot find CLR method
        """ OnActivate(self: WPFDocumentWindow) """
        pass
    def OnCreate(self, *args): #cannot find CLR method
        """ OnCreate(self: WPFDocumentWindow) """
        pass
    def OnDestroy(self, *args): #cannot find CLR method
        """ OnDestroy(self: WPFDocumentWindow) """
        pass
    def OnLoad(self, *args): #cannot find CLR method
        """ OnLoad(self: WPFDocumentWindow) """
        pass
    def SetDocument(self, *args): #cannot find CLR method
        """ SetDocument(self: WPFDocumentWindow, document: object) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, wpfVisual):
        """ __new__(cls: type, wpfVisual: Visual) """
        pass
# variables with complex values
# no functions
# classes
# Color dialog, menu base class, context-menu extension, and the DefaultPane
# enum of well-known status-bar panes.
class ColorDialog(object):
    """ ColorDialog() """
    def SetDialogTabs(self, value):
        """ SetDialogTabs(self: ColorDialog, value: ColorTabs) """
        pass
    def ShowDialog(self):
        """ ShowDialog(self: ColorDialog) -> DialogResult """
        pass
    def ShowModal(self):
        """ ShowModal(self: ColorDialog) -> Nullable[bool] """
        pass
    Color = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Color(self: ColorDialog) -> Color
    Set: Color(self: ColorDialog) = value
    """
    IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: IncludeByBlockByLayer(self: ColorDialog) -> bool
    Set: IncludeByBlockByLayer(self: ColorDialog) = value
    """
    # Placeholder for the nested ColorTabs type.
    ColorTabs = None

class Menu(object):
    # no doc
    MenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: MenuItems(self: Menu) -> MenuItemCollection
    """

class ContextMenuExtension(Menu):
    """ ContextMenuExtension() """
    def Dispose(self):
        """ Dispose(self: ContextMenuExtension) """
        pass
    def __enter__(self, *args): #cannot find CLR method
        """ __enter__(self: IDisposable) -> object """
        pass
    def __exit__(self, *args): #cannot find CLR method
        """ __exit__(self: IDisposable, exc_type: object, exc_value: object, exc_back: object) """
        pass
    Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Title(self: ContextMenuExtension) -> str
    Set: Title(self: ContextMenuExtension) = value
    """
    # Event placeholder assigned by the runtime wrapper.
    Popup = None

class DefaultPane(Enum):
    """ enum DefaultPane, values: All (24), CursorCoordinates (1), DynamicInput (18), DynamicUcs (19), Float (12), Grid (3), LayoutIcon (22), LayoutModelIcons (20), LayoutMoreIcon (23), LineWeight (7), Model (10), ModelIcon (21), ModeMacro (0), ObjectSnap (11), ObjectTrack (6), Ortho (4), Paper (9), PaperModel (8), Polar (5), Snap (2), Spacer (14), Table (13), ViewportMaximize (16), ViewportMaximizeNext (17), ViewportMaximizePrevious (15) """
    All = None
    CursorCoordinates = None
    DynamicInput = None
    DynamicUcs = None
    Float = None
    Grid = None
    LayoutIcon = None
    LayoutModelIcons = None
    LayoutMoreIcon = None
    LineWeight = None
    Model = None
    ModelIcon = None
    ModeMacro = None
    ObjectSnap = None
    ObjectTrack = None
    Ortho = None
    Paper = None
    PaperModel = None
    Polar = None
    Snap = None
    Spacer = None
    Table = None
    value__ = None
    ViewportMaximize = None
    ViewportMaximizeNext = None
    ViewportMaximizePrevious = None
class DockSides(Enum):
    """ enum (flags) DockSides, values: Bottom (32768), Left (4096), None (0), Right (16384), Top (8192) """
    # Flag members are None placeholders; real values come from the .NET
    # runtime wrapper this stub mirrors.
    Bottom = None
    Left = None
    Right = None
    Top = None
    value__ = None

# BUG FIX: the generator emitted "None = None" inside the class body, which is
# a Python syntax error ('None' is a keyword and cannot be an assignment
# target), making the whole module unimportable. Attach the .NET 'None' flag
# member under its original name via setattr; callers reach it with
# getattr(DockSides, 'None').
setattr(DockSides, 'None', None)
# Window: base stub wrapping a native AutoCAD window handle (see __new__ and
# the IntPtr-typed Handle/UnmanagedWindow properties).
class Window(DisposableWrapper):
    # no doc
    def Close(self):
        """ Close(self: Window) """
        pass
    def Dispose(self):
        """ Dispose(self: DisposableWrapper, A_0: bool) """
        pass
    def Focus(self):
        """ Focus(self: Window) -> bool """
        pass
    @staticmethod
    def FromHandle(handle):
        """ FromHandle(handle: IntPtr) -> Window """
        pass
    @staticmethod
    def GetDeviceIndependentScale(hWnd):
        """ GetDeviceIndependentScale(hWnd: IntPtr) -> Vector """
        pass
    def GetWndPtr(self, *args): #cannot find CLR method
        """ GetWndPtr(self: Window) -> IntPtr """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *args): #cannot find CLR constructor
        """ __new__(cls: type, ptr: IntPtr, autoDelete: bool) """
        pass
    DeviceIndependentLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: DeviceIndependentLocation(self: Window) -> Point
    Set: DeviceIndependentLocation(self: Window) = value
    """
    DeviceIndependentSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: DeviceIndependentSize(self: Window) -> Size
    Set: DeviceIndependentSize(self: Window) = value
    """
    Handle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Handle(self: Window) -> IntPtr
    """
    Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Text(self: Window) -> str
    Set: Text(self: Window) = value
    """
    UnmanagedWindow = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: UnmanagedWindow(self: Window) -> IntPtr
    """
    Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Visible(self: Window) -> bool
    Set: Visible(self: Window) = value
    """
    WindowState = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: WindowState(self: Window) -> State
    Set: WindowState(self: Window) = value
    """
    # Placeholder for the nested State type.
    State = None
# Document-window stubs: the DocumentWindow base, its loaded/updated delegate
# types, the drawing-specific subclass, and the drag-and-drop target.
class DocumentWindow(Window):
    # no doc
    def Activate(self):
        """ Activate(self: DocumentWindow) """
        pass
    def Close(self):
        """ Close(self: DocumentWindow) """
        pass
    def Dispose(self):
        """ Dispose(self: DocumentWindow, A_0: bool) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, *args): #cannot find CLR constructor
        """ __new__(cls: type, docWindow: AcApDocWindow*) """
        pass
    CanClose = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: CanClose(self: DocumentWindow) -> bool
    """
    CanUpdate = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: CanUpdate(self: DocumentWindow) -> bool
    """
    Document = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Document(self: DocumentWindow) -> object
    """
    Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Title(self: DocumentWindow) -> str
    Set: Title(self: DocumentWindow) = value
    """
    # Event placeholders assigned by the runtime wrapper.
    DocumentWindowLoaded = None
    DocumentWindowUpdated = None

class DocumentWindowLoadedEventHandler(MulticastDelegate):
    """ DocumentWindowLoadedEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: DocumentWindowLoadedEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: DocumentWindowLoadedEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: DocumentWindowLoadedEventHandler, sender: object, e: EventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class DocumentWindowUpdatedEventHandler(MulticastDelegate):
    """ DocumentWindowUpdatedEventHandler(A_0: object, A_1: IntPtr) """
    def BeginInvoke(self, sender, e, callback, obj):
        """ BeginInvoke(self: DocumentWindowUpdatedEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
        pass
    def EndInvoke(self, result):
        """ EndInvoke(self: DocumentWindowUpdatedEventHandler, result: IAsyncResult) """
        pass
    def Invoke(self, sender, e):
        """ Invoke(self: DocumentWindowUpdatedEventHandler, sender: object, e: EventArgs) """
        pass
    @staticmethod # known case of __new__
    def __new__(self, A_0, A_1):
        """ __new__(cls: type, A_0: object, A_1: IntPtr) """
        pass

class DrawingDocumentWindow(DocumentWindow):
    # no doc
    def Dispose(self):
        """ Dispose(self: DocumentWindow, A_0: bool) """
        pass
    Document = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """Get: Document(self: DrawingDocumentWindow) -> Document
    """

class DropTarget(object):
    # no doc
    def OnDragEnter(self, e):
        """ OnDragEnter(self: DropTarget, e: DragEventArgs) """
        pass
    def OnDragLeave(self):
        """ OnDragLeave(self: DropTarget) """
        pass
    def OnDragOver(self, e):
        """ OnDragOver(self: DropTarget, e: DragEventArgs) """
        pass
    def OnDrop(self, e):
        """ OnDrop(self: DropTarget, e: DragEventArgs) """
        pass
# Generated stub for the .NET IconType enum (values documented in the
# docstring). FIX: the .NET member is literally named 'None', but
# 'None = None' is a SyntaxError in Python (assignment to the None keyword),
# which made this whole module unparseable. It is exposed as 'None_'; the
# original spelling cannot be referenced from Python source anyway.
class IconType(Enum):
 """ enum IconType, values: Critical (2), Information (1), None (0), Warning (3) """
 Critical = None
 Information = None
 None_ = None # .NET member 'None'; renamed — 'None' is not a legal identifier
 value__ = None
 Warning = None
# Generated stub for the InfoCenter toolbar host; methods are placeholders and
# properties are placeholder triples. The trailing m_* names are presumably
# delegate-field placeholders for the nested delegate types below — TODO confirm.
class InfoCenter(object):
 """ InfoCenter() """
 def InfoToolbarSizeChanged(self, bExpand):
  """ InfoToolbarSizeChanged(self: InfoCenter, bExpand: bool) """
  pass
 def InvokeToolbarMoveEvent(self):
  """ InvokeToolbarMoveEvent(self: InfoCenter) """
  pass
 def InvokeToolbarResizeEvent(self, width):
  """ InvokeToolbarResizeEvent(self: InfoCenter, width: int) """
  pass
 def LaunchSubAwareModule(self, resReqid, strCourseId, strModuleId):
  """ LaunchSubAwareModule(self: InfoCenter, resReqid: Int16, strCourseId: str, strModuleId: str) """
  pass
 Host = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Host(self: InfoCenter) -> HwndSource
 Set: Host(self: InfoCenter) = value
 """
 KeepFocus = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: KeepFocus(self: InfoCenter) -> bool
 Set: KeepFocus(self: InfoCenter) = value
 """
 SubAwareClientInfo = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: SubAwareClientInfo(self: InfoCenter) -> str
 """
 UPIXMLData = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: UPIXMLData(self: InfoCenter) -> str
 """
 Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Visible(self: InfoCenter) -> bool
 Set: Visible(self: InfoCenter) = value
 """
 mInitWidth = None
 m_pToolbarMoveDelegate = None
 m_pToolbarResizeDelegate = None
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class InfoToolbarMoveDelegate(MulticastDelegate):
 """ InfoToolbarMoveDelegate(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, callback, obj):
  """ BeginInvoke(self: InfoToolbarMoveDelegate, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: InfoToolbarMoveDelegate, result: IAsyncResult) """
  pass
 def Invoke(self):
  """ Invoke(self: InfoToolbarMoveDelegate) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class InfoToolbarResizeTo(MulticastDelegate):
 """ InfoToolbarResizeTo(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, width, callback, obj):
  """ BeginInvoke(self: InfoToolbarResizeTo, width: int, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: InfoToolbarResizeTo, result: IAsyncResult) """
  pass
 def Invoke(self, width):
  """ Invoke(self: InfoToolbarResizeTo, width: int) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated modal-dialog stub; ShowDialog/ShowModal are placeholders.
class LayerTransparencyDialog(object):
 """ LayerTransparencyDialog() """
 def ShowDialog(self):
  """ ShowDialog(self: LayerTransparencyDialog) -> DialogResult """
  pass
 def ShowModal(self):
  """ ShowModal(self: LayerTransparencyDialog) -> Nullable[bool] """
  pass
 Percent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Percent(self: LayerTransparencyDialog) -> UInt32
 Set: Percent(self: LayerTransparencyDialog) = value
 """
# Generated modal-dialog stub; bodies are placeholders.
class LinetypeDialog(object):
 """ LinetypeDialog() """
 def ShowDialog(self):
  """ ShowDialog(self: LinetypeDialog) -> DialogResult """
  pass
 def ShowModal(self):
  """ ShowModal(self: LinetypeDialog) -> Nullable[bool] """
  pass
 IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: IncludeByBlockByLayer(self: LinetypeDialog) -> bool
 Set: IncludeByBlockByLayer(self: LinetypeDialog) = value
 """
 Linetype = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Linetype(self: LinetypeDialog) -> ObjectId
 Set: Linetype(self: LinetypeDialog) = value
 """
# Generated modal-dialog stub; bodies are placeholders.
class LineWeightDialog(object):
 """ LineWeightDialog() """
 def ShowDialog(self):
  """ ShowDialog(self: LineWeightDialog) -> DialogResult """
  pass
 def ShowModal(self):
  """ ShowModal(self: LineWeightDialog) -> Nullable[bool] """
  pass
 IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: IncludeByBlockByLayer(self: LineWeightDialog) -> bool
 Set: IncludeByBlockByLayer(self: LineWeightDialog) = value
 """
 LineWeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: LineWeight(self: LineWeightDialog) -> LineWeight
 Set: LineWeight(self: LineWeightDialog) = value
 """
# Generated stub for a context-menu item; two reflected constructor overloads
# are documented in the docstring. `Click = None` is an event placeholder.
class MenuItem(Menu):
 """
 MenuItem(value: str, icon: Icon)
 MenuItem(value: str)
 """
 @staticmethod # known case of __new__
 def __new__(self, value, icon=None):
  """
  __new__(cls: type, value: str, icon: Icon)
  __new__(cls: type, value: str)
  """
  pass
 Checked = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Checked(self: MenuItem) -> bool
 Set: Checked(self: MenuItem) = value
 """
 Enabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Enabled(self: MenuItem) -> bool
 Set: Enabled(self: MenuItem) = value
 """
 Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Icon(self: MenuItem) -> Icon
 Set: Icon(self: MenuItem) = value
 """
 Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Text(self: MenuItem) -> str
 Set: Text(self: MenuItem) = value
 """
 Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Visible(self: MenuItem) -> bool
 Set: Visible(self: MenuItem) = value
 """
 Click = None
# Generated stub for a list-like .NET collection of MenuItem. The dunder
# methods marked "cannot find CLR method" are interface shims the generator
# could not bind to a concrete CLR member; all bodies are placeholders.
class MenuItemCollection(object):
 """ MenuItemCollection(owner: Menu) """
 def Add(self, value):
  """ Add(self: MenuItemCollection, value: MenuItem) -> int """
  pass
 def Clear(self):
  """ Clear(self: MenuItemCollection) """
  pass
 def Contains(self, value):
  """ Contains(self: MenuItemCollection, value: MenuItem) -> bool """
  pass
 def CopyTo(self, array, index):
  """ CopyTo(self: MenuItemCollection, array: Array[MenuItem], index: int) """
  pass
 def GetEnumerator(self):
  """ GetEnumerator(self: MenuItemCollection) -> IEnumerator[IMenuItem] """
  pass
 def IndexOf(self, value):
  """ IndexOf(self: MenuItemCollection, value: MenuItem) -> int """
  pass
 def Insert(self, index, value):
  """ Insert(self: MenuItemCollection, index: int, value: MenuItem) """
  pass
 def Remove(self, value):
  """ Remove(self: MenuItemCollection, value: MenuItem) """
  pass
 def RemoveAt(self, index):
  """ RemoveAt(self: MenuItemCollection, index: int) """
  pass
 def __add__(self, *args): #cannot find CLR method
  """ x.__add__(y) <==> x+y """
  pass
 def __contains__(self, *args): #cannot find CLR method
  """ __contains__(self: IList, value: object) -> bool """
  pass
 def __getitem__(self, *args): #cannot find CLR method
  """ x.__getitem__(y) <==> x[y] """
  pass
 def __iter__(self, *args): #cannot find CLR method
  """ __iter__(self: IEnumerable) -> object """
  pass
 def __len__(self, *args): #cannot find CLR method
  """ x.__len__() <==> len(x) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, owner):
  """ __new__(cls: type, owner: Menu) """
  pass
 def __setitem__(self, *args): #cannot find CLR method
  """ x.__setitem__(i, y) <==> x[i]= """
  pass
 Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Count(self: MenuItemCollection) -> int
 """
 IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: IsFixedSize(self: MenuItemCollection) -> bool
 """
 IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: IsReadOnly(self: MenuItemCollection) -> bool
 """
# Generated file-open dialog stub. The trailing `OpenFileDialogFlags = None`
# is presumably a placeholder for a nested enum type — TODO confirm against
# the assembly.
class OpenFileDialog(object):
 """ OpenFileDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
 def GetFilenames(self):
  """ GetFilenames(self: OpenFileDialog) -> Array[str] """
  pass
 def ShowDialog(self):
  """ ShowDialog(self: OpenFileDialog) -> DialogResult """
  pass
 def ShowModal(self):
  """ ShowModal(self: OpenFileDialog) -> Nullable[bool] """
  pass
 @staticmethod # known case of __new__
 def __new__(self, title, defaultName, extension, dialogName, flags):
  """ __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
  pass
 Filename = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Filename(self: OpenFileDialog) -> str
 """
 OpenFileDialogFlags = None
# Generated dialog stub; bodies are placeholders.
class OpenFileOrFolderDialog(object):
 """ OpenFileOrFolderDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
 def ShowDialog(self):
  """ ShowDialog(self: OpenFileOrFolderDialog) -> DialogResult """
  pass
 @staticmethod # known case of __new__
 def __new__(self, title, defaultName, extension, dialogName, flags):
  """ __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: OpenFileDialogFlags) """
  pass
 FileOrFoldername = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: FileOrFoldername(self: OpenFileOrFolderDialog) -> str
 """
# Generated stub for a single palette page; properties are placeholder triples.
class Palette(object):
 # no doc
 Name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Name(self: Palette) -> str
 Set: Name(self: Palette) = value
 """
 PaletteSet = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: PaletteSet(self: Palette) -> PaletteSet
 """
# Generated event-args stub; read-only properties are placeholder triples.
class PaletteActivatedEventArgs(EventArgs):
 """ PaletteActivatedEventArgs(activated: Palette, deactivated: Palette) """
 @staticmethod # known case of __new__
 def __new__(self, activated, deactivated):
  """ __new__(cls: type, activated: Palette, deactivated: Palette) """
  pass
 Activated = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Activated(self: PaletteActivatedEventArgs) -> Palette
 """
 Deactivated = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Deactivated(self: PaletteActivatedEventArgs) -> Palette
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteActivatedEventHandler(MulticastDelegate):
 """ PaletteActivatedEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteActivatedEventHandler, sender: object, e: PaletteActivatedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteActivatedEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteActivatedEventHandler, sender: object, e: PaletteActivatedEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub; read-only properties are placeholder triples.
class PaletteAddContextMenuEventArgs(EventArgs):
 """ PaletteAddContextMenuEventArgs(menuitems: List[MenuItem], removeMenuItems: List[int], nHitFlag: int, nRightClkTab: int) """
 @staticmethod # known case of __new__
 def __new__(self, menuitems, removeMenuItems, nHitFlag, nRightClkTab):
  """ __new__(cls: type, menuitems: List[MenuItem], removeMenuItems: List[int], nHitFlag: int, nRightClkTab: int) """
  pass
 HitFlag = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: HitFlag(self: PaletteAddContextMenuEventArgs) -> int
 """
 MenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: MenuItems(self: PaletteAddContextMenuEventArgs) -> List[MenuItem]
 """
 RemoveMenuItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: RemoveMenuItems(self: PaletteAddContextMenuEventArgs) -> List[int]
 """
 RightClickTab = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: RightClickTab(self: PaletteAddContextMenuEventArgs) -> int
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteAddContextMenuEventHandler(MulticastDelegate):
 """ PaletteAddContextMenuEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteAddContextMenuEventHandler, sender: object, e: PaletteAddContextMenuEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteAddContextMenuEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteAddContextMenuEventHandler, sender: object, e: PaletteAddContextMenuEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub.
class PaletteEnterSizeMoveEventArgs(EventArgs):
 """ PaletteEnterSizeMoveEventArgs(bEnterSizeMove: bool) """
 @staticmethod # known case of __new__
 def __new__(self, bEnterSizeMove):
  """ __new__(cls: type, bEnterSizeMove: bool) """
  pass
 EnterSizeMove = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: EnterSizeMove(self: PaletteEnterSizeMoveEventArgs) -> bool
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteEnterSizeMoveEventHandler(MulticastDelegate):
 """ PaletteEnterSizeMoveEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteEnterSizeMoveEventHandler, sender: object, e: PaletteEnterSizeMoveEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteEnterSizeMoveEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteEnterSizeMoveEventHandler, sender: object, e: PaletteEnterSizeMoveEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub.
class PalettePersistEventArgs(EventArgs):
 """ PalettePersistEventArgs(configurationSection: IConfigurationSection) """
 @staticmethod # known case of __new__
 def __new__(self, configurationSection):
  """ __new__(cls: type, configurationSection: IConfigurationSection) """
  pass
 ConfigurationSection = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: ConfigurationSection(self: PalettePersistEventArgs) -> IConfigurationSection
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PalettePersistEventHandler(MulticastDelegate):
 """ PalettePersistEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PalettePersistEventHandler, sender: object, e: PalettePersistEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PalettePersistEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PalettePersistEventHandler, sender: object, e: PalettePersistEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated stub for the PaletteSet window container (three reflected
# constructor overloads in the docstring). Methods and properties are
# placeholders; the trailing `Name = None` attributes are event placeholders.
class PaletteSet(Window):
 """
 PaletteSet(name: str, cmd: str, toolID: Guid)
 PaletteSet(name: str, toolID: Guid)
 PaletteSet(name: str)
 """
 def Activate(self, index):
  """ Activate(self: PaletteSet, index: int) """
  pass
 def Add(self, name, *__args):
  """
  Add(self: PaletteSet, name: str, control: Control) -> Palette
  Add(self: PaletteSet, name: str, htmlPage: Uri) -> Palette
  """
  pass
 def AddVisual(self, name, control, bResizeContentToPaletteSize=None):
  """
  AddVisual(self: PaletteSet, name: str, control: Visual) -> Palette
  AddVisual(self: PaletteSet, name: str, control: Visual, bResizeContentToPaletteSize: bool) -> Palette
  """
  pass
 def AddVisualBrowser(self, name, control, htmlPage, scriptableName, bShowBrowserFirst):
  """ AddVisualBrowser(self: PaletteSet, name: str, control: Visual, htmlPage: str, scriptableName: str, bShowBrowserFirst: bool) -> Palette """
  pass
 def CopyTo(self, array, index):
  """ CopyTo(self: PaletteSet, array: Array[Palette], index: int) """
  pass
 def Dispose(self):
  """ Dispose(self: DisposableWrapper, A_0: bool) """
  pass
 def EnableTransparency(self, value):
  """ EnableTransparency(self: PaletteSet, value: bool) -> bool """
  pass
 def FloatControl(self, *__args):
  """ FloatControl(self: PaletteSet, pointOnScreen: Point)FloatControl(self: PaletteSet, value: Rect) """
  pass
 def GetEnumerator(self):
  """ GetEnumerator(self: PaletteSet) -> IEnumerator """
  pass
 def GetThemedIcon(self, bBigIcon):
  """ GetThemedIcon(self: PaletteSet, bBigIcon: bool) -> Icon """
  pass
 def InitializeFloatingPosition(self, value):
  """ InitializeFloatingPosition(self: PaletteSet, value: Rect) """
  pass
 def RecalculateDockSiteLayout(self):
  """ RecalculateDockSiteLayout(self: PaletteSet) """
  pass
 def Remove(self, index):
  """ Remove(self: PaletteSet, index: int) """
  pass
 def SetThemedIcon(self, value, theme):
  """ SetThemedIcon(self: PaletteSet, value: Icon, theme: ColorThemeEnum) """
  pass
 def switchVisualBrowser(self, index, bVisual):
  """ switchVisualBrowser(self: PaletteSet, index: int, bVisual: bool) """
  pass
 def __add__(self, *args): #cannot find CLR method
  """ x.__add__(y) <==> x+yx.__add__(y) <==> x+y """
  pass
 def __getitem__(self, *args): #cannot find CLR method
  """ x.__getitem__(y) <==> x[y] """
  pass
 def __iter__(self, *args): #cannot find CLR method
  """ __iter__(self: IEnumerable) -> object """
  pass
 def __len__(self, *args): #cannot find CLR method
  """ x.__len__() <==> len(x) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, name, *__args):
  """
  __new__(cls: type, name: str, cmd: str, toolID: Guid)
  __new__(cls: type, name: str, toolID: Guid)
  __new__(cls: type, name: str)
  """
  pass
 Anchored = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Anchored(self: PaletteSet) -> bool
 """
 AutoRollUp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: AutoRollUp(self: PaletteSet) -> bool
 Set: AutoRollUp(self: PaletteSet) = value
 """
 Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Count(self: PaletteSet) -> int
 """
 DarkThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DarkThemedIcon(self: PaletteSet) -> Icon
 Set: DarkThemedIcon(self: PaletteSet) = value
 """
 DeviceIndependentLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DeviceIndependentLocation(self: PaletteSet) -> Point
 Set: DeviceIndependentLocation(self: PaletteSet) = value
 """
 DeviceIndependentMinimumSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DeviceIndependentMinimumSize(self: PaletteSet) -> Size
 Set: DeviceIndependentMinimumSize(self: PaletteSet) = value
 """
 DeviceIndependentSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DeviceIndependentSize(self: PaletteSet) -> Size
 Set: DeviceIndependentSize(self: PaletteSet) = value
 """
 Dock = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Dock(self: PaletteSet) -> DockSides
 Set: Dock(self: PaletteSet) = value
 """
 DockEnabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DockEnabled(self: PaletteSet) -> DockSides
 Set: DockEnabled(self: PaletteSet) = value
 """
 Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Icon(self: PaletteSet) -> Icon
 Set: Icon(self: PaletteSet) = value
 """
 KeepFocus = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: KeepFocus(self: PaletteSet) -> bool
 Set: KeepFocus(self: PaletteSet) = value
 """
 LargeDarkThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: LargeDarkThemedIcon(self: PaletteSet) -> Icon
 Set: LargeDarkThemedIcon(self: PaletteSet) = value
 """
 LargeLightThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: LargeLightThemedIcon(self: PaletteSet) -> Icon
 Set: LargeLightThemedIcon(self: PaletteSet) = value
 """
 LightThemedIcon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: LightThemedIcon(self: PaletteSet) -> Icon
 Set: LightThemedIcon(self: PaletteSet) = value
 """
 Location = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Location(self: PaletteSet) -> Point
 Set: Location(self: PaletteSet) = value
 """
 MinimumSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: MinimumSize(self: PaletteSet) -> Size
 Set: MinimumSize(self: PaletteSet) = value
 """
 Name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Name(self: PaletteSet) -> str
 Set: Name(self: PaletteSet) = value
 """
 Opacity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Opacity(self: PaletteSet) -> int
 Set: Opacity(self: PaletteSet) = value
 """
 PaletteSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: PaletteSize(self: PaletteSet) -> Size
 """
 RolledUp = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: RolledUp(self: PaletteSet) -> bool
 Set: RolledUp(self: PaletteSet) = value
 """
 Size = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Size(self: PaletteSet) -> Size
 Set: Size(self: PaletteSet) = value
 """
 Style = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Style(self: PaletteSet) -> PaletteSetStyles
 Set: Style(self: PaletteSet) = value
 """
 TitleBarLocation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: TitleBarLocation(self: PaletteSet) -> PaletteSetTitleBarLocation
 Set: TitleBarLocation(self: PaletteSet) = value
 """
 Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Visible(self: PaletteSet) -> bool
 Set: Visible(self: PaletteSet) = value
 """
 Focused = None
 Help = None
 Load = None
 PaletteActivated = None
 PaletteAddContextMenu = None
 PaletteSetDestroy = None
 PaletteSetEnterSizeMove = None
 PaletteSetHostMoved = None
 PaletteSetMoved = None
 PaletteSetShowDockBar = None
 PaletteSetTitleBarLocationChange = None
 Save = None
 Saving = None
 SizeChanged = None
 StateChanged = None
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetDestroyEventHandler(MulticastDelegate):
 """ PaletteSetDestroyEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetDestroyEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetDestroyEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetDestroyEventHandler, sender: object, e: EventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated stub for the dock-site helper; note the reflected parameter name
# 'dockSyle' (sic) comes from the assembly and is preserved verbatim.
class PaletteSetDockSite(object):
 """ PaletteSetDockSite() """
 def CanDock(self, mousePosition):
  """ CanDock(self: PaletteSetDockSite, mousePosition: Point) -> Nullable[Rect] """
  pass
 def Dock(self, paletteset):
  """ Dock(self: PaletteSetDockSite, paletteset: PaletteSet) -> bool """
  pass
 def Initialize(self, paletteset, desiredSize, dockSyle):
  """ Initialize(self: PaletteSetDockSite, paletteset: PaletteSet, desiredSize: Size, dockSyle: int) """
  pass
 def Uninitialize(self):
  """ Uninitialize(self: PaletteSetDockSite) """
  pass
# Generated empty event-args stub (no members beyond the constructor).
class PaletteSetFocusedEventArgs(EventArgs):
 """ PaletteSetFocusedEventArgs() """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetFocusedEventHandler(MulticastDelegate):
 """ PaletteSetFocusedEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetFocusedEventHandler, sender: object, e: PaletteSetFocusedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetFocusedEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetFocusedEventHandler, sender: object, e: PaletteSetFocusedEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated empty event-args stub (no members beyond the constructor).
class PaletteSetHelpEventArgs(EventArgs):
 """ PaletteSetHelpEventArgs() """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetHelpEventHandler(MulticastDelegate):
 """ PaletteSetHelpEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetHelpEventHandler, sender: object, e: PaletteSetHelpEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetHelpEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetHelpEventHandler, sender: object, e: PaletteSetHelpEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub. NOTE(review): the property names are 'X'
# (uppercase) but 'y' (lowercase), exactly as the docstrings record them —
# presumably mirroring the reflected member names; verify against the
# assembly before "fixing" the casing.
class PaletteSetMoveEventArgs(EventArgs):
 """ PaletteSetMoveEventArgs(x: int, y: int) """
 @staticmethod # known case of __new__
 def __new__(self, x, y):
  """ __new__(cls: type, x: int, y: int) """
  pass
 X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: X(self: PaletteSetMoveEventArgs) -> int
 """
 y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: y(self: PaletteSetMoveEventArgs) -> int
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetMoveEventHandler(MulticastDelegate):
 """ PaletteSetMoveEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetMoveEventHandler, sender: object, e: PaletteSetMoveEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetMoveEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetMoveEventHandler, sender: object, e: PaletteSetMoveEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetShowDockBarEventHandler(MulticastDelegate):
 """ PaletteSetShowDockBarEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetShowDockBarEventHandler, sender: object, e: PaletteShowDockBarEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetShowDockBarEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetShowDockBarEventHandler, sender: object, e: PaletteShowDockBarEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub; pixel (cx, cy) and device-independent (dx, dy)
# sizes per the constructor docstring.
class PaletteSetSizeEventArgs(EventArgs):
 """ PaletteSetSizeEventArgs(cx: int, cy: int, dx: float, dy: float) """
 @staticmethod # known case of __new__
 def __new__(self, cx, cy, dx, dy):
  """ __new__(cls: type, cx: int, cy: int, dx: float, dy: float) """
  pass
 DeviceIndependentHeight = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DeviceIndependentHeight(self: PaletteSetSizeEventArgs) -> float
 """
 DeviceIndependentWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: DeviceIndependentWidth(self: PaletteSetSizeEventArgs) -> float
 """
 Height = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Height(self: PaletteSetSizeEventArgs) -> int
 """
 Width = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Width(self: PaletteSetSizeEventArgs) -> int
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetSizeEventHandler(MulticastDelegate):
 """ PaletteSetSizeEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetSizeEventHandler, sender: object, e: PaletteSetSizeEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetSizeEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetSizeEventHandler, sender: object, e: PaletteSetSizeEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub.
class PaletteSetStateEventArgs(EventArgs):
 """ PaletteSetStateEventArgs(state: StateEventIndex) """
 @staticmethod # known case of __new__
 def __new__(self, state):
  """ __new__(cls: type, state: StateEventIndex) """
  pass
 NewState = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: NewState(self: PaletteSetStateEventArgs) -> StateEventIndex
 """
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetStateEventHandler(MulticastDelegate):
 """ PaletteSetStateEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetStateEventHandler, sender: object, e: PaletteSetStateEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetStateEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetStateEventHandler, sender: object, e: PaletteSetStateEventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated stub for the PaletteSetStyles flags enum; the numeric flag values
# are recorded only in the docstring — members here are placeholders (None).
class PaletteSetStyles(Enum):
 """ enum (flags) PaletteSetStyles, values: NameEditable (16), Notify (1024), NoTitleBar (32768), PauseAutoRollupForChildModalDialog (65536), ShowAutoHideButton (2), ShowCloseButton (8), ShowPropertiesMenu (4), ShowTabForSingle (64), SingleColDock (4096), SingleRowDock (512), SingleRowNoVertResize (2048), Snappable (32), UsePaletteNameAsTitleForSingle (128) """
 NameEditable = None
 Notify = None
 NoTitleBar = None
 PauseAutoRollupForChildModalDialog = None
 ShowAutoHideButton = None
 ShowCloseButton = None
 ShowPropertiesMenu = None
 ShowTabForSingle = None
 SingleColDock = None
 SingleRowDock = None
 SingleRowNoVertResize = None
 Snappable = None
 UsePaletteNameAsTitleForSingle = None
 value__ = None
# Generated enum stub; values Left (0) and Right (1) per the docstring.
class PaletteSetTitleBarLocation(Enum):
 """ enum PaletteSetTitleBarLocation, values: Left (0), Right (1) """
 Left = None
 Right = None
 value__ = None
# Generated .NET MulticastDelegate stub; bodies are placeholders.
class PaletteSetTitleBarLocationChangeEventHandler(MulticastDelegate):
 """ PaletteSetTitleBarLocationChangeEventHandler(A_0: object, A_1: IntPtr) """
 def BeginInvoke(self, sender, e, callback, obj):
  """ BeginInvoke(self: PaletteSetTitleBarLocationChangeEventHandler, sender: object, e: EventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
  pass
 def EndInvoke(self, result):
  """ EndInvoke(self: PaletteSetTitleBarLocationChangeEventHandler, result: IAsyncResult) """
  pass
 def Invoke(self, sender, e):
  """ Invoke(self: PaletteSetTitleBarLocationChangeEventHandler, sender: object, e: EventArgs) """
  pass
 @staticmethod # known case of __new__
 def __new__(self, A_0, A_1):
  """ __new__(cls: type, A_0: object, A_1: IntPtr) """
  pass
# Generated event-args stub.
class PaletteShowDockBarEventArgs(EventArgs):
 """ PaletteShowDockBarEventArgs(bShowDockBar: bool) """
 @staticmethod # known case of __new__
 def __new__(self, bShowDockBar):
  """ __new__(cls: type, bShowDockBar: bool) """
  pass
 ShowDockBar = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: ShowDockBar(self: PaletteShowDockBarEventArgs) -> bool
 """
# Generated stub for a status-bar element. `Deleted`/`MouseDown` at the end
# are event placeholders; __new__ documents two reflected constructor
# overloads the generator could not bind ("cannot find CLR constructor").
class StatusBarItem(DisposableWrapper):
 # no doc
 def DisplayContextMenu(self, menu, p):
  """ DisplayContextMenu(self: StatusBarItem, menu: ContextMenu, p: Point) """
  pass
 def Dispose(self):
  """ Dispose(self: DisposableWrapper, A_0: bool) """
  pass
 def PointToClient(self, p):
  """ PointToClient(self: StatusBarItem, p: Point) -> Point """
  pass
 def PointToScreen(self, p):
  """ PointToScreen(self: StatusBarItem, p: Point) -> Point """
  pass
 @staticmethod # known case of __new__
 def __new__(self, *args): #cannot find CLR constructor
  """
  __new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
  __new__(cls: type)
  """
  pass
 Enabled = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Enabled(self: StatusBarItem) -> bool
 Set: Enabled(self: StatusBarItem) = value
 """
 Icon = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Icon(self: StatusBarItem) -> Icon
 Set: Icon(self: StatusBarItem) = value
 """
 ToolTipText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: ToolTipText(self: StatusBarItem) -> str
 Set: ToolTipText(self: StatusBarItem) = value
 """
 Visible = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
 """Get: Visible(self: StatusBarItem) -> bool
 Set: Visible(self: StatusBarItem) = value
 """
 Deleted = None
 MouseDown = None
class Pane(StatusBarItem):
""" Pane() """
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
@staticmethod # known case of __new__
def __new__(self):
"""
__new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
__new__(cls: type)
"""
pass
MaximumWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MaximumWidth(self: Pane) -> int
Set: MaximumWidth(self: Pane) = value
"""
MinimumWidth = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: MinimumWidth(self: Pane) -> int
Set: MinimumWidth(self: Pane) = value
"""
Style = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Style(self: Pane) -> PaneStyles
Set: Style(self: Pane) = value
"""
Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Text(self: Pane) -> str
Set: Text(self: Pane) = value
"""
class PaneCollection(object):
# no doc
def Add(self, value):
""" Add(self: PaneCollection, value: Pane) -> int """
pass
def Clear(self):
""" Clear(self: PaneCollection) """
pass
def Contains(self, value):
""" Contains(self: PaneCollection, value: Pane) -> bool """
pass
def CopyTo(self, array, index):
""" CopyTo(self: PaneCollection, array: Array[Pane], index: int) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: PaneCollection) -> IEnumerator """
pass
def IndexOf(self, value):
""" IndexOf(self: PaneCollection, value: Pane) -> int """
pass
def Insert(self, index, value):
""" Insert(self: PaneCollection, index: int, value: Pane) """
pass
def Remove(self, value):
""" Remove(self: PaneCollection, value: Pane) """
pass
def RemoveAt(self, index):
""" RemoveAt(self: PaneCollection, index: int) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__(self: IList, value: object) -> bool """
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: PaneCollection) -> int
Set: Count(self: PaneCollection) = value
"""
IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsFixedSize(self: PaneCollection) -> bool
"""
IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsReadOnly(self: PaneCollection) -> bool
"""
class PaneStyles(Enum):
""" enum (flags) PaneStyles, values: Command (16), NoBorders (1), Normal (8), PopOut (2), PopUp (32), Stretch (4) """
Command = None
NoBorders = None
Normal = None
PopOut = None
PopUp = None
Stretch = None
value__ = None
class PlotStyleDialog(object):
""" PlotStyleDialog() """
def ShowDialog(self):
""" ShowDialog(self: PlotStyleDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: PlotStyleDialog) -> Nullable[bool] """
pass
IncludeByBlockByLayer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IncludeByBlockByLayer(self: PlotStyleDialog) -> bool
Set: IncludeByBlockByLayer(self: PlotStyleDialog) = value
"""
PlotStyle = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: PlotStyle(self: PlotStyleDialog) -> str
Set: PlotStyle(self: PlotStyleDialog) = value
"""
class SaveFileDialog(object):
""" SaveFileDialog(title: str, defaultName: str, extension: str, dialogName: str, flags: SaveFileDialogFlags) """
def ShowDialog(self):
""" ShowDialog(self: SaveFileDialog) -> DialogResult """
pass
def ShowModal(self):
""" ShowModal(self: SaveFileDialog) -> Nullable[bool] """
pass
@staticmethod # known case of __new__
def __new__(self, title, defaultName, extension, dialogName, flags):
""" __new__(cls: type, title: str, defaultName: str, extension: str, dialogName: str, flags: SaveFileDialogFlags) """
pass
Filename = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Filename(self: SaveFileDialog) -> str
"""
SaveFileDialogFlags = None
class StateEventIndex(Enum):
""" enum StateEventIndex, values: Hide (0), Show (1), ThemeChange (4) """
Hide = None
Show = None
ThemeChange = None
value__ = None
class StatusBar(object):
# no doc
def CloseBubbleWindows(self):
""" CloseBubbleWindows(self: StatusBar) """
pass
def GetDefaultPane(self, pane):
""" GetDefaultPane(self: StatusBar, pane: DefaultPane) -> Pane """
pass
def RemoveDefaultPane(self, pane):
""" RemoveDefaultPane(self: StatusBar, pane: DefaultPane) """
pass
def Update(self):
""" Update(self: StatusBar) """
pass
@staticmethod # known case of __new__
def __new__(self, *args): #cannot find CLR constructor
""" __new__(cls: type, impObj: AcApStatusBar*) """
pass
Panes = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Panes(self: StatusBar) -> PaneCollection
"""
TrayItems = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: TrayItems(self: StatusBar) -> TrayItemCollection
"""
Window = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Window(self: StatusBar) -> Window
"""
class StatusBarMouseDownEventArgs(EventArgs):
# no doc
Button = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Button(self: StatusBarMouseDownEventArgs) -> MouseButtons
"""
DoubleClick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: DoubleClick(self: StatusBarMouseDownEventArgs) -> bool
"""
X = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: X(self: StatusBarMouseDownEventArgs) -> int
"""
Y = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Y(self: StatusBarMouseDownEventArgs) -> int
"""
class StatusBarMouseDownEventHandler(MulticastDelegate):
""" StatusBarMouseDownEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: StatusBarMouseDownEventHandler, sender: object, e: StatusBarMouseDownEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: StatusBarMouseDownEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: StatusBarMouseDownEventHandler, sender: object, e: StatusBarMouseDownEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class TrayItem(StatusBarItem):
""" TrayItem() """
def CloseBubbleWindows(self):
""" CloseBubbleWindows(self: TrayItem) """
pass
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
def ShowBubbleWindow(self, bubble):
""" ShowBubbleWindow(self: TrayItem, bubble: TrayItemBubbleWindow) """
pass
@staticmethod # known case of __new__
def __new__(self):
"""
__new__(cls: type, unmanagedPointer: IntPtr, autoDelete: bool)
__new__(cls: type)
"""
pass
class TrayItemBubbleWindow(DisposableWrapper):
""" TrayItemBubbleWindow() """
def Dispose(self):
""" Dispose(self: DisposableWrapper, A_0: bool) """
pass
HyperLink = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: HyperLink(self: TrayItemBubbleWindow) -> str
Set: HyperLink(self: TrayItemBubbleWindow) = value
"""
HyperText = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: HyperText(self: TrayItemBubbleWindow) -> str
Set: HyperText(self: TrayItemBubbleWindow) = value
"""
IconType = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IconType(self: TrayItemBubbleWindow) -> IconType
Set: IconType(self: TrayItemBubbleWindow) = value
"""
Text = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Text(self: TrayItemBubbleWindow) -> str
Set: Text(self: TrayItemBubbleWindow) = value
"""
Text2 = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Text2(self: TrayItemBubbleWindow) -> str
Set: Text2(self: TrayItemBubbleWindow) = value
"""
Title = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Title(self: TrayItemBubbleWindow) -> str
Set: Title(self: TrayItemBubbleWindow) = value
"""
Closed = None
class TrayItemBubbleWindowClosedEventArgs(EventArgs):
# no doc
CloseReason = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: CloseReason(self: TrayItemBubbleWindowClosedEventArgs) -> TrayItemBubbleWindowCloseReason
"""
class TrayItemBubbleWindowClosedEventHandler(MulticastDelegate):
""" TrayItemBubbleWindowClosedEventHandler(A_0: object, A_1: IntPtr) """
def BeginInvoke(self, sender, e, callback, obj):
""" BeginInvoke(self: TrayItemBubbleWindowClosedEventHandler, sender: object, e: TrayItemBubbleWindowClosedEventArgs, callback: AsyncCallback, obj: object) -> IAsyncResult """
pass
def EndInvoke(self, result):
""" EndInvoke(self: TrayItemBubbleWindowClosedEventHandler, result: IAsyncResult) """
pass
def Invoke(self, sender, e):
""" Invoke(self: TrayItemBubbleWindowClosedEventHandler, sender: object, e: TrayItemBubbleWindowClosedEventArgs) """
pass
@staticmethod # known case of __new__
def __new__(self, A_0, A_1):
""" __new__(cls: type, A_0: object, A_1: IntPtr) """
pass
class TrayItemBubbleWindowCloseReason(Enum):
""" enum TrayItemBubbleWindowCloseReason, values: ClosedByUser (3), DocumentDeactivated (7), FailedToCreate (0), HyperlinkClicked (5), NoIcons (1), NoNotifications (2), TimedOut (4) """
ClosedByUser = None
DocumentDeactivated = None
FailedToCreate = None
HyperlinkClicked = None
NoIcons = None
NoNotifications = None
TimedOut = None
value__ = None
class TrayItemCollection(object):
# no doc
def Add(self, value):
""" Add(self: TrayItemCollection, value: TrayItem) -> int """
pass
def Clear(self):
""" Clear(self: TrayItemCollection) """
pass
def Contains(self, value):
""" Contains(self: TrayItemCollection, value: TrayItem) -> bool """
pass
def CopyTo(self, array, index):
""" CopyTo(self: TrayItemCollection, array: Array[TrayItem], index: int) """
pass
def GetEnumerator(self):
""" GetEnumerator(self: TrayItemCollection) -> IEnumerator """
pass
def IndexOf(self, value):
""" IndexOf(self: TrayItemCollection, value: TrayItem) -> int """
pass
def Insert(self, index, value):
""" Insert(self: TrayItemCollection, index: int, value: TrayItem) """
pass
def Remove(self, value):
""" Remove(self: TrayItemCollection, value: TrayItem) """
pass
def RemoveAt(self, index):
""" RemoveAt(self: TrayItemCollection, index: int) """
pass
def __add__(self, *args): #cannot find CLR method
""" x.__add__(y) <==> x+y """
pass
def __contains__(self, *args): #cannot find CLR method
""" __contains__(self: IList, value: object) -> bool """
pass
def __getitem__(self, *args): #cannot find CLR method
""" x.__getitem__(y) <==> x[y] """
pass
def __iter__(self, *args): #cannot find CLR method
""" __iter__(self: IEnumerable) -> object """
pass
def __len__(self, *args): #cannot find CLR method
""" x.__len__() <==> len(x) """
pass
def __setitem__(self, *args): #cannot find CLR method
""" x.__setitem__(i, y) <==> x[i]= """
pass
Count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: Count(self: TrayItemCollection) -> int
Set: Count(self: TrayItemCollection) = value
"""
IsFixedSize = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsFixedSize(self: TrayItemCollection) -> bool
"""
IsReadOnly = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Get: IsReadOnly(self: TrayItemCollection) -> bool
"""
class Visuals(object):
# no doc
ApplicationIcon = None
PickSetBitmap = None
class WindowExtension(object):
# no doc
@staticmethod
def GetIcon(window):
""" GetIcon(window: Window) -> Icon """
pass
@staticmethod
def GetLocation(window):
""" GetLocation(window: Window) -> Point """
pass
@staticmethod
def GetSize(window):
""" GetSize(window: Window) -> Size """
pass
@staticmethod
def SetIcon(window, value):
""" SetIcon(window: Window, value: Icon) """
pass
@staticmethod
def SetLocation(window, value):
""" SetLocation(window: Window, value: Point) """
pass
@staticmethod
def SetSize(window, value):
""" SetSize(window: Window, value: Size) """
pass
__all__ = [
'GetIcon',
'GetLocation',
'GetSize',
'SetIcon',
'SetLocation',
'SetSize',
]
class WPFDocumentWindow(DocumentWindow):
""" WPFDocumentWindow(wpfVisual: Visual) """
def Dispose(self):
""" Dispose(self: DocumentWindow, A_0: bool) """
pass
def OnActivate(self, *args): #cannot find CLR method
""" OnActivate(self: WPFDocumentWindow) """
pass
def OnCreate(self, *args): #cannot find CLR method
""" OnCreate(self: WPFDocumentWindow) """
pass
def OnDestroy(self, *args): #cannot find CLR method
""" OnDestroy(self: WPFDocumentWindow) """
pass
def OnLoad(self, *args): #cannot find CLR method
""" OnLoad(self: WPFDocumentWindow) """
pass
def SetDocument(self, *args): #cannot find CLR method
""" SetDocument(self: WPFDocumentWindow, document: object) """
pass
@staticmethod # known case of __new__
def __new__(self, wpfVisual):
""" __new__(cls: type, wpfVisual: Visual) """
pass
# variables with complex values | 0.510008 | 0.130867 |
from flask import Blueprint
from flask import request
from flask import jsonify
import jwt
from util.util import decode_jwt
from config.config import config
from util.util import InvalidUsage
from util.util import handle_invalid_usage
from util.util import try_wrap_response
from config.config import config
from models.model_operations.location_operations import get_locations
from models.schema import locations_schema
bp = Blueprint("location_controller", __name__)
@bp.route("", methods=["GET"])
def location():
"""
The function for the front-end to retrieve random location data by specifying amount.
Sample command to test:
$ curl -H "Content-Type: application/json" -X GET http://localhost:5000/location?size=5\&gold_standard_size=1\&user_token=xxxx
$ https://localhost:5000/location?&size=5&gold_standard_size=1?user_token=xxxxx
Parameters
----------
user_token : str
The encoded user JWT, issued by the back-end.
(required)
size : int
Total number of locations to be returned.
(required)
gold_standard_size : int
The number of locations that should include gold standard answers.
There should be ("size" - "gold_standard_size") locations that are not labeled yet.
(required)
Returns
-------
The encoded JWT that stores location information:
id : int
ID of the location.
factory_id : string
The uuid imported from disfactory factory table.
"""
size = request.args.get("size")
gold_standard_size = request.args.get("gold_standard_size")
user_token = request.args.get("user_token")
if size is None:
e = InvalidUsage("Please provide size, the number of locations you want to get.")
return handle_invalid_usage(e)
try:
i = int(size)
except ValueError as ex:
e = InvalidUsage("size must be an integer.")
return handle_invalid_usage(e)
except Exception as ex:
e = InvalidUsage("size must be an integer.")
return handle_invalid_usage(e)
if int(size) < 2:
e = InvalidUsage("The size must be greater or equal to 2.")
return handle_invalid_usage(e)
if gold_standard_size is None:
e = InvalidUsage("Please provide gold_standard_size, the number of gold standards.")
return handle_invalid_usage(e)
try:
i = int(gold_standard_size)
except ValueError as ex:
e = InvalidUsage("gold_standard_size must be an integer.")
return handle_invalid_usage(e)
except Exception as ex:
e = InvalidUsage("gold_standard_size must be an integer.")
return handle_invalid_usage(e)
if user_token is None:
e = InvalidUsage("Please provide user_token.")
return handle_invalid_usage(e)
try:
user_json = decode_jwt(user_token, config.JWT_PRIVATE_KEY)
except jwt.InvalidSignatureError as ex:
e = InvalidUsage(ex.args[0], status_code=401)
return (handle_invalid_usage(e), None)
except Exception as ex:
e = InvalidUsage(ex.args[0], status_code=401)
return (handle_invalid_usage(e), None)
user_id = user_json["user_id"]
if user_id is None:
e = InvalidUsage("Cannot find user_id")
return handle_invalid_usage(e)
return try_get_locations(user_id, int(size), int(gold_standard_size))
@try_wrap_response
def try_get_locations(user_id, size, gold_standard_size):
try:
data = get_locations(user_id, size, gold_standard_size)
except Exception as errmsg:
e = InvalidUsage(repr(errmsg), status_code=400)
return handle_invalid_usage(e)
return jsonify({"data": locations_schema.dump(data)}) | back-end/www/controllers/location_controller.py |
from flask import Blueprint
from flask import request
from flask import jsonify
import jwt
from util.util import decode_jwt
from config.config import config
from util.util import InvalidUsage
from util.util import handle_invalid_usage
from util.util import try_wrap_response
from config.config import config
from models.model_operations.location_operations import get_locations
from models.schema import locations_schema
bp = Blueprint("location_controller", __name__)
@bp.route("", methods=["GET"])
def location():
"""
The function for the front-end to retrieve random location data by specifying amount.
Sample command to test:
$ curl -H "Content-Type: application/json" -X GET http://localhost:5000/location?size=5\&gold_standard_size=1\&user_token=xxxx
$ https://localhost:5000/location?&size=5&gold_standard_size=1?user_token=xxxxx
Parameters
----------
user_token : str
The encoded user JWT, issued by the back-end.
(required)
size : int
Total number of locations to be returned.
(required)
gold_standard_size : int
The number of locations that should include gold standard answers.
There should be ("size" - "gold_standard_size") locations that are not labeled yet.
(required)
Returns
-------
The encoded JWT that stores location information:
id : int
ID of the location.
factory_id : string
The uuid imported from disfactory factory table.
"""
size = request.args.get("size")
gold_standard_size = request.args.get("gold_standard_size")
user_token = request.args.get("user_token")
if size is None:
e = InvalidUsage("Please provide size, the number of locations you want to get.")
return handle_invalid_usage(e)
try:
i = int(size)
except ValueError as ex:
e = InvalidUsage("size must be an integer.")
return handle_invalid_usage(e)
except Exception as ex:
e = InvalidUsage("size must be an integer.")
return handle_invalid_usage(e)
if int(size) < 2:
e = InvalidUsage("The size must be greater or equal to 2.")
return handle_invalid_usage(e)
if gold_standard_size is None:
e = InvalidUsage("Please provide gold_standard_size, the number of gold standards.")
return handle_invalid_usage(e)
try:
i = int(gold_standard_size)
except ValueError as ex:
e = InvalidUsage("gold_standard_size must be an integer.")
return handle_invalid_usage(e)
except Exception as ex:
e = InvalidUsage("gold_standard_size must be an integer.")
return handle_invalid_usage(e)
if user_token is None:
e = InvalidUsage("Please provide user_token.")
return handle_invalid_usage(e)
try:
user_json = decode_jwt(user_token, config.JWT_PRIVATE_KEY)
except jwt.InvalidSignatureError as ex:
e = InvalidUsage(ex.args[0], status_code=401)
return (handle_invalid_usage(e), None)
except Exception as ex:
e = InvalidUsage(ex.args[0], status_code=401)
return (handle_invalid_usage(e), None)
user_id = user_json["user_id"]
if user_id is None:
e = InvalidUsage("Cannot find user_id")
return handle_invalid_usage(e)
return try_get_locations(user_id, int(size), int(gold_standard_size))
@try_wrap_response
def try_get_locations(user_id, size, gold_standard_size):
try:
data = get_locations(user_id, size, gold_standard_size)
except Exception as errmsg:
e = InvalidUsage(repr(errmsg), status_code=400)
return handle_invalid_usage(e)
return jsonify({"data": locations_schema.dump(data)}) | 0.702734 | 0.179746 |
from checker import Checker
from output import output
class AnalyzeRoot(Checker):
"""provides checker for analyze root structure
of the project. It can be check licence, contributing, make file, etc
"""
def __init__(self, content, url=None, provider=None):
super().__init__(provider, url=url)
self._provider = provider
self._content = content
self._provider = provider
def name(self):
return 'root'
def check(self, repo, **kwargs):
config = kwargs.get('config')
checkers = self._make_checkers(names=self._get_checkers_names_from_cfg(config))
self.score.add_total_checks(len(checkers))
return sum(checkers)
def _default_checkers(self):
return [
self._check_license(),
self._check_gitignore()
]
def start_message(self):
"""return message before start of checkers
"""
print('\n>Checking of the root structure\n')
def _check_license(self):
""" returns 1 if repo contance license
"""
msg = 'Checking of the license'
result = 0 if not self._get_license() else 1
output(msg, result, details='LICENCE file is not found')
self.score.add_check(msg, result)
return result
def _check_dockerfile(self):
'''provides checking of the Dockerfile
'''
msg = 'Checking of the Dockerfile at the root'
result = 0 if not self._get_dockerfile() else 1
output(msg, result, details='Dockerfile is not found')
self.score.add_check(msg, result)
return result
def _check_gitignore(self):
'''
checking if gitignore file is exist on the repo
'''
msg = 'Checking of the .gitignore at the root'
result = 0 if not self._get_gitignore() else 1
output(msg, result)
self.score.add_check(msg, result)
return result
def _get_dockerfile(self):
'''trying to get Dockerfile
'''
return self._get_file('Dockerfile', self._url)
def _get_gitignore(self):
'''trying to get .gitignore
'''
return self._get_file('.gitignore', self._url)
def _get_license(self):
'''trying to get license from repo
at this moment quick and dirty check
of the license contains
'''
license = self._get_file('LICENSE', self._url)
return True if b'License' in license and b'Copyright' in license else False
def _get_file(self, name, url):
"""trying to get file
"""
try:
return self._provider.get_content_file(
url, name)
except Exception:
return None | src/jams/analyzers/root.py | from checker import Checker
from output import output
class AnalyzeRoot(Checker):
"""provides checker for analyze root structure
of the project. It can be check licence, contributing, make file, etc
"""
def __init__(self, content, url=None, provider=None):
super().__init__(provider, url=url)
self._provider = provider
self._content = content
self._provider = provider
def name(self):
return 'root'
def check(self, repo, **kwargs):
config = kwargs.get('config')
checkers = self._make_checkers(names=self._get_checkers_names_from_cfg(config))
self.score.add_total_checks(len(checkers))
return sum(checkers)
def _default_checkers(self):
return [
self._check_license(),
self._check_gitignore()
]
def start_message(self):
"""return message before start of checkers
"""
print('\n>Checking of the root structure\n')
def _check_license(self):
""" returns 1 if repo contance license
"""
msg = 'Checking of the license'
result = 0 if not self._get_license() else 1
output(msg, result, details='LICENCE file is not found')
self.score.add_check(msg, result)
return result
def _check_dockerfile(self):
'''provides checking of the Dockerfile
'''
msg = 'Checking of the Dockerfile at the root'
result = 0 if not self._get_dockerfile() else 1
output(msg, result, details='Dockerfile is not found')
self.score.add_check(msg, result)
return result
def _check_gitignore(self):
'''
checking if gitignore file is exist on the repo
'''
msg = 'Checking of the .gitignore at the root'
result = 0 if not self._get_gitignore() else 1
output(msg, result)
self.score.add_check(msg, result)
return result
def _get_dockerfile(self):
'''trying to get Dockerfile
'''
return self._get_file('Dockerfile', self._url)
def _get_gitignore(self):
'''trying to get .gitignore
'''
return self._get_file('.gitignore', self._url)
def _get_license(self):
'''trying to get license from repo
at this moment quick and dirty check
of the license contains
'''
license = self._get_file('LICENSE', self._url)
return True if b'License' in license and b'Copyright' in license else False
def _get_file(self, name, url):
"""trying to get file
"""
try:
return self._provider.get_content_file(
url, name)
except Exception:
return None | 0.576065 | 0.161949 |
from pathlib import Path
import numpy as np
import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import ReduceLROnPlateau, MultiStepLR
from torchvision.transforms import RandomHorizontalFlip, RandomVerticalFlip
from torchvision.transforms import RandomResizedCrop
from torchvision.transforms import ColorJitter, ToTensor, Normalize
from common.dataset import FilesFromCsvDataset
from common.data_loaders import get_data_loader
from common.sampling import get_weighted_train_sampler
from models.inceptionresnetv2 import FurnitureInceptionResNet299
SEED = 765
DEBUG = True
OUTPUT_PATH = "output"
DATASET_PATH = Path("/home/fast_storage/imaterialist-challenge-furniture-2018/")
size = 350
TRAIN_TRANSFORMS = [
RandomResizedCrop(350, scale=(0.6, 1.0), interpolation=3),
RandomVerticalFlip(p=0.5),
RandomHorizontalFlip(p=0.5),
ColorJitter(hue=0.12, brightness=0.12),
ToTensor(),
Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
]
VAL_TRANSFORMS = [
RandomResizedCrop(350, scale=(0.7, 1.0), interpolation=3),
RandomHorizontalFlip(p=0.5),
ToTensor(),
Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
]
BATCH_SIZE = 24
NUM_WORKERS = 15
lowest_recall_classes_weight = np.array([
(3, 7.0), (14, 12.0), (18, 5.0), (26, 5.0), (38, 5.0), (49, 5.0), (62, 10.0), (65, 10.0), (104, 5.0), (123, 5.0)
])
dataset = FilesFromCsvDataset("output/filtered_train_dataset.csv")
train_sampler = get_weighted_train_sampler(dataset, lowest_recall_classes_weight, n_samples=len(dataset))
TRAIN_LOADER = get_data_loader(dataset,
data_transform=TRAIN_TRANSFORMS,
sampler=train_sampler,
batch_size=BATCH_SIZE,
num_workers=NUM_WORKERS,
pin_memory=True)
val_dataset = FilesFromCsvDataset("output/filtered_val_dataset.csv")
VAL_LOADER = get_data_loader(val_dataset,
data_transform=VAL_TRANSFORMS,
batch_size=BATCH_SIZE,
num_workers=NUM_WORKERS,
pin_memory=True)
MODEL = FurnitureInceptionResNet299(pretrained='imagenet')
N_EPOCHS = 100
OPTIM = SGD(
params=[
{"params": MODEL.stem.parameters(), 'lr': 0.000095},
{"params": MODEL.features.parameters(), 'lr': 0.00054},
{"params": MODEL.classifier.parameters(),
"lr": 0.095,
"weight_decay": 0.0001},
],
momentum=0.8,
)
LR_SCHEDULERS = [
MultiStepLR(OPTIM, milestones=[4, 5, 6, 7, 8, 9, 10, 12], gamma=0.56)
]
# REDUCE_LR_ON_PLATEAU = ReduceLROnPlateau(OPTIM, mode='min', factor=0.5, patience=3, threshold=0.08, verbose=True)
EARLY_STOPPING_KWARGS = {
'patience': 15,
# 'score_function': None
}
LOG_INTERVAL = 100 | classification/imaterialist_challenge_furniture_2018/configs/train/train_inceptionresnetv2_350_weighted_sampler2_resized_crop.py | from pathlib import Path
import numpy as np
import torch
from torch.optim import SGD
from torch.optim.lr_scheduler import ReduceLROnPlateau, MultiStepLR
from torchvision.transforms import RandomHorizontalFlip, RandomVerticalFlip
from torchvision.transforms import RandomResizedCrop
from torchvision.transforms import ColorJitter, ToTensor, Normalize
from common.dataset import FilesFromCsvDataset
from common.data_loaders import get_data_loader
from common.sampling import get_weighted_train_sampler
from models.inceptionresnetv2 import FurnitureInceptionResNet299
SEED = 765
DEBUG = True
OUTPUT_PATH = "output"
DATASET_PATH = Path("/home/fast_storage/imaterialist-challenge-furniture-2018/")
size = 350
TRAIN_TRANSFORMS = [
RandomResizedCrop(350, scale=(0.6, 1.0), interpolation=3),
RandomVerticalFlip(p=0.5),
RandomHorizontalFlip(p=0.5),
ColorJitter(hue=0.12, brightness=0.12),
ToTensor(),
Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
]
VAL_TRANSFORMS = [
RandomResizedCrop(350, scale=(0.7, 1.0), interpolation=3),
RandomHorizontalFlip(p=0.5),
ToTensor(),
Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5])
]
BATCH_SIZE = 24
NUM_WORKERS = 15
lowest_recall_classes_weight = np.array([
(3, 7.0), (14, 12.0), (18, 5.0), (26, 5.0), (38, 5.0), (49, 5.0), (62, 10.0), (65, 10.0), (104, 5.0), (123, 5.0)
])
dataset = FilesFromCsvDataset("output/filtered_train_dataset.csv")
train_sampler = get_weighted_train_sampler(dataset, lowest_recall_classes_weight, n_samples=len(dataset))
TRAIN_LOADER = get_data_loader(dataset,
data_transform=TRAIN_TRANSFORMS,
sampler=train_sampler,
batch_size=BATCH_SIZE,
num_workers=NUM_WORKERS,
pin_memory=True)
val_dataset = FilesFromCsvDataset("output/filtered_val_dataset.csv")
VAL_LOADER = get_data_loader(val_dataset,
data_transform=VAL_TRANSFORMS,
batch_size=BATCH_SIZE,
num_workers=NUM_WORKERS,
pin_memory=True)
MODEL = FurnitureInceptionResNet299(pretrained='imagenet')
N_EPOCHS = 100
OPTIM = SGD(
params=[
{"params": MODEL.stem.parameters(), 'lr': 0.000095},
{"params": MODEL.features.parameters(), 'lr': 0.00054},
{"params": MODEL.classifier.parameters(),
"lr": 0.095,
"weight_decay": 0.0001},
],
momentum=0.8,
)
LR_SCHEDULERS = [
MultiStepLR(OPTIM, milestones=[4, 5, 6, 7, 8, 9, 10, 12], gamma=0.56)
]
# REDUCE_LR_ON_PLATEAU = ReduceLROnPlateau(OPTIM, mode='min', factor=0.5, patience=3, threshold=0.08, verbose=True)
EARLY_STOPPING_KWARGS = {
'patience': 15,
# 'score_function': None
}
LOG_INTERVAL = 100 | 0.781706 | 0.443841 |
from datetime import time
import time as t
from heatmiserV3.devices import Master, Device
from heatmiserV3.config import Config
import logging
import logging.config
from heatmiserV3.protocol_manager import ProtocolManager
def main():
    """Connect to the Heatmiser master, read the full state of the "tm1"
    timer device and print the parsed response.

    Logging and the master's location come from ``Config``; the transport is
    chosen by ``Config.MASTER_LOCATION['type']`` ("ip" or "device").
    """
    log_config = Config.LOG_CONFIG
    logging.config.dictConfig(log_config)
    logger = logging.getLogger(__name__)
    master = Master(Config.MASTER_IRQ_ADDRESS)
    location = Config.MASTER_LOCATION['location']
    # Transport-type comparison is case-insensitive ("IP", "ip", ...).
    if Config.MASTER_LOCATION['type'].casefold() == 'ip'.casefold():
        master.connect_ip(location)
    elif Config.MASTER_LOCATION['type'].casefold() == 'device'.casefold():
        master.connect_device(location)
    else:
        raise ValueError("Unrecognized value for Config.MASTER_LOCATION.type, try ip or device",
                         Config.MASTER_LOCATION[
                             'type'])
    # Device at bus address 0; protocol definition looked up by model name.
    tm1 = Device("tm1", "Boat Timer", 0)
    protocol = ProtocolManager().get_protocol("tm1")
    # # sync time always
    # logger.info("Syncing time")
    # dow_time = ProtocolManager.get_dow_time()
    # response = master.update_field(tm1, "Current time", dow_time)
    # logger.info("Time synced, response={}".format(ProtocolManager().to_hex_str(response)))
    # t.sleep(1)
    # logger.info("Updating weekday schedule")
    # timer_block = ProtocolManager().get_timer_block(
    #     [[time(hour=5, minute=8), time(hour=11, minute=17)], [time(hour=19), time(hour=21, minute=9)]])
    # response = master.update_field(tm1, "Weekday", timer_block)
    # logger.info("Updated weekday schedule, response={}".format(ProtocolManager().to_hex_str(response)))
    # #
    t.sleep(1)
    # master.update_field(tm1, "On/Off", 1)
    #master.update_field(tm1, "Current timer state", 2) #1=on 2=off
    #
    # Read every field from the device and pretty-print the decoded values.
    response = master.send_request_all(tm1)
    parsed_response = protocol.parse_response(response)
    print("parsed response:")
    for k, v in sorted(parsed_response.items()):
        print(k, v)
    master.close_connection()
if __name__ == '__main__':
    main()
import time as t
from heatmiserV3.devices import Master, Device
from heatmiserV3.config import Config
import logging
import logging.config
from heatmiserV3.protocol_manager import ProtocolManager
def main():
log_config = Config.LOG_CONFIG
logging.config.dictConfig(log_config)
logger = logging.getLogger(__name__)
master = Master(Config.MASTER_IRQ_ADDRESS)
location = Config.MASTER_LOCATION['location']
if Config.MASTER_LOCATION['type'].casefold() == 'ip'.casefold():
master.connect_ip(location)
elif Config.MASTER_LOCATION['type'].casefold() == 'device'.casefold():
master.connect_device(location)
else:
raise ValueError("Unrecognized value for Config.MASTER_LOCATION.type, try ip or device",
Config.MASTER_LOCATION[
'type'])
tm1 = Device("tm1", "Boat Timer", 0)
protocol = ProtocolManager().get_protocol("tm1")
# # sync time always
# logger.info("Syncing time")
# dow_time = ProtocolManager.get_dow_time()
# response = master.update_field(tm1, "Current time", dow_time)
# logger.info("Time synced, response={}".format(ProtocolManager().to_hex_str(response)))
# t.sleep(1)
# logger.info("Updating weekday schedule")
# timer_block = ProtocolManager().get_timer_block(
# [[time(hour=5, minute=8), time(hour=11, minute=17)], [time(hour=19), time(hour=21, minute=9)]])
# response = master.update_field(tm1, "Weekday", timer_block)
# logger.info("Updated weekday schedule, response={}".format(ProtocolManager().to_hex_str(response)))
# #
t.sleep(1)
# master.update_field(tm1, "On/Off", 1)
#master.update_field(tm1, "Current timer state", 2) #1=on 2=off
#
response = master.send_request_all(tm1)
parsed_response = protocol.parse_response(response)
print("parsed response:")
for k, v in sorted(parsed_response.items()):
print(k, v)
master.close_connection()
if __name__ == '__main__':
main() | 0.273089 | 0.070945 |
import time
from picamera import PiCamera
from threading import Thread
from uploader import upload_file, has_network_connection
class ConfigurableCamera(PiCamera):
    """A PiCamera configured from a job's settings.

    Every setting is optional: attributes missing from ``job.settings`` are
    silently skipped and the PiCamera default is kept.
    """

    # Camera attributes copied verbatim from job.settings when present,
    # in the original application order.
    _MIRRORED_SETTINGS = (
        "ISO",
        "brightness",
        "contrast",
        "exposure_compensation",
        "exposure_mode",
        "rotation",
        "saturation",
        "sharpness",
        "shutter_speed",
        "awb_mode",
        "image_effect",
    )

    def __init__(self, job):
        super(ConfigurableCamera, self).__init__()
        self.__job = job
        self.__configure()
        self.__test_network()

    def __set_resolution(self):
        # Resolution needs both components; only applied when both exist.
        try:
            resolution_x = self.__job.settings.resolution_x
            resolution_y = self.__job.settings.resolution_y
        except AttributeError:
            pass
        else:
            self.resolution = (resolution_x, resolution_y)

    def __copy_setting(self, name):
        # Copy one optional attribute from job.settings onto the camera.
        # BUG FIX: brightness was previously read from ``job.brightness``
        # instead of ``job.settings.brightness``, so it was never applied.
        try:
            value = getattr(self.__job.settings, name)
        except AttributeError:
            pass
        else:
            setattr(self, name, value)

    def __set_auto_upload(self):
        # BUG FIX: previously ``auto_upload`` was only assigned when the
        # setting was "Off" or missing; any other explicit value left the
        # attribute unset and crashed time_lapse with AttributeError.
        try:
            self.auto_upload = self.__job.settings.auto_upload != "Off"
        except AttributeError:
            self.auto_upload = True

    def __configure(self):
        """Apply all job settings to the camera, then let the sensor settle."""
        self.__set_resolution()
        for name in self._MIRRORED_SETTINGS:
            self.__copy_setting(name)
        self.__set_auto_upload()
        # Give the sensor time to settle (gain/AWB) before capturing.
        time.sleep(2)

    def __test_network(self):
        # Cache connectivity once at start-up; consulted per frame later.
        self.has_network_connection = has_network_connection()

    def time_lapse(self, output_file):
        """Capture ``snap_total`` frames, one every ``snap_interval`` seconds.

        Each frame is uploaded in a background thread when auto-upload is
        enabled and the network was reachable at start-up; the job is
        archived after the final frame.
        """
        quality = self.__job.settings.quality
        interval = self.__job.settings.snap_interval
        total = self.__job.settings.snap_total
        capture = self.capture_continuous(output_file, quality=quality)
        for i, file_name in enumerate(capture):
            if self.auto_upload and self.has_network_connection:
                Thread(target=upload_file, args=(file_name,)).start()
            if i == total - 1:
                self.__job.archive()
                break
            time.sleep(interval)
from picamera import PiCamera
from threading import Thread
from uploader import upload_file, has_network_connection
class ConfigurableCamera(PiCamera):
def __init__(self, job):
super(ConfigurableCamera, self).__init__()
self.__job = job
self.__configure()
self.__test_network()
def __set_resolution(self):
try:
resolution_x = self.__job.settings.resolution_x
resolution_y = self.__job.settings.resolution_y
except AttributeError:
pass
else:
self.resolution = (resolution_x, resolution_y)
def __set_ISO(self):
try:
self.ISO = self.__job.settings.ISO
except AttributeError:
pass
def __set_brightness(self):
try:
self.brightness = self.__job.brightness
except AttributeError:
pass
def __set_contrast(self):
try:
self.contrast = self.__job.settings.contrast
except AttributeError:
pass
def __set_exposure_compensation(self):
try:
self.exposure_compensation = self.__job.settings.exposure_compensation
except AttributeError:
pass
def __set_exposure_mode(self):
try:
self.exposure_mode = self.__job.settings.exposure_mode
except AttributeError:
pass
def __set_rotation(self):
try:
self.rotation = self.__job.settings.rotation
except AttributeError:
pass
def __set_saturation(self):
try:
self.saturation = self.__job.settings.saturation
except AttributeError:
pass
def __set_sharpness(self):
try:
self.sharpness = self.__job.settings.sharpness
except AttributeError:
pass
def __set_shutter_speed(self):
try:
self.shutter_speed = self.__job.settings.shutter_speed
except AttributeError:
pass
def __set_awb_mode(self):
try:
self.awb_mode = self.__job.settings.awb_mode
except AttributeError:
pass
def __set_image_effect(self):
try:
self.image_effect = self.__job.settings.image_effect
except AttributeError:
pass
def __set_auto_upload(self):
try:
if self.__job.settings.auto_upload == "Off":
self.auto_upload = False
except AttributeError:
self.auto_upload = True
def __configure(self):
self.__set_resolution()
self.__set_ISO()
self.__set_brightness()
self.__set_contrast()
self.__set_exposure_compensation()
self.__set_exposure_mode()
self.__set_rotation()
self.__set_saturation()
self.__set_sharpness()
self.__set_shutter_speed()
self.__set_awb_mode()
self.__set_image_effect()
self.__set_auto_upload()
time.sleep(2)
def __test_network(self):
self.has_network_connection = has_network_connection()
def time_lapse(self, output_file):
quality = self.__job.settings.quality
interval = self.__job.settings.snap_interval
total = self.__job.settings.snap_total
capture = self.capture_continuous(output_file, quality=quality)
for i, file_name in enumerate(capture):
if self.auto_upload and self.has_network_connection:
Thread(target=upload_file, args=(file_name,)).start()
if i == total - 1:
self.__job.archive()
break
time.sleep(interval) | 0.460532 | 0.125493 |
import requests
import re
import urllib.parse as parse
from tqdm import tqdm
from bs4 import BeautifulSoup
class Show:
    """Search eztv and The Pirate Bay for a show and pick one torrent per
    episode.

    The constructor runs the full pipeline: fetch both result pages, parse
    them into episode dicts, disqualify/rate the candidates, sort by
    season+episode ("sortkey") and keep the highest-rated torrent for each
    distinct episode.
    """

    def __init__(self, user_show):
        self.base_url_eztv = "https://eztv.re/search/"
        self.base_url_pb = "https://thepiratebay.asia/s/?q="
        if not user_show:
            user_show = "game of thrones"
        self.full_url_eztv = self.base_url_eztv + user_show
        self.full_url_pb = self.base_url_pb + parse.quote_plus(user_show)
        self.html_eztv = self._fetch_html_eztv(self.full_url_eztv)
        self.html_pb = ""
        print("Fetching search results from PirateBay...")
        progress_bar = tqdm(total=100)
        self._fetch_html_pb(self.full_url_pb, progress_bar)
        self.soup_eztv = BeautifulSoup(self.html_eztv, "html.parser")
        self.soup_pb = BeautifulSoup(self.html_pb, "html.parser")
        self.episodes = []
        self.rated_episodes = []
        self.sorted_episodes = []
        self.selected_episodes = []
        self._parse_episodes_pb()
        self._parse_episodes_eztv()
        self._rate_episodes()
        self._sort_episodes()
        self._select_episodes()
        self._print_info()

    def _print_info(self):
        print(
            f"Found {len(self.selected_episodes)} episodes from {int(self.selected_episodes[-1]['season'])} seasons."
        )

    def _select_episodes(self):
        """Keep the best-rated candidate for each distinct sortkey.

        Relies on self.sorted_episodes being ordered by sortkey so that each
        episode's candidates are contiguous.
        """
        buffer = []
        prev_sortkey = None
        for episode in self.sorted_episodes:
            if episode["sortkey"] != prev_sortkey:
                # New episode group: flush the previous one.
                self._process_buffer(buffer)
                buffer = []
            buffer.append(episode)
            prev_sortkey = episode["sortkey"]
        # BUG FIX: flush the final group, which was previously dropped.
        self._process_buffer(buffer)

    def _process_buffer(self, buffer):
        # Select the highest-rated candidate of one episode group.
        if not buffer:
            return
        max_rating_episode = max(buffer, key=lambda x: x["rating"])
        self.selected_episodes.append(max_rating_episode)

    def _parse_episodes_eztv(self):
        for episode in tqdm(
            self.soup_eztv.find_all("tr", class_="forum_header_border")
        ):
            try:
                self._make_episodes_eztv(episode)
            except Exception as e:
                print(e)

    def _parse_episodes_pb(self):
        for episode in tqdm(self.soup_pb.find_all("tr")):
            try:
                self._make_episodes_pb(episode)
            except Exception as e:
                print(e)

    def _rate_episodes(self):
        """Drop unusable candidates and attach a 0-2 "rating" to the rest."""
        for episode in self.episodes:
            # Disqualify
            # BUG FIX: the original test was `1600 < size < 200`, which can
            # never be true; the intent is to reject sizes outside 200-1600.
            if episode["size"] > 1600 or episode["size"] < 200:
                continue
            if episode["quality"] and episode["quality"] < 360:
                continue
            if episode["seeds"] and episode["seeds"] < 1:
                continue
            if not episode["magnet"]:
                continue
            # Low rating
            episode["rating"] = 0
            # High rating
            if 1200 > episode["size"] > 400:
                episode["rating"] = 1
            # Really high rating
            if episode["quality"]:
                if episode["quality"] > 720:
                    episode["rating"] = 2
            self.rated_episodes.append(episode)

    def _sort_episodes(self):
        # sortkey is the zero-padded "SSEE" string, so lexicographic order
        # matches broadcast order.
        self.sorted_episodes = sorted(self.rated_episodes, key=lambda x: x["sortkey"])

    def _make_episodes_eztv(self, episode):
        """Build one episode dict from an eztv result row."""
        title = str(episode.find("a", class_="epinfo").get_text()).strip()
        season, ep_number = self._get_season_and_episode(title)
        sortkey = season + ep_number
        quality = self._get_quality(title)
        cells = episode.find_all("td")
        size = self._get_size(cells)
        age = str(cells[4].contents[0])
        seeds = self._get_seeds(cells)
        magnet = str(episode.find("a", class_="magnet")["href"])
        episode_dict = {
            "title": title,
            "season": season,
            "episode": ep_number,
            "sortkey": sortkey,
            "quality": quality,
            "size": size,
            "age": age,
            "seeds": seeds,
            "magnet": magnet,
        }
        self.episodes.append(episode_dict)

    def _make_episodes_pb(self, episode):
        """Build one episode dict from a Pirate Bay result row (Video only)."""
        data_type = str(
            episode.find("a", {"title": "More from this category"}).get_text()
        ).strip()
        if data_type != "Video":
            return
        title = str(episode.find("a", class_="detLink").get_text()).strip()
        season, ep_number = self._get_season_and_episode(title)
        sortkey = season + ep_number
        quality = self._get_quality(title)
        magnet = str(
            episode.find("a", {"title": "Download this torrent using magnet"})["href"]
        )
        size = self._get_size_pb(episode)
        age = 0
        seeds, leechers = self._get_seeds_and_leech_pb(episode)
        episode_dict = {
            "title": title,
            "season": season,
            "episode": ep_number,
            "sortkey": sortkey,
            "quality": quality,
            "size": size,
            "age": age,
            "seeds": seeds,
            "leechers": leechers,
            "magnet": magnet,
        }
        self.episodes.append(episode_dict)

    def _get_seeds_and_leech_pb(self, episode):
        tds = episode.find_all("td", {"align": "right"})
        seeds = int(str(tds[-2].get_text()).strip())
        leechers = int(str(tds[-1].get_text()).strip())
        return seeds, leechers

    def _get_size_pb(self, episode):
        """Return the torrent size in MB (GiB values are scaled by 1000)."""
        size_text = str(episode.find("font", class_="detDesc").get_text())
        size_match = re.search(r"Size (\d+\.\d+).+([MiBG]{3})", size_text)
        if not size_match:
            # BUG FIX: previously fell through and returned None, which later
            # crashed _rate_episodes; raise like _get_size so the caller's
            # try/except skips this row instead.
            raise Exception("Could not get Size.")
        size = float(size_match[1])
        if size_match[2] == "GiB":
            size = size * 1000
        return int(size)

    def _get_quality(self, title):
        # Returns the vertical resolution (e.g. 720) or None when absent.
        quality = re.search(r" (\d{3,4})p ", title)
        if quality:
            quality = int(quality[1])
        return quality

    def _get_season_and_episode(self, title):
        """Return zero-padded (season, episode) strings parsed from *title*."""
        title_match = re.search(r"[Ss](\d\d)[Ee](\d\d)", title)
        if not title_match:
            title_match = re.search(r" (\d+)x(\d+) ", title)
        if not title_match:
            raise Exception("Could not get Season / Episode.")
        season = "0" + title_match[1] if len(title_match[1]) < 2 else title_match[1]
        ep_number = (
            "0" + title_match[2] if len(title_match[2]) < 2 else title_match[2]
        )
        return season, ep_number

    def _get_seeds(self, cells):
        seeds = str(cells[5].contents[0].get_text())
        if seeds.isnumeric():
            seeds = int(seeds)
        else:
            raise Exception("No Seeds.")
        return seeds

    def _get_size(self, cells):
        """Return the torrent size in MB from an eztv row's cells."""
        if not cells[3].contents:
            raise Exception("Could not get Size.")
        size_text = str(cells[3].contents[0])
        size = float(re.search(r"^(\d+\.\d+)", size_text)[1])
        if "GB" in size_text:
            size = size * 1000
        return int(size)

    def _fetch_html_eztv(self, full_url):
        print("Fetching search results from eztv...")
        html_doc = requests.get(full_url).text
        print("Done fetching...")
        return html_doc

    def _fetch_html_pb(self, full_url, progress_bar):
        # Recursively follow the "Next" pagination link, accumulating HTML.
        progress_bar.update(1)
        html_doc = requests.get(full_url).text
        self.html_pb += html_doc
        soup_pb = BeautifulSoup(html_doc, "html.parser")
        next_image = soup_pb.find("img", {"alt": "Next"})
        if not next_image:
            return
        next_search = next_image.find_parent("a")["href"].split("/search/")[1]
        next_page = self.base_url_pb + next_search
        if next_page:
            self._fetch_html_pb(next_page, progress_bar)
import re
import urllib.parse as parse
from tqdm import tqdm
from bs4 import BeautifulSoup
class Show:
def __init__(self, user_show):
self.base_url_eztv = "https://eztv.re/search/"
self.base_url_pb = "https://thepiratebay.asia/s/?q="
if not user_show:
user_show = "game of thrones"
self.full_url_eztv = self.base_url_eztv + user_show
self.full_url_pb = self.base_url_pb + parse.quote_plus(user_show)
self.html_eztv = self._fetch_html_eztv(self.full_url_eztv)
self.html_pb = ""
print("Fetching search results from PirateBay...")
progress_bar = tqdm(total=100)
self._fetch_html_pb(self.full_url_pb, progress_bar)
self.soup_eztv = BeautifulSoup(self.html_eztv, "html.parser")
self.soup_pb = BeautifulSoup(self.html_pb, "html.parser")
self.episodes = []
self.rated_episodes = []
self.sorted_episodes = []
self.selected_episodes = []
self._parse_episodes_pb()
self._parse_episodes_eztv()
self._rate_episodes()
self._sort_episodes()
self._select_episodes()
self._print_info()
def _print_info(self):
print(
f"Found {len(self.selected_episodes)} episodes from {int(self.selected_episodes[-1]['season'])} seasons."
)
def _select_episodes(self):
buffer = []
prev_sortkey = None
for episode in self.sorted_episodes:
if episode["sortkey"] != prev_sortkey:
self._process_buffer(buffer)
buffer = []
buffer.append(episode)
prev_sortkey = episode["sortkey"]
continue
buffer.append(episode)
prev_sortkey = episode["sortkey"]
def _process_buffer(self, buffer):
if not buffer:
return
max_rating_episode = max(buffer, key=lambda x: x["rating"])
self.selected_episodes.append(max_rating_episode)
def _parse_episodes_eztv(self):
for episode in tqdm(
self.soup_eztv.find_all("tr", class_="forum_header_border")
):
try:
self._make_episodes_eztv(episode)
except Exception as e:
print(e)
def _parse_episodes_pb(self):
for episode in tqdm(self.soup_pb.find_all("tr")):
try:
self._make_episodes_pb(episode)
except Exception as e:
print(e)
def _rate_episodes(self):
for episode in self.episodes:
# Disqualify
if 1600 < episode["size"] < 200:
continue
if episode["quality"] and episode["quality"] < 360:
continue
if episode["seeds"] and episode["seeds"] < 1:
continue
if not episode["magnet"]:
continue
# Low rating
episode["rating"] = 0
# High rating
if 1200 > episode["size"] > 400:
episode["rating"] = 1
# Really high rating
if episode["quality"]:
if episode["quality"] > 720:
episode["rating"] = 2
self.rated_episodes.append(episode)
def _sort_episodes(self):
self.sorted_episodes = sorted(self.rated_episodes, key=lambda x: x["sortkey"])
def _make_episodes_eztv(self, episode):
title = str(episode.find("a", class_="epinfo").get_text()).strip()
season, ep_number = self._get_season_and_episode(title)
sortkey = season + ep_number
quality = self._get_quality(title)
cells = episode.find_all("td")
size = self._get_size(cells)
age = str(cells[4].contents[0])
seeds = self._get_seeds(cells)
magnet = str(episode.find("a", class_="magnet")["href"])
episode_dict = {
"title": title,
"season": season,
"episode": ep_number,
"sortkey": sortkey,
"quality": quality,
"size": size,
"age": age,
"seeds": seeds,
"magnet": magnet,
}
self.episodes.append(episode_dict)
def _make_episodes_pb(self, episode):
data_type = str(
episode.find("a", {"title": "More from this category"}).get_text()
).strip()
if data_type != "Video":
return
title = str(episode.find("a", class_="detLink").get_text()).strip()
season, ep_number = self._get_season_and_episode(title)
sortkey = season + ep_number
quality = self._get_quality(title)
magnet = str(
episode.find("a", {"title": "Download this torrent using magnet"})["href"]
)
size = self._get_size_pb(episode)
age = 0
seeds, leechers = self._get_seeds_and_leech_pb(episode)
episode_dict = {
"title": title,
"season": season,
"episode": ep_number,
"sortkey": sortkey,
"quality": quality,
"size": size,
"age": age,
"seeds": seeds,
"leechers": leechers,
"magnet": magnet,
}
self.episodes.append(episode_dict)
def _get_seeds_and_leech_pb(self, episode):
tds = episode.find_all("td", {"align": "right"})
seeds = int(str(tds[-2].get_text()).strip())
leechers = int(str(tds[-1].get_text()).strip())
return seeds, leechers
def _get_size_pb(self, episode):
size_text = str(episode.find("font", class_="detDesc").get_text())
size_match = re.search(r"Size (\d+\.\d+).+([MiBG]{3})", size_text)
if size_match:
size = float(size_match[1])
if size_match[2] == "GiB":
size = size * 1000
return int(size)
def _get_quality(self, title):
quality = re.search(r" (\d{3,4})p ", title)
if quality:
quality = int(quality[1])
return quality
def _get_season_and_episode(self, title):
title_match = re.search(r"[Ss](\d\d)[Ee](\d\d)", title)
if not title_match:
title_match = re.search(r" (\d+)x(\d+) ", title)
if not title_match:
raise Exception("Could not get Season / Episode.")
if title_match:
season = "0" + title_match[1] if len(title_match[1]) < 2 else title_match[1]
ep_number = (
"0" + title_match[2] if len(title_match[2]) < 2 else title_match[2]
)
return season, ep_number
def _get_seeds(self, cells):
seeds = str(cells[5].contents[0].get_text())
if seeds.isnumeric():
seeds = int(seeds)
else:
raise Exception("No Seeds.")
return seeds
def _get_size(self, cells):
if not cells[3].contents:
raise Exception("Could not get Size.")
else:
size_text = str(cells[3].contents[0])
size = float(re.search(r"^(\d+\.\d+)", size_text)[1])
if "GB" in size_text:
size = size * 1000
size = int(size)
return size
def _fetch_html_eztv(self, full_url):
print("Fetching search results from eztv...")
html_doc = requests.get(full_url).text
print("Done fetching...")
return html_doc
def _fetch_html_pb(self, full_url, progress_bar):
progress_bar.update(1)
html_doc = requests.get(full_url).text
self.html_pb += html_doc
soup_pb = BeautifulSoup(html_doc, "html.parser")
next_image = soup_pb.find("img", {"alt": "Next"})
if not next_image:
return
next_search = next_image.find_parent("a")["href"].split("/search/")[1]
next_page = self.base_url_pb + next_search
if next_page:
self._fetch_html_pb(next_page, progress_bar) | 0.287268 | 0.161023 |
import torch
import torch.nn as nn
from ding.torch_utils import MLP, get_lstm, Transformer
from ding.model import DiscreteHead
from ding.utils import list_split
class RelationGCN(nn.Module):
    """One round of relation aggregation over per-clone features.

    Thorn and clone relation tensors are projected, masked and max-pooled
    over their neighbor axis, then concatenated with the clone and food
    relation features and projected back to ``hidden_shape``.
    """

    def __init__(
        self,
        hidden_shape: int,
        activation=nn.ReLU(inplace=True),
    ) -> None:
        super(RelationGCN, self).__init__()
        self.act = activation
        # Per-relation projections plus the post-concat aggregation MLP.
        self.thorn_relation_layers = MLP(
            hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
        )
        self.clone_relation_layers = MLP(
            hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
        )
        self.agg_relation_layers = MLP(
            4 * hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
        )

    def forward(self, food_relation, thorn_relation, clone, clone_relation, thorn_mask, clone_mask):
        """Return updated clone features.

        NOTE(review): assumed layouts -- clone/food_relation [b, n_clone, c],
        thorn_relation [b, n_clone, n_thorn, c], clone_relation
        [b, n_clone, n_clone, c]; confirm against the caller.
        """
        batch = clone.shape[0]
        n_thorn = thorn_relation.shape[2]
        n_clone = clone.shape[1]
        # Mask out padded thorns/clones, then max-pool over the neighbor axis.
        masked_thorn = self.thorn_relation_layers(thorn_relation) * thorn_mask.view(batch, 1, n_thorn, 1)
        thorn_agg = masked_thorn.max(dim=2).values
        masked_clone = self.clone_relation_layers(clone_relation) * clone_mask.view(batch, 1, n_clone, 1)
        clone_agg = masked_clone.max(dim=2).values
        combined = torch.cat([clone, food_relation, thorn_agg, clone_agg], dim=2)
        return self.agg_relation_layers(combined)
class Encoder(nn.Module):
    """Fuse scalar, food-map and relation observations into one embedding.

    Each input stream has its own MLP (or conv stack for the spatial food
    map); per-clone features are mixed by a RelationGCN, max-pooled over
    clones, and concatenated with the scalar and food summaries before a
    final projection to ``encode_shape``.
    """

    def __init__(
        self,
        scalar_shape: int,
        food_shape: int,
        food_relation_shape: int,
        thorn_relation_shape: int,
        clone_shape: int,
        clone_relation_shape: int,
        hidden_shape: int,
        encode_shape: int,
        activation=nn.ReLU(inplace=True),
    ) -> None:
        super(Encoder, self).__init__()
        self.scalar_encoder = MLP(
            scalar_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
        )
        # Three-stage strided conv stack for the spatial food map.
        conv_channels = [hidden_shape // 4, hidden_shape // 2, hidden_shape]
        conv_kernels = [5, 3, 1]
        conv_strides = [4, 2, 1]
        conv_layers = []
        in_channels = food_shape
        for out_channels, kernel, stride in zip(conv_channels, conv_kernels, conv_strides):
            conv_layers.append(nn.Conv2d(in_channels, out_channels, kernel, stride, kernel // 2))
            conv_layers.append(activation)
            in_channels = out_channels
        self.food_encoder = nn.Sequential(*conv_layers)
        self.food_relation_encoder = MLP(
            food_relation_shape, hidden_shape // 2, hidden_shape, layer_num=2, activation=activation
        )
        self.thorn_relation_encoder = MLP(
            thorn_relation_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
        )
        self.clone_encoder = MLP(
            clone_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
        )
        self.clone_relation_encoder = MLP(
            clone_relation_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
        )
        self.gcn = RelationGCN(hidden_shape, activation=activation)
        self.agg_encoder = MLP(
            3 * hidden_shape, hidden_shape, encode_shape, layer_num=2, activation=activation
        )

    def forward(self, scalar, food, food_relation, thorn_relation, thorn_mask, clone, clone_relation, clone_mask):
        scalar_emb = self.scalar_encoder(scalar)  # [b, c]
        # Spatially max-pool the conv features down to one vector per sample.
        food_map = self.food_encoder(food)  # [b, c, h, w]
        food_emb = food_map.reshape(*food_map.shape[:2], -1).max(-1).values  # [b, c]
        food_rel_emb = self.food_relation_encoder(food_relation)
        thorn_rel_emb = self.thorn_relation_encoder(thorn_relation)  # [b, n_clone, n_thorn, c] -- assumed; confirm
        clone_emb = self.clone_encoder(clone)  # [b, n_clone, c]
        clone_rel_emb = self.clone_relation_encoder(clone_relation)  # [b, n_clone, n_clone, c]
        # Mix per-clone features through the relation GCN, then pool over clones.
        clone_mixed = self.gcn(food_rel_emb, thorn_rel_emb, clone_emb, clone_rel_emb, thorn_mask, clone_mask)
        clone_pooled = clone_mixed.max(1).values  # [b, c]
        return self.agg_encoder(torch.cat([scalar_emb, food_emb, clone_pooled], dim=1))
class GoBiggerHybridActionSimpleV3(nn.Module):
    r"""
    Overview:
        The GoBiggerHybridAction model.
    Interfaces:
        ``__init__``, ``forward``, ``compute_encoder``, ``compute_critic``
    """

    def __init__(
        self,
        scalar_shape: int,
        food_shape: int,
        food_relation_shape: int,
        thorn_relation_shape: int,
        clone_shape: int,
        clone_relation_shape: int,
        hidden_shape: int,
        encode_shape: int,
        action_type_shape: int,
        rnn: bool = False,
        activation=nn.ReLU(inplace=True),
    ) -> None:
        super(GoBiggerHybridActionSimpleV3, self).__init__()
        self.activation = activation
        self.action_type_shape = action_type_shape
        # Observation encoder: fuses all observation streams into one vector.
        self.encoder = Encoder(
            scalar_shape,
            food_shape,
            food_relation_shape,
            thorn_relation_shape,
            clone_shape,
            clone_relation_shape,
            hidden_shape,
            encode_shape,
            activation,
        )
        # Discrete action-type head.
        # NOTE(review): the input size 32 is hard-coded; presumably
        # encode_shape is always 32 in the configs -- confirm before changing.
        self.action_type_head = DiscreteHead(32, action_type_shape, layer_num=2, activation=self.activation)

    def forward(self, inputs):
        """Encode one flattened batch of observations and return per-player
        action-type logits shaped [batch, players_per_team, n_action_types]."""
        obs_keys = ('scalar', 'food', 'food_relation', 'thorn_relation',
                    'thorn_mask', 'clone', 'clone_relation', 'clone_mask')
        fused_embedding = self.encoder(*(inputs[key] for key in obs_keys))
        logit = self.action_type_head(fused_embedding)['logit']
        # Un-flatten the leading dim into (batch, players per team).
        batch = inputs['batch']
        players = inputs['player_num_per_team']
        logit = logit.reshape(batch, players, *logit.shape[1:])
        return {'logit': logit}
import torch.nn as nn
from ding.torch_utils import MLP, get_lstm, Transformer
from ding.model import DiscreteHead
from ding.utils import list_split
class RelationGCN(nn.Module):
def __init__(
self,
hidden_shape: int,
activation=nn.ReLU(inplace=True),
) -> None:
super(RelationGCN, self).__init__()
# activation
self.act = activation
# layers
self.thorn_relation_layers = MLP(
hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
)
self.clone_relation_layers = MLP(
hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
)
self.agg_relation_layers = MLP(
4 * hidden_shape, hidden_shape, hidden_shape, layer_num=1, activation=activation
)
def forward(self, food_relation, thorn_relation, clone, clone_relation, thorn_mask, clone_mask):
b, t, c = clone.shape[0], thorn_relation.shape[2], clone.shape[1]
# encode thorn relation
thorn_relation = self.thorn_relation_layers(thorn_relation) * thorn_mask.view(b, 1, t, 1) # [b,n_clone,n_thorn,c]
thorn_relation = thorn_relation.max(2).values # [b,n_clone,c]
# encode clone relation
clone_relation = self.clone_relation_layers(clone_relation) * clone_mask.view(b, 1, c, 1) # [b,n_clone,n_clone,c]
clone_relation = clone_relation.max(2).values # [b,n_clone,c]
# encode aggregated relation
agg_relation = torch.cat([clone, food_relation, thorn_relation, clone_relation], dim=2)
clone = self.agg_relation_layers(agg_relation)
return clone
class Encoder(nn.Module):
def __init__(
self,
scalar_shape: int,
food_shape: int,
food_relation_shape: int,
thorn_relation_shape: int,
clone_shape: int,
clone_relation_shape: int,
hidden_shape: int,
encode_shape: int,
activation=nn.ReLU(inplace=True),
) -> None:
super(Encoder, self).__init__()
# scalar encoder
self.scalar_encoder = MLP(
scalar_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
)
# food encoder
layers = []
kernel_size = [5, 3, 1]
stride = [4, 2, 1]
shape = [hidden_shape // 4, hidden_shape // 2, hidden_shape]
input_shape = food_shape
for i in range(len(kernel_size)):
layers.append(nn.Conv2d(input_shape, shape[i], kernel_size[i], stride[i], kernel_size[i] // 2))
layers.append(activation)
input_shape = shape[i]
self.food_encoder = nn.Sequential(*layers)
# food relation encoder
self.food_relation_encoder = MLP(
food_relation_shape, hidden_shape // 2, hidden_shape, layer_num=2, activation=activation
)
# thorn relation encoder
self.thorn_relation_encoder = MLP(
thorn_relation_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
)
# clone encoder
self.clone_encoder = MLP(
clone_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
)
# clone relation encoder
self.clone_relation_encoder = MLP(
clone_relation_shape, hidden_shape // 4, hidden_shape, layer_num=2, activation=activation
)
# gcn
self.gcn = RelationGCN(
hidden_shape, activation=activation
)
self.agg_encoder = MLP(
3 * hidden_shape, hidden_shape, encode_shape, layer_num=2, activation=activation
)
def forward(self, scalar, food, food_relation, thorn_relation, thorn_mask, clone, clone_relation, clone_mask):
# encode scalar
scalar = self.scalar_encoder(scalar) # [b,c]
# encode food
food = self.food_encoder(food) # [b,c,h,w]
food = food.reshape(*food.shape[:2], -1).max(-1).values # [b,c]
# encode food relation
food_relation = self.food_relation_encoder(food_relation) # [b,c]
# encode thorn relation
thorn_relation = self.thorn_relation_encoder(thorn_relation) # [b,n_clone,n_thorn, c]
# encode clone
clone = self.clone_encoder(clone) # [b,n_clone,c]
# encode clone relation
clone_relation = self.clone_relation_encoder(clone_relation) # [b,n_clone,n_clone,c]
# aggregate all relation
clone = self.gcn(food_relation, thorn_relation, clone, clone_relation, thorn_mask, clone_mask)
clone = clone.max(1).values # [b,c]
return self.agg_encoder(torch.cat([scalar, food, clone], dim=1))
class GoBiggerHybridActionSimpleV3(nn.Module):
r"""
Overview:
The GoBiggerHybridAction model.
Interfaces:
``__init__``, ``forward``, ``compute_encoder``, ``compute_critic``
"""
def __init__(
self,
scalar_shape: int,
food_shape: int,
food_relation_shape: int,
thorn_relation_shape: int,
clone_shape: int,
clone_relation_shape: int,
hidden_shape: int,
encode_shape: int,
action_type_shape: int,
rnn: bool = False,
activation=nn.ReLU(inplace=True),
) -> None:
super(GoBiggerHybridActionSimpleV3, self).__init__()
self.activation = activation
self.action_type_shape = action_type_shape
# encoder
self.encoder = Encoder(scalar_shape, food_shape, food_relation_shape, thorn_relation_shape, clone_shape, clone_relation_shape, hidden_shape, encode_shape, activation)
# head
self.action_type_head = DiscreteHead(32, action_type_shape, layer_num=2, activation=self.activation)
def forward(self, inputs):
scalar = inputs['scalar']
food = inputs['food']
food_relation = inputs['food_relation']
thorn_relation = inputs['thorn_relation']
thorn_mask = inputs['thorn_mask']
clone = inputs['clone']
clone_relation = inputs['clone_relation']
clone_mask = inputs['clone_mask']
fused_embedding_total = self.encoder(scalar, food, food_relation, thorn_relation, thorn_mask, clone, clone_relation, clone_mask)
B = inputs['batch']
A = inputs['player_num_per_team']
action_type_logit = self.action_type_head(fused_embedding_total)['logit'] # B, M, action_type_size
action_type_logit = action_type_logit.reshape(B, A, *action_type_logit.shape[1:])
result = {
'logit': action_type_logit,
}
return result | 0.941237 | 0.359758 |
import struct
import asyncio
class SubscriberProtocol(asyncio.Protocol):
    """Receive length-prefixed messages from a publisher.

    Wire format: a 4-byte big-endian unsigned length header, followed by a
    payload of exactly that many bytes, repeated.
    """

    hdrfmt = struct.Struct('>I')

    def __init__(self, loop):
        self.loop = loop

    def connection_made(self, transport):
        print('connection_made')
        self.transport = transport
        self.reset()

    def connection_lost(self, exc):
        print('connection_lost')
        # Stop the event loop when the publisher goes away.
        self.loop.stop()

    def data_received(self, data):
        # Accumulate raw bytes and peel off complete packets; a "packet" is
        # alternately a fixed-size header and a variable-size payload.
        self.accum_buffer.extend(data)
        while len(self.accum_buffer) >= self.packet_size:
            pkt = memoryview(self.accum_buffer)[:self.packet_size]
            self.accum_buffer = self.accum_buffer[self.packet_size:]
            if self.wait_hdr:
                # Header packet: records the size of the payload to follow.
                self.wait_hdr = False
                self.packet_size = self.hdrfmt.unpack(pkt)[0]
            else:
                # Payload packet: deliver it, then expect a header again.
                # BUG FIX: handle_msg used to be called for header packets as
                # well; only payloads are messages (cf. subscribe_stuff).
                self.wait_hdr = True
                self.packet_size = self.hdrfmt.size
                self.handle_msg(pkt)

    def reset(self):
        """Return to the initial wait-for-header state with an empty buffer."""
        self.wait_hdr = True
        self.packet_size = self.hdrfmt.size
        self.accum_buffer = bytearray()

    def handle_msg(self, msg):
        # NOTE(review): the payload's leading 4 bytes are decoded as a
        # native-endian index ('I') while the header uses '>I' -- presumably
        # this matches the publisher's payload layout; confirm byte order.
        idx = struct.unpack('I', msg[:4])[0]
        print('{0}: {1}'.format(idx, len(msg)))
async def reconnect(loop):
    """Connect to the publisher at 127.0.0.1:10000 with a SubscriberProtocol.

    Rewritten as a native coroutine: the ``@asyncio.coroutine`` /
    ``yield from`` style was deprecated in Python 3.8 and removed in 3.11.
    """
    coro = loop.create_connection(lambda: SubscriberProtocol(loop), '127.0.0.1', 10000)
    transport, protocol = await coro
def handle_msg(msg):
    """Debug-print a message's leading native-endian uint32 and its length."""
    (idx,) = struct.unpack('I', msg[:4])
    print('{0}: {1}'.format(idx, len(msg)))
async def subscribe_stuff():
    """Stream length-prefixed messages from 127.0.0.1:10000 until EOF.

    Native-coroutine rewrite of the removed ``@asyncio.coroutine`` style.
    Reads a 4-byte big-endian length header, then the payload, and hands
    each payload to handle_msg; stops on a short read (connection closed).
    """
    reader, writer = await asyncio.open_connection('127.0.0.1', 10000)
    hdrfmt = struct.Struct('>I')
    while True:
        try:
            hdr = await reader.readexactly(hdrfmt.size)
            packet_size = hdrfmt.unpack(hdr)[0]
            payload = await reader.readexactly(packet_size)
        except asyncio.IncompleteReadError as e:
            print('partial read {} / {}'.format(len(e.partial), e.expected))
            break
        handle_msg(payload)
loop = asyncio.get_event_loop()
#coro = loop.create_connection(lambda: SubscriberProtocol(loop), '127.0.0.1', 10000)
#transport, protocol = loop.run_until_complete(coro)
asyncio.async(reconnect(loop))
loop.run_forever()
loop.close()
#loop.run_until_complete(subscribe_stuff()) | asyncio_subscriber.py | import struct
import asyncio
class SubscriberProtocol(asyncio.Protocol):
hdrfmt = struct.Struct('>I')
def __init__(self, loop):
self.loop = loop
def connection_made(self, transport):
print('connection_made')
self.transport = transport
self.reset()
def connection_lost(self, exc):
print('connection_lost')
self.loop.stop()
def data_received(self, data):
self.accum_buffer.extend(data)
while len(self.accum_buffer) >= self.packet_size:
pkt = memoryview(self.accum_buffer)[:self.packet_size]
self.accum_buffer = self.accum_buffer[self.packet_size:]
if self.wait_hdr:
self.wait_hdr = False
self.packet_size = self.hdrfmt.unpack(pkt)[0]
else:
self.wait_hdr = True
self.packet_size = self.hdrfmt.size
self.handle_msg(pkt)
def reset(self):
self.wait_hdr = True
self.packet_size = self.hdrfmt.size
self.accum_buffer = bytearray()
def handle_msg(self, msg):
idx = struct.unpack('I', msg[:4])[0]
print('{0}: {1}'.format(idx, len(msg)))
@asyncio.coroutine
def reconnect(loop):
coro = loop.create_connection(lambda: SubscriberProtocol(loop), '127.0.0.1', 10000)
transport, protocol = yield from coro
def handle_msg(msg):
idx = struct.unpack('I', msg[:4])[0]
print('{0}: {1}'.format(idx, len(msg)))
@asyncio.coroutine
def subscribe_stuff():
reader, writer = yield from asyncio.open_connection('127.0.0.1', 10000)
hdrfmt = struct.Struct('>I')
while True:
try:
hdr = yield from reader.readexactly(hdrfmt.size)
packet_size = hdrfmt.unpack(hdr)[0]
payload = yield from reader.readexactly(packet_size)
except asyncio.IncompleteReadError as e:
print('partial read {} / {}'.format(len(e.partial), e.expected))
break
handle_msg(payload)
loop = asyncio.get_event_loop()
#coro = loop.create_connection(lambda: SubscriberProtocol(loop), '127.0.0.1', 10000)
#transport, protocol = loop.run_until_complete(coro)
asyncio.async(reconnect(loop))
loop.run_forever()
loop.close()
#loop.run_until_complete(subscribe_stuff()) | 0.189596 | 0.102799 |
import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
import random
import os
import logging
from transformers import get_cosine_schedule_with_warmup, DistilBertTokenizer
from args import get_args
from model.multimodal_transformer import MMT_VideoQA
from loss import LogSoftmax
from util import compute_a2v
from train.train_videoqa import train, eval
from data.videoqa_loader import get_videoqa_loaders
# args, logging
args = get_args()
if not (os.path.isdir(args.save_dir)):
os.mkdir(os.path.join(args.save_dir))
logging.basicConfig(
level=logging.INFO, format="%(asctime)s %(levelname)-8s %(message)s"
)
logFormatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")
rootLogger = logging.getLogger()
fileHandler = logging.FileHandler(os.path.join(args.save_dir, "stdout.log"), "w+")
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)
logging.info(args)
# set random seeds
torch.manual_seed(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)
# get answer embeddings
bert_tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")
a2id, id2a, a2v = None, None, None
if not args.mc:
a2id, id2a, a2v = compute_a2v(
vocab_path=args.vocab_path,
bert_tokenizer=bert_tokenizer,
amax_words=args.amax_words,
)
logging.info(f"Length of Answer Vocabulary: {len(a2id)}")
# Model
model = MMT_VideoQA(
feature_dim=args.feature_dim,
word_dim=args.word_dim,
N=args.n_layers,
d_model=args.embd_dim,
d_ff=args.ff_dim,
h=args.n_heads,
dropout=args.dropout,
T=args.max_feats,
Q=args.qmax_words,
baseline=args.baseline,
)
model.cuda()
logging.info("Using {} GPUs".format(torch.cuda.device_count()))
# Load pretrain path
model = nn.DataParallel(model)
if args.pretrain_path != "":
model.load_state_dict(torch.load(args.pretrain_path))
logging.info(f"Loaded checkpoint {args.pretrain_path}")
logging.info(
f"Nb of trainable params:{sum(p.numel() for p in model.parameters() if p.requires_grad)}"
)
# Dataloaders
features = torch.load(args.features_path)
(
train_dataset,
train_loader,
val_dataset,
val_loader,
test_dataset,
test_loader,
) = get_videoqa_loaders(args, features, a2id, bert_tokenizer)
logging.info("number of train instances: {}".format(len(train_loader.dataset)))
logging.info("number of val instances: {}".format(len(val_loader.dataset)))
logging.info("number of test instances: {}".format(len(test_loader.dataset)))
# Loss + Optimizer
if args.dataset == "ivqa":
criterion = LogSoftmax(dim=1)
else:
criterion = nn.CrossEntropyLoss()
params_for_optimization = list(p for p in model.parameters() if p.requires_grad)
optimizer = optim.Adam(
params_for_optimization, lr=args.lr, weight_decay=args.weight_decay
)
criterion.cuda()
# Training
if not args.test:
scheduler = get_cosine_schedule_with_warmup(
optimizer, 0, len(train_loader) * args.epochs
)
logging.info(
f"Set cosine schedule with {len(train_loader) * args.epochs} iterations"
)
eval(model, test_loader, a2v, args, test=True) # zero-shot VideoQA
best_val_acc = -float("inf")
best_epoch = 0
for epoch in range(args.epochs):
train(model, train_loader, a2v, optimizer, criterion, scheduler, epoch, args)
val_acc = eval(model, val_loader, a2v, args, test=False)
if val_acc > best_val_acc:
best_val_acc = val_acc
best_epoch = epoch
torch.save(
model.state_dict(), os.path.join(args.save_dir, "best_model.pth")
)
logging.info(f"Best val model at epoch {best_epoch + 1}")
model.load_state_dict(
torch.load(
os.path.join(args.checkpoint_predir, args.checkpoint_dir, "best_model.pth")
)
)
# Evaluate on test set
eval(model, test_loader, a2v, args, test=True) | main_videoqa.py | import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
import random
import os
import logging
from transformers import get_cosine_schedule_with_warmup, DistilBertTokenizer
from args import get_args
from model.multimodal_transformer import MMT_VideoQA
from loss import LogSoftmax
from util import compute_a2v
from train.train_videoqa import train, eval
from data.videoqa_loader import get_videoqa_loaders
# args, logging
args = get_args()
if not (os.path.isdir(args.save_dir)):
os.mkdir(os.path.join(args.save_dir))
logging.basicConfig(
level=logging.INFO, format="%(asctime)s %(levelname)-8s %(message)s"
)
logFormatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")
rootLogger = logging.getLogger()
fileHandler = logging.FileHandler(os.path.join(args.save_dir, "stdout.log"), "w+")
fileHandler.setFormatter(logFormatter)
rootLogger.addHandler(fileHandler)
logging.info(args)
# set random seeds
torch.manual_seed(args.seed)
np.random.seed(args.seed)
random.seed(args.seed)
# get answer embeddings
bert_tokenizer = DistilBertTokenizer.from_pretrained("distilbert-base-uncased")
a2id, id2a, a2v = None, None, None
if not args.mc:
a2id, id2a, a2v = compute_a2v(
vocab_path=args.vocab_path,
bert_tokenizer=bert_tokenizer,
amax_words=args.amax_words,
)
logging.info(f"Length of Answer Vocabulary: {len(a2id)}")
# Model
model = MMT_VideoQA(
feature_dim=args.feature_dim,
word_dim=args.word_dim,
N=args.n_layers,
d_model=args.embd_dim,
d_ff=args.ff_dim,
h=args.n_heads,
dropout=args.dropout,
T=args.max_feats,
Q=args.qmax_words,
baseline=args.baseline,
)
model.cuda()
logging.info("Using {} GPUs".format(torch.cuda.device_count()))
# Load pretrain path
model = nn.DataParallel(model)
if args.pretrain_path != "":
model.load_state_dict(torch.load(args.pretrain_path))
logging.info(f"Loaded checkpoint {args.pretrain_path}")
logging.info(
f"Nb of trainable params:{sum(p.numel() for p in model.parameters() if p.requires_grad)}"
)
# Dataloaders
features = torch.load(args.features_path)
(
train_dataset,
train_loader,
val_dataset,
val_loader,
test_dataset,
test_loader,
) = get_videoqa_loaders(args, features, a2id, bert_tokenizer)
logging.info("number of train instances: {}".format(len(train_loader.dataset)))
logging.info("number of val instances: {}".format(len(val_loader.dataset)))
logging.info("number of test instances: {}".format(len(test_loader.dataset)))
# Loss + Optimizer
if args.dataset == "ivqa":
criterion = LogSoftmax(dim=1)
else:
criterion = nn.CrossEntropyLoss()
params_for_optimization = list(p for p in model.parameters() if p.requires_grad)
optimizer = optim.Adam(
params_for_optimization, lr=args.lr, weight_decay=args.weight_decay
)
criterion.cuda()
# Training
if not args.test:
scheduler = get_cosine_schedule_with_warmup(
optimizer, 0, len(train_loader) * args.epochs
)
logging.info(
f"Set cosine schedule with {len(train_loader) * args.epochs} iterations"
)
eval(model, test_loader, a2v, args, test=True) # zero-shot VideoQA
best_val_acc = -float("inf")
best_epoch = 0
for epoch in range(args.epochs):
train(model, train_loader, a2v, optimizer, criterion, scheduler, epoch, args)
val_acc = eval(model, val_loader, a2v, args, test=False)
if val_acc > best_val_acc:
best_val_acc = val_acc
best_epoch = epoch
torch.save(
model.state_dict(), os.path.join(args.save_dir, "best_model.pth")
)
logging.info(f"Best val model at epoch {best_epoch + 1}")
model.load_state_dict(
torch.load(
os.path.join(args.checkpoint_predir, args.checkpoint_dir, "best_model.pth")
)
)
# Evaluate on test set
eval(model, test_loader, a2v, args, test=True) | 0.592195 | 0.12297 |
import hashlib
from datetime import datetime

import discord
from discord.ext import commands
def check_channel(channel: discord.abc.Messageable) -> bool:
    """Tell whether NSFW content may be shown in *channel*.

    Guild text channels carry an explicit NSFW flag; every other
    messageable (DMs, group chats) is treated as unrestricted.
    """
    is_guild_text = isinstance(channel, discord.TextChannel)
    return channel.is_nsfw() if is_guild_text else True
def sha(message: str) -> str:
    """Return the SHA-256 hex digest (64 hex chars) of *message*.

    The previous hand-rolled bit-string implementation did not actually
    compute SHA-256:

    * ``bin(int(hexstring, 16))`` stripped the leading zero bits of the
      input (every ASCII byte starts with 0), corrupting the padding and
      crashing with ``ValueError`` on the empty string;
    * the compression loop folded ``[a..h][i]`` (the message-block index)
      instead of ``[a..h][j]`` back into the hash state;
    * ``hex(...)[2:]`` dropped leading zeros, producing variable-length
      output.

    Delegate to the standard library instead, keeping the original
    contract that non-``str`` input raises ``TypeError``.

    Args:
        message: the text to hash (encoded as UTF-8).

    Returns:
        The lowercase hexadecimal SHA-256 digest.

    Raises:
        TypeError: if *message* is not a ``str``.
    """
    if not isinstance(message, str):
        raise TypeError("Message must be of type str")
    return hashlib.sha256(message.encode("utf-8")).hexdigest()
class Pic:
    """Lightweight stand-in for an image returned by the API."""

    def __init__(self, url, tag) -> None:
        """Remember the image URL and the tag it was fetched for."""
        self.url, self.tag = url, tag
class PicError:
    """Sentinel object returned when an image lookup fails."""

    # Fixed HTTP-style status used by image_sender's error path.
    def __init__(self) -> None:
        """Create the error with code 404."""
        self.code = 404
class Redditwrapper:
    """Wrap a reddit post in a class structure."""

    def __init__(self, json: dict):
        """Populate attributes from an API payload.

        Error payloads (an ``error`` key, or a bare ``detail`` string)
        only set the error attributes; successful payloads expose the
        post's fields instead.
        """
        if json.get("error"):
            # Structured error object from the API.
            self.error = json["error"]
            self.code = json.get("code")
            self.message = json.get("message", "Error")
            return
        if json.get("detail"):
            # Some failures come back as just a "detail" string.
            self.error = True
            self.message = json["detail"]
            return
        # Successful post payload; vote/comment counts default to 0.
        self.title = json["title"]
        self.image_url = json["image_url"]
        self.source = json["source"]
        self.subreddit = json["subreddit"]
        self.upvotes = json.get("upvotes", 0)
        self.downvotes = json.get("downvotes", 0)
        self.comments = json.get("comments", 0)
        self.created_at = json["created_at"]
        self.nsfw = json["nsfw"]
        self.author = json["author"]
class Images(commands.Cog):  # Thanks KSoft.si
    """Commands to get random images.

    You can try using the nsfw command, if you dare
    """

    def __init__(self, bot: commands.Bot) -> None:
        """Initialize Images.

        Relies on the bot exposing ``aio_session`` (aiohttp session),
        ``ksoft_client``, ``log_channel`` and ``httpcat`` — assumed to be
        set up elsewhere; TODO confirm against the bot class.
        """
        self.bot = bot

    @commands.command(ignore_extra=True)
    async def dab(self, ctx: commands.Context) -> None:
        """Get a random dab image."""
        await self.image_sender(ctx, await self.rand_im("dab"))

    @commands.command(ignore_extra=True)
    async def doge(self, ctx: commands.Context) -> None:
        """Get a random doge image."""
        await self.image_sender(ctx, await self.rand_im("doge"))

    @commands.command(ignore_extra=True)
    async def fbi(self, ctx: commands.Context) -> None:
        """Get a random FBI image."""
        await self.image_sender(ctx, await self.rand_im("fbi"))

    @commands.command(ignore_extra=True, hidden=True)
    @commands.is_nsfw()
    async def hentai(self, ctx: commands.Context) -> None:
        """Get a random hentai image."""
        await self.image_sender(ctx, await self.rand_im("hentai", True))

    @commands.command(ignore_extra=True, hidden=True)
    @commands.is_nsfw()
    async def hentai_gif(self, ctx: commands.Context) -> None:
        """Get a random hentai GIF."""
        await self.image_sender(ctx, await self.rand_im("hentai_gif", True))

    @commands.command(ignore_extra=True)
    async def hug(self, ctx: commands.Context) -> None:
        """Get a random hug image."""
        await self.image_sender(ctx, await self.rand_im("hug"))

    @commands.command(ignore_extra=True)
    async def kappa(self, ctx: commands.Context) -> None:
        """Get a random kappa image."""
        await self.image_sender(ctx, await self.rand_im("kappa"))

    @commands.command()
    async def koala(self, ctx) -> None:
        """Get a random picture of a koala."""
        async with self.bot.aio_session.get(
                "https://some-random-api.ml/img/koala") as resp:
            if resp.status == 200:
                data = await resp.json()
                embed = discord.Embed(title="Random Koala",
                                      colour=discord.Colour.gold())
                embed.set_image(url=data["link"])
                await ctx.send(embed=embed)
            else:
                # Report the failure to the user and the bot's log channel.
                await ctx.send("Something went wrong.")
                await self.bot.log_channel.send(f"Code {resp.status} in koala")

    @commands.command(ignore_extra=True)
    async def kiss(self, ctx: commands.Context) -> None:
        """Get a random kiss image."""
        await self.image_sender(ctx, await self.rand_im("kiss"))

    @commands.command(ignore_extra=True, hidden=True, aliases=["im_nsfw"])
    async def image_nsfw(self, ctx: commands.Context) -> None:
        """Retrieve the list of all available NSFW tags."""
        tag_list = await self.bot.ksoft_client.images.tags()
        embed = discord.Embed(timestamp=datetime.utcnow(),
                              colour=discord.Colour.random())
        embed.add_field(name="NSFW tags", value="\n".join(tag_list.nsfw_tags))
        embed.set_author(name=ctx.author.display_name,
                         icon_url=str(ctx.author.avatar_url))
        await ctx.send(embed=embed)

    @commands.command()
    async def monster(self, ctx: commands.Context, *, hashed: str) -> None:
        """Get a monster image from an input."""
        # robohash set2 renders monsters from an arbitrary hash string.
        embed = discord.Embed(title=hashed)
        embed.set_image(url=f"https://robohash.org/{sha(hashed)}.png?set=set2")
        await ctx.send(embed=embed)

    @commands.command(hidden=True, ignore_extra=True)
    @commands.is_nsfw()
    async def neko(self, ctx: commands.Context) -> None:
        """Get a random neko image."""
        await self.image_sender(ctx, await self.rand_im("neko", nsfw=True))

    @commands.command(hidden=True, ignore_extra=True)
    @commands.is_nsfw()
    async def nsfw(self, ctx: commands.Context) -> None:
        """Retrieve random NSFW pics.

        To find the other NSFW commands :
        use im_nsfw or reddit with an NSFW subreddit
        """
        async with self.bot.aio_session.get(
                "https://api.ksoft.si/images/random-nsfw",
                headers={
                    "Authorization": f"Bearer {self.bot.ksoft_client.api_key}"
                },
        ) as response:
            await self.reddit_sender(
                ctx,
                Redditwrapper(await response.json()),
            )

    @commands.command(ignore_extra=True)
    async def meme(self, ctx: commands.Context) -> None:
        """Retrieve a random meme from Reddit."""
        await self.reddit_sender(
            ctx,
            await self.bot.ksoft_client.images.random_meme(),
        )

    @commands.command(ignore_extra=True)
    async def pat(self, ctx: commands.Context) -> None:
        """Get a random pat image."""
        await self.image_sender(ctx, await self.rand_im("pat"))

    @commands.command()
    async def panda(self, ctx) -> None:
        """Get a random picture of a panda."""
        async with self.bot.aio_session.get(
                "https://some-random-api.ml/img/panda") as resp:
            if resp.status == 200:
                data = await resp.json()
                embed = discord.Embed(
                    title="Random Panda!",
                    colour=discord.Colour.gold(),
                )
                embed.set_image(url=data["link"])
                await ctx.send(embed=embed)
            else:
                await ctx.send(f"Something went boom! :( [CODE: {resp.status}]"
                               )
                await self.bot.log_channel.send(f"Code {resp.status} in panda")

    @commands.command(ignore_extra=True)
    async def pepe(self, ctx: commands.Context) -> None:
        """Get a random pepe image."""
        await self.image_sender(ctx, await self.rand_im("pepe"))

    @commands.command()
    async def reddit(self, ctx: commands.Context, subreddit: str) -> None:
        """Retrieve images from the specified subreddit.

        This command may return NSFW results only in NSFW channels
        """
        # Accept both "funny" and "r/funny".
        sub = subreddit.split("r/")[-1]
        try:
            async with self.bot.aio_session.get(
                    f"https://api.ksoft.si/images/rand-reddit/{sub}",
                    headers={
                        "Authorization":
                        (f"Bearer {self.bot.ksoft_client.api_key}")
                    },
                    params={
                        # NSFW posts are filtered out unless the invoking
                        # channel allows them.
                        "remove_nsfw": str(not check_channel(ctx.channel)),
                        "span": "week",
                    },
            ) as response:
                await self.reddit_sender(
                    ctx,
                    Redditwrapper(await response.json()),
                )
        except Exception:
            # Best-effort: any API/parse failure is reported the same way.
            await ctx.send("Subreddit not found")

    @commands.command()
    async def robot(self, ctx: commands.Context, *, hashed: str) -> None:
        """Get a robot image from an input."""
        embed = discord.Embed(title=hashed)
        embed.set_image(url=f"https://robohash.org/{sha(hashed)}.png?set=set1")
        await ctx.send(embed=embed)

    @commands.command(ignore_extra=True)
    async def tickle(self, ctx: commands.Context) -> None:
        """Get a random tickle image."""
        await self.image_sender(ctx, await self.rand_im("tickle"))

    @commands.command(ignore_extra=True)
    async def wikihow(self, ctx: commands.Context) -> None:
        """Retrieve a weird image from WikiHow."""
        image = await self.bot.ksoft_client.images.random_wikihow()
        embed = discord.Embed(
            title=image.title,
            url=image.article_url,
            colour=discord.Colour.blue(),
        )
        embed.set_image(url=image.url)
        await ctx.send(embed=embed)

    async def rand_im(self, tag: str, nsfw: bool = False) -> object:
        """Fetch a random image for *tag*; return a PicError on failure."""
        try:
            return await self.bot.ksoft_client.images.random_image(tag=tag,
                                                                   nsfw=nsfw)
        except Exception:
            return PicError()

    async def image_sender(self, ctx: commands.Context, image) -> None:
        """Embeds an image then sends it."""
        if hasattr(image, "code"):
            # BUGFIX: PicError exposes `code` as an attribute; the old
            # `image["code"]` subscript raised TypeError on every failure.
            await self.bot.httpcat(ctx, image.code)
            return
        if not image.url:
            await self.bot.httpcat(ctx, 404)
            return
        embed = discord.Embed(
            title=image.tag,
            timestamp=datetime.utcnow(),
            colour=discord.Colour.blue(),
        )
        embed.set_image(url=image.url)
        await ctx.send(embed=embed)

    async def reddit_sender(self, ctx: commands.Context, image) -> None:
        """Embeds a Reddit image then sends it."""
        if hasattr(image, "error"):
            await ctx.send(image.message)
            return
        embed = discord.Embed(
            title=image.title,
            url=image.source,
            # NOTE(review): fromtimestamp is naive/local-time — confirm the
            # API's created_at epoch is meant to be local.
            timestamp=datetime.fromtimestamp(image.created_at),
            colour=discord.Colour.blue(),
        )
        if not image.image_url:
            await self.bot.httpcat(ctx, 404)
            return
        embed.set_image(url=image.image_url)
        embed.set_footer(text=(f"👍 {image.upvotes} | 👎 {image.downvotes} |"
                               f" 💬 {image.comments}"))
        embed.set_author(
            name=f"Posted by {image.author} in {image.subreddit}",
            icon_url="https://i.redd.it/qupjfpl4gvoy.jpg",
            # NOTE(review): assumes image.author already includes a leading
            # path segment (e.g. "/u/name") — verify against the API.
            url="https://reddit.com" + image.author,
        )
        await ctx.send(embed=embed)
def setup(bot: commands.Bot) -> None:
    """Load the Images cog (discord.py extension entry point)."""
    # The original line carried stray "| cogs/image.py | ..." join residue,
    # which made it a syntax error; only the add_cog call belongs here.
    bot.add_cog(Images(bot))
import discord
from discord.ext import commands
def check_channel(channel: discord.abc.Messageable) -> bool:
    """Tell whether NSFW content may be shown in *channel*.

    Guild text channels carry an explicit NSFW flag; every other
    messageable (DMs, group chats) is treated as unrestricted.
    """
    is_guild_text = isinstance(channel, discord.TextChannel)
    return channel.is_nsfw() if is_guild_text else True
def sha(message: str) -> str:
    """Return the SHA-256 hex digest (64 hex chars) of *message*.

    The previous hand-rolled bit-string implementation did not actually
    compute SHA-256:

    * ``bin(int(hexstring, 16))`` stripped the leading zero bits of the
      input (every ASCII byte starts with 0), corrupting the padding and
      crashing with ``ValueError`` on the empty string;
    * the compression loop folded ``[a..h][i]`` (the message-block index)
      instead of ``[a..h][j]`` back into the hash state;
    * ``hex(...)[2:]`` dropped leading zeros, producing variable-length
      output.

    Delegate to the standard library instead, keeping the original
    contract that non-``str`` input raises ``TypeError``.

    Args:
        message: the text to hash (encoded as UTF-8).

    Returns:
        The lowercase hexadecimal SHA-256 digest.

    Raises:
        TypeError: if *message* is not a ``str``.
    """
    if not isinstance(message, str):
        raise TypeError("Message must be of type str")
    return hashlib.sha256(message.encode("utf-8")).hexdigest()
class Pic:
    """Lightweight stand-in for an image returned by the API."""

    def __init__(self, url, tag) -> None:
        """Remember the image URL and the tag it was fetched for."""
        self.url, self.tag = url, tag
class PicError:
    """Sentinel object returned when an image lookup fails."""

    # Fixed HTTP-style status used by image_sender's error path.
    def __init__(self) -> None:
        """Create the error with code 404."""
        self.code = 404
class Redditwrapper:
    """Wrap a reddit post in a class structure."""

    def __init__(self, json: dict):
        """Populate attributes from an API payload.

        Error payloads (an ``error`` key, or a bare ``detail`` string)
        only set the error attributes; successful payloads expose the
        post's fields instead.
        """
        if json.get("error"):
            # Structured error object from the API.
            self.error = json["error"]
            self.code = json.get("code")
            self.message = json.get("message", "Error")
            return
        if json.get("detail"):
            # Some failures come back as just a "detail" string.
            self.error = True
            self.message = json["detail"]
            return
        # Successful post payload; vote/comment counts default to 0.
        self.title = json["title"]
        self.image_url = json["image_url"]
        self.source = json["source"]
        self.subreddit = json["subreddit"]
        self.upvotes = json.get("upvotes", 0)
        self.downvotes = json.get("downvotes", 0)
        self.comments = json.get("comments", 0)
        self.created_at = json["created_at"]
        self.nsfw = json["nsfw"]
        self.author = json["author"]
class Images(commands.Cog): # Thanks KSoft.si
"""Commands to get random images.
You can try using the nsfw command, if you dare
"""
def __init__(self, bot: commands.Bot) -> None:
"""Initialize Images."""
self.bot = bot
@commands.command(ignore_extra=True)
async def dab(self, ctx: commands.Context) -> None:
"""Get a random dab image."""
await self.image_sender(ctx, await self.rand_im("dab"))
@commands.command(ignore_extra=True)
async def doge(self, ctx: commands.Context) -> None:
"""Get a random doge image."""
await self.image_sender(ctx, await self.rand_im("doge"))
@commands.command(ignore_extra=True)
async def fbi(self, ctx: commands.Context) -> None:
"""Get a random FBI image."""
await self.image_sender(ctx, await self.rand_im("fbi"))
@commands.command(ignore_extra=True, hidden=True)
@commands.is_nsfw()
async def hentai(self, ctx: commands.Context) -> None:
"""Get a random hentai image."""
await self.image_sender(ctx, await self.rand_im("hentai", True))
@commands.command(ignore_extra=True, hidden=True)
@commands.is_nsfw()
async def hentai_gif(self, ctx: commands.Context) -> None:
"""Get a random hentai GIF."""
await self.image_sender(ctx, await self.rand_im("hentai_gif", True))
@commands.command(ignore_extra=True)
async def hug(self, ctx: commands.Context) -> None:
"""Get a random hug image."""
await self.image_sender(ctx, await self.rand_im("hug"))
@commands.command(ignore_extra=True)
async def kappa(self, ctx: commands.Context) -> None:
"""Get a random kappa image."""
await self.image_sender(ctx, await self.rand_im("kappa"))
@commands.command()
async def koala(self, ctx) -> None:
"""Get a random picture of a koala."""
async with self.bot.aio_session.get(
"https://some-random-api.ml/img/koala") as resp:
if resp.status == 200:
data = await resp.json()
embed = discord.Embed(title="Random Koala",
colour=discord.Colour.gold())
embed.set_image(url=data["link"])
await ctx.send(embed=embed)
else:
await ctx.send("Something went wrong.")
await self.bot.log_channel.send(f"Code {resp.status} in koala")
@commands.command(ignore_extra=True)
async def kiss(self, ctx: commands.Context) -> None:
"""Get a random kiss image."""
await self.image_sender(ctx, await self.rand_im("kiss"))
@commands.command(ignore_extra=True, hidden=True, aliases=["im_nsfw"])
async def image_nsfw(self, ctx: commands.Context) -> None:
"""Retrieve the list of all available NSFW tags."""
tag_list = await self.bot.ksoft_client.images.tags()
embed = discord.Embed(timestamp=datetime.utcnow(),
colour=discord.Colour.random())
embed.add_field(name="NSFW tags", value="\n".join(tag_list.nsfw_tags))
embed.set_author(name=ctx.author.display_name,
icon_url=str(ctx.author.avatar_url))
await ctx.send(embed=embed)
@commands.command()
async def monster(self, ctx: commands.Context, *, hashed: str) -> None:
"""Get a monster image from an input."""
embed = discord.Embed(title=hashed)
embed.set_image(url=f"https://robohash.org/{sha(hashed)}.png?set=set2")
await ctx.send(embed=embed)
@commands.command(hidden=True, ignore_extra=True)
@commands.is_nsfw()
async def neko(self, ctx: commands.Context) -> None:
"""Get a random neko image."""
await self.image_sender(ctx, await self.rand_im("neko", nsfw=True))
@commands.command(hidden=True, ignore_extra=True)
@commands.is_nsfw()
async def nsfw(self, ctx: commands.Context) -> None:
"""Retrieve random NSFW pics.
To find the other NSFW commands :
use im_nsfw or reddit with an NSFW subreddit
"""
async with self.bot.aio_session.get(
"https://api.ksoft.si/images/random-nsfw",
headers={
"Authorization": f"Bearer {self.bot.ksoft_client.api_key}"
},
) as response:
await self.reddit_sender(
ctx,
Redditwrapper(await response.json()),
)
@commands.command(ignore_extra=True)
async def meme(self, ctx: commands.Context) -> None:
    """Retrieve a random meme from Reddit."""
    meme_post = await self.bot.ksoft_client.images.random_meme()
    await self.reddit_sender(ctx, meme_post)
@commands.command(ignore_extra=True)
async def pat(self, ctx: commands.Context) -> None:
    """Get a random pat image."""
    image = await self.rand_im("pat")
    await self.image_sender(ctx, image)
@commands.command()
async def panda(self, ctx: commands.Context) -> None:
    """Get a random picture of a panda."""
    # Fixed: ctx was the only un-annotated context parameter in this cog;
    # annotated for consistency with every sibling command.
    # some-random-api returns {"link": <image url>} on HTTP 200.
    async with self.bot.aio_session.get(
            "https://some-random-api.ml/img/panda") as resp:
        if resp.status == 200:
            data = await resp.json()
            embed = discord.Embed(
                title="Random Panda!",
                colour=discord.Colour.gold(),
            )
            embed.set_image(url=data["link"])
            await ctx.send(embed=embed)
        else:
            # Tell the user, then report the failure to the log channel.
            await ctx.send(f"Something went boom! :( [CODE: {resp.status}]"
                           )
            await self.bot.log_channel.send(f"Code {resp.status} in panda")
@commands.command(ignore_extra=True)
async def pepe(self, ctx: commands.Context) -> None:
    """Get a random pepe image."""
    image = await self.rand_im("pepe")
    await self.image_sender(ctx, image)
@commands.command()
async def reddit(self, ctx: commands.Context, subreddit: str) -> None:
    """Retrieve images from the specified subreddit.
    This command may return NSFW results only in NSFW channels
    """
    # Accept both "sub" and "r/sub" spellings.
    sub = subreddit.split("r/")[-1]
    headers = {
        "Authorization":
        (f"Bearer {self.bot.ksoft_client.api_key}")
    }
    params = {
        "remove_nsfw": str(not check_channel(ctx.channel)),
        "span": "week",
    }
    try:
        async with self.bot.aio_session.get(
            f"https://api.ksoft.si/images/rand-reddit/{sub}",
            headers=headers,
            params=params,
        ) as response:
            await self.reddit_sender(
                ctx,
                Redditwrapper(await response.json()),
            )
    except Exception:
        await ctx.send("Subreddit not found")
@commands.command()
async def robot(self, ctx: commands.Context, *, hashed: str) -> None:
    """Get a robot image from an input."""
    image_url = f"https://robohash.org/{sha(hashed)}.png?set=set1"
    embed = discord.Embed(title=hashed)
    embed.set_image(url=image_url)
    await ctx.send(embed=embed)
@commands.command(ignore_extra=True)
async def tickle(self, ctx: commands.Context) -> None:
    """Get a random tickle image."""
    image = await self.rand_im("tickle")
    await self.image_sender(ctx, image)
@commands.command(ignore_extra=True)
async def wikihow(self, ctx: commands.Context) -> None:
    """Retrieve a weird image from WikiHow."""
    article = await self.bot.ksoft_client.images.random_wikihow()
    embed = discord.Embed(
        title=article.title,
        url=article.article_url,
        colour=discord.Colour.blue(),
    )
    embed.set_image(url=article.url)
    await ctx.send(embed=embed)
async def rand_im(self, tag: str, nsfw: bool = False) -> object:
    """Random image lol."""
    # Best effort: any API/client failure is collapsed into a PicError
    # sentinel that image_sender knows how to handle.
    try:
        image = await self.bot.ksoft_client.images.random_image(tag=tag,
                                                                nsfw=nsfw)
    except Exception:
        return PicError()
    return image
async def image_sender(self, ctx: commands.Context, image) -> None:
    """Embeds an image then sends it."""
    # Error sentinel: rand_im() returns an object carrying a "code"
    # attribute on failure.
    # NOTE(review): the guard checks for an *attribute* but the lookup
    # below subscripts image["code"] -- confirm the error object is
    # dict-like, otherwise this line raises TypeError.
    if hasattr(image, "code"):
        await self.bot.httpcat(ctx, image["code"])
        return
    # No URL at all: show an HTTP-cat 404 instead of an empty embed.
    if not image.url:
        await self.bot.httpcat(ctx, 404)
        return
    embed = discord.Embed(
        title=image.tag,
        timestamp=datetime.utcnow(),
        colour=discord.Colour.blue(),
    )
    embed.set_image(url=image.url)
    await ctx.send(embed=embed)
async def reddit_sender(self, ctx: commands.Context, image) -> None:
    """Embeds a Reddit image then sends it."""
    # API error payloads expose an "error" attribute plus a message.
    if hasattr(image, "error"):
        await ctx.send(image.message)
        return
    embed = discord.Embed(
        title=image.title,
        url=image.source,
        timestamp=datetime.fromtimestamp(image.created_at),
        colour=discord.Colour.blue(),
    )
    if not image.image_url:
        await self.bot.httpcat(ctx, 404)
        return
    embed.set_image(url=image.image_url)
    embed.set_footer(text=(f"👍 {image.upvotes} | 👎 {image.downvotes} |"
                           f" 💬 {image.comments}"))
    # NOTE(review): the author URL is built without a separator --
    # correct only if image.author already begins with "/" or a path
    # prefix; verify against the actual payload format.
    embed.set_author(
        name=f"Posted by {image.author} in {image.subreddit}",
        icon_url="https://i.redd.it/qupjfpl4gvoy.jpg",
        url="https://reddit.com" + image.author,
    )
    await ctx.send(embed=embed)
def setup(bot: commands.Bot) -> None:
    """Load the Images cog."""
    cog = Images(bot)
    bot.add_cog(cog)
import pycurl
import sys
import json
from StringIO import StringIO
def getswitchesDPID():
    # GET the datapath IDs of all switches known to the Ryu controller's
    # REST API and print the raw JSON response to stdout.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/switches")
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getSwitchDetails(dpID):
    # GET the description of switch *dpID* (manufacturer, sw/hw version)
    # from the Ryu REST API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/desc/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getFlowDetails(dpID):
    # GET the flow-table entries of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/flow/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getPortDetails(dpID):
    # GET per-port traffic statistics of switch *dpID* from the Ryu REST
    # API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/port/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getPortdescDetails(dpID):
    # GET the port descriptions (name, state, speed) of switch *dpID*
    # from the Ryu REST API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/portdesc/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getTableDetails(dpID):
    # GET the table statistics of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/table/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getGroupDetails(dpID):
    # GET the group statistics of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/group/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def deleteflow(dpID):
response_buffer = StringIO()
curl = pycurl.Curl()
curl.setopt(curl.URL, "http://localhost:8080/tats/flowentry/clear/" + str(dpID))
curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
curl.perform()
curl.close()
val = response_buffer.getvalue()
print val
def addFlow(str):
#!print "hi"
print str
arr={}
strArr=str.split(",")
for aStr in strArr:
tmpArr=aStr.split(":")
arr[tmpArr[0]]=tmpArr[1]
data = json.dumps({
"dpid": arr["dpid"],
"cookie": arr["cookie"],
"cookie_mask": arr["cookie_mask"],
"table_id": arr["table_id"],
"idle_timeout": arr["idle_timeout"],
"hard_timeout": arr["hard_timeout"],
"priority": arr["priority"],
"flags": arr["flags"],
"actions":[
{
"type": arr["type"],
"port": 2
}
],
})
print data
curl = pycurl.Curl()
curl.setopt(curl.URL, "http://localhost:8080/stats/flowentry/add")
curl.setopt(curl.HTTPHEADER, ['Accept: application/json'])
curl.setopt(curl.POST, 1)
curl.setopt(curl.POSTFIELDS, data)
curl.perform()
curl.close()
# ---------------------------------------------------------------------------
# Command-line dispatch: argv[1] selects the function; argv[2], when
# present, is the datapath ID (or the flow spec for addFlow).
# Fixed: the len==2 / len==3 branches were duplicates, and deleteflow was
# defined but unreachable; a dispatch table replaces the elif chain.
# ---------------------------------------------------------------------------
ryuFunction = ""
if len(sys.argv) in (2, 3):
    ryuFunction = sys.argv[1]
if ryuFunction == "getswitchesDPID":
    getswitchesDPID()
elif len(sys.argv) == 3:
    dispatch = {
        "getSwitchDetails": getSwitchDetails,
        "addFlow": addFlow,
        "getFlowDetails": getFlowDetails,
        "getTableDetails": getTableDetails,
        "getPortDetails": getPortDetails,
        "getPortdescDetails": getPortdescDetails,
        "getGroupDetails": getGroupDetails,
        "deleteflow": deleteflow,
    }
    if ryuFunction in dispatch:
        dispatch[ryuFunction](sys.argv[2])
import sys
import json
from StringIO import StringIO
def getswitchesDPID():
    # GET the datapath IDs of all switches known to the Ryu controller's
    # REST API and print the raw JSON response to stdout.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/switches")
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getSwitchDetails(dpID):
    # GET the description of switch *dpID* (manufacturer, sw/hw version)
    # from the Ryu REST API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/desc/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getFlowDetails(dpID):
    # GET the flow-table entries of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/flow/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getPortDetails(dpID):
    # GET per-port traffic statistics of switch *dpID* from the Ryu REST
    # API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/port/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getPortdescDetails(dpID):
    # GET the port descriptions (name, state, speed) of switch *dpID*
    # from the Ryu REST API and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/portdesc/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getTableDetails(dpID):
    # GET the table statistics of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/table/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def getGroupDetails(dpID):
    # GET the group statistics of switch *dpID* from the Ryu REST API
    # and print the raw response.
    response_buffer = StringIO()
    curl = pycurl.Curl()
    curl.setopt(curl.URL, "http://localhost:8080/stats/group/" + str(dpID))
    curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
    curl.perform()
    curl.close()
    val = response_buffer.getvalue()
    print val
def deleteflow(dpID):
response_buffer = StringIO()
curl = pycurl.Curl()
curl.setopt(curl.URL, "http://localhost:8080/tats/flowentry/clear/" + str(dpID))
curl.setopt(curl.WRITEFUNCTION, response_buffer.write)
curl.perform()
curl.close()
val = response_buffer.getvalue()
print val
def addFlow(str):
#!print "hi"
print str
arr={}
strArr=str.split(",")
for aStr in strArr:
tmpArr=aStr.split(":")
arr[tmpArr[0]]=tmpArr[1]
data = json.dumps({
"dpid": arr["dpid"],
"cookie": arr["cookie"],
"cookie_mask": arr["cookie_mask"],
"table_id": arr["table_id"],
"idle_timeout": arr["idle_timeout"],
"hard_timeout": arr["hard_timeout"],
"priority": arr["priority"],
"flags": arr["flags"],
"actions":[
{
"type": arr["type"],
"port": 2
}
],
})
print data
curl = pycurl.Curl()
curl.setopt(curl.URL, "http://localhost:8080/stats/flowentry/add")
curl.setopt(curl.HTTPHEADER, ['Accept: application/json'])
curl.setopt(curl.POST, 1)
curl.setopt(curl.POSTFIELDS, data)
curl.perform()
curl.close()
# ---------------------------------------------------------------------------
# Command-line dispatch: argv[1] selects the function; argv[2], when
# present, is the datapath ID (or the flow spec for addFlow).
# Fixed: the len==2 / len==3 branches were duplicates, and deleteflow was
# defined but unreachable; a dispatch table replaces the elif chain.
# ---------------------------------------------------------------------------
ryuFunction = ""
if len(sys.argv) in (2, 3):
    ryuFunction = sys.argv[1]
if ryuFunction == "getswitchesDPID":
    getswitchesDPID()
elif len(sys.argv) == 3:
    dispatch = {
        "getSwitchDetails": getSwitchDetails,
        "addFlow": addFlow,
        "getFlowDetails": getFlowDetails,
        "getTableDetails": getTableDetails,
        "getPortDetails": getPortDetails,
        "getPortdescDetails": getPortdescDetails,
        "getGroupDetails": getGroupDetails,
        "deleteflow": deleteflow,
    }
    if ryuFunction in dispatch:
        dispatch[ryuFunction](sys.argv[2])
import miio
import miio.exceptions
import miio.airhumidifier
import paho.mqtt.client
import urllib.parse
import sys
import yaml
import argparse
import os
import json
import copy
import typing
import datetime
# Command-line / configuration bootstrap: everything else is read from a
# YAML config file (default: ./config.yml).
_parser = argparse.ArgumentParser("Bridge python-miio xiaomi device to mqtt")
_parser.add_argument("--config", default=os.path.join(os.getcwd(), "config.yml"))
_args = _parser.parse_args()
_config = yaml.safe_load(open(_args.config, "rb").read())
def eprint(*args, **kwargs):
    """print() wrapper that writes to stderr instead of stdout."""
    print(*args, file=sys.stderr, **kwargs)
# simplify_dict(): strips noisy keys from status dicts, but only when the
# "simplify" option is enabled in the config; otherwise it is a no-op.
if _config["simplify"]:
    def simplify_dict(obj, prop: str):
        """Remove *prop* from *obj* when present."""
        obj.pop(prop, None)
else:
    def simplify_dict(obj, prop: str):
        """Simplification disabled: leave *obj* untouched."""
        pass
class StdoutBackend:
    """Output backend that prints each report to standard output."""

    def output(self, topic: str, value: dict):
        payload = json.dumps(value)
        print(f"{topic}: {payload}")
class PahoMqttBackend:
    """Output backend that publishes each report to MQTT (retained)."""

    def __init__(self, client: paho.mqtt.client.Client):
        self._client = client

    def output(self, topic: str, value: dict):
        payload = json.dumps(value)
        self._client.publish(topic, payload, retain=True)

    def subcribe_to_control(self, topic: str, command_handler):
        # NOTE(review): the (misspelled) method name is kept because
        # callers depend on it.
        self._client.message_callback_add(topic, command_handler)
        self._client.subscribe(topic)
# Registered output backends; _mqtt_backends is the subset whose network
# loop must be serviced and which can subscribe to control topics.
_all_backends = []
_mqtt_backends = []  # typing.List[PahoMqttBackend]
def prepare_backends(
    all_backends, mqtt_backends: typing.List[PahoMqttBackend], backends_config
):
    """Instantiate one backend per config entry.

    The literal "stdout" selects the stdout backend; any other entry is
    parsed as an mqtt:// style URL (with credentials) and connected
    immediately.
    """
    for entry in backends_config:
        if entry != "stdout":
            url = urllib.parse.urlparse(entry)
            client = paho.mqtt.client.Client()
            client.username_pw_set(url.username, url.password)
            client.connect(url.hostname)
            backend = PahoMqttBackend(client)
            mqtt_backends.append(backend)
        else:
            print("Adding stdout backend")
            backend = StdoutBackend()
        all_backends.append(backend)
# Connect every configured backend at startup.
prepare_backends(_all_backends, _mqtt_backends, _config["backends"])
class InterfacedDevice:
    """Base class tying one python-miio device to a set of MQTT topics."""

    def __init__(self, miio_device, config):
        self._miio_device = miio_device
        self._config = config
        # Watchdog timestamps, refreshed on every successful round-trip.
        self._last_succesful_report = datetime.datetime.now(datetime.timezone.utc)
        self._last_succesful_control = datetime.datetime.now(datetime.timezone.utc)

    def get_report(self):
        """Subclasses return a report dict, or None on failure."""
        raise NotImplementedError

    def topic(self):
        """Base MQTT topic: <global prefix>/<device topic>."""
        return os.path.join(_config["topic_prefix"], self._config["topic"])

    def _subtopic(self, leaf):
        return os.path.join(self.topic(), leaf)

    def status_topic(self):
        return self._subtopic("status")

    def control_topic(self):
        return self._subtopic("control")

    def error_topic(self):
        return self._subtopic("error")

    def handle_control(self, client, userdata, message: paho.mqtt.client.MQTTMessage):
        """Subclasses react to an incoming MQTT control message."""
        raise NotImplementedError
class InterfacedHumidifier(InterfacedDevice):
    """MQTT-facing wrapper around a Xiaomi air humidifier (CA1 driver).

    Caches the most recent device status in ``_last_status`` so control
    decisions can be made without an extra round-trip to the device.
    """

    def __init__(self, *args, **kwargs):
        super(InterfacedHumidifier, self).__init__(*args, **kwargs)
        # Last successfully fetched status; None until get_report() succeeds.
        self._last_status = None

    def get_report(self):
        """Poll the device and return a report dict, or None on failure."""
        try:
            status: miio.airhumidifier.AirHumidifierStatus = self._miio_device.status()
            self._last_status = status
            data = self.get_humidifier_report(status)
            # Attach the location labels from this device's config entry.
            data["location"] = self._config["location"]
            if "sublocation" in self._config:
                data["sublocation"] = self._config["sublocation"]
            self._last_succesful_report = datetime.datetime.now(datetime.timezone.utc)
            return data
        except (miio.exceptions.DeviceException, OSError) as e:
            # Communication failure: log it and report "no data".
            eprint(e)
            return None

    def get_humidifier_report(self, status):
        """Build the report dict from a raw device status object."""
        data = copy.deepcopy(status.data)
        data["temperature"] = status.temperature
        # Drop noisy fields (each call is a no-op unless the "simplify"
        # config option is enabled).
        simplify_dict(data, "hw_version")
        simplify_dict(data, "temp_dec")
        simplify_dict(data, "use_time")
        simplify_dict(data, "buzzer")
        simplify_dict(data, "child_lock")
        simplify_dict(data, "led_b")
        simplify_dict(data, "limit_hum")
        simplify_dict(data, "speed")
        simplify_dict(data, "dry")
        return data

    def apply_control(self, mdev: miio.airhumidifier.AirHumidifierCA1, control: dict):
        """Apply a control payload; expects {"speed": 0.0..1.0} (missing -> 0)."""
        target_speed = control.get("speed", 0.0)
        print(f"{self.control_topic()}: setting speed = {target_speed} from {control}")
        try:
            self.set_active_control(target_speed, mdev)
            self.set_passive_control(mdev)
            self._last_succesful_control = datetime.datetime.now(datetime.timezone.utc)
        except Exception as e:
            # Best effort: a failed control attempt is logged, not fatal.
            eprint(f"{self.control_topic()}: failed to apply control: ", e)

    def is_tank_empty(self):
        """True when the cached water level is below the configured minimum."""
        if self._last_status is not None:
            return self._last_status.depth < _config["minimal_water_depth"]
        else:
            # No status yet: optimistically assume the tank is not empty.
            return False

    def set_active_control(self, target_speed, mdev):
        # ~0 -> power off; otherwise power on (unless the tank is empty,
        # which forces off) and map thirds of the speed range onto the
        # Silent/Medium/High fan modes. Commands are only issued when the
        # cached state differs, to avoid redundant device traffic.
        if target_speed < 0.05:
            if self._last_status.is_on:
                mdev.off()
        else:
            if self.is_tank_empty():
                # Never run the humidifier dry: force it off instead.
                if self._last_status.is_on:
                    mdev.off()
            else:
                if not self._last_status.is_on:
                    mdev.on()
                if target_speed < 0.33:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.Silent
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.Silent)
                elif target_speed < 0.66:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.Medium
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.Medium)
                elif target_speed < 1.01:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.High
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.High)
                else:
                    # Out-of-range speed (> 1.01): leave the mode alone.
                    pass

    def set_passive_control(self, mdev):
        # Enforce fixed housekeeping settings, again only when the cached
        # state differs from the desired value.
        if self._last_status.child_lock != True:
            mdev.set_child_lock(True)
        if (
            self._last_status.led_brightness is not None
            and self._last_status.led_brightness != miio.airhumidifier.LedBrightness.Dim
        ):
            mdev.set_led_brightness(miio.airhumidifier.LedBrightness.Dim)
        if self._last_status.target_humidity != 80:
            mdev.set_target_humidity(80)
        if self._last_status.buzzer != False:
            mdev.set_buzzer(False)
        if self._last_status.dry != False:
            mdev.set_dry(False)

    def handle_control(self, client, userdata, message: paho.mqtt.client.MQTTMessage):
        """MQTT callback: apply a JSON control payload.

        Ignored until a first status fetch has populated _last_status.
        """
        if self._last_status is None:
            return
        try:
            self.apply_control(self._miio_device, json.loads(message.payload))
        except miio.exceptions.DeviceError as e:
            eprint(e)
# All devices managed by this daemon; filled by prepare_devices().
_interfaced_devices = []
def prepare_devices(device_list, humidifiers_config):
    """Create an InterfacedHumidifier per config entry and wire its
    control topic into every MQTT backend."""
    for cfg in humidifiers_config:
        print(f"Configuring humidifer: {cfg['topic']} from {cfg['ip']}")
        device = miio.airhumidifier.AirHumidifierCA1(
            cfg["ip"], cfg["token"], lazy_discover=True
        )
        humidifier = InterfacedHumidifier(device, cfg)
        device_list.append(humidifier)
        for backend in _mqtt_backends:
            backend.subcribe_to_control(
                humidifier.control_topic(), humidifier.handle_control
            )
# Instantiate and subscribe every configured humidifier.
prepare_devices(_interfaced_devices, _config["humidifiers"])
def send_output(topic: str, data):
    """Publish *data* on *topic* via every backend, then service the MQTT
    network loop once so the publish actually goes out."""
    for backend in _all_backends:
        backend.output(topic, data)
    for backend in _mqtt_backends:
        backend._client.loop()
# Announce the daemon (re)start on every device's error topic so stale
# retained errors are visibly superseded.
for d in _interfaced_devices:
    send_output(d.error_topic(), "Deamon startup")
# Maximum silence tolerated before the watchdog reports a communication
# failure.
communication_time_delta = datetime.timedelta(
    minutes=_config["communication_error_timout_minutes"]
)
# ---------------------------------------------------------------------------
# Main loop: watchdog checks, then a status-report pass per device.
# ---------------------------------------------------------------------------
while True:
    now = datetime.datetime.now(datetime.timezone.utc)
    for d in _interfaced_devices:
        # Watchdog: complain when neither a report nor a control has
        # succeeded within the configured window.
        if (
            now - d._last_succesful_report > communication_time_delta
            or now - d._last_succesful_control > communication_time_delta
        ):
            send_output(
                d.error_topic(),
                # Fixed: ".days" of a minutes-sized timedelta is almost
                # always 0; report the full timedelta instead.
                f"Failed to communicate for {communication_time_delta}",
            )
        # Fixed: "d is InterfacedHumidifier" compared the instance with
        # the class object (always False), so the empty-tank warning
        # never fired; isinstance() is the correct check.
        if isinstance(d, InterfacedHumidifier) and d.is_tank_empty():
            send_output(d.error_topic(), "Water Tank is empty")
    for d in _interfaced_devices:
        report = d.get_report()
        if report is not None:
            send_output(d.status_topic(), report)
    for b in _mqtt_backends:
        b._client.loop(1)
import miio
import miio.exceptions
import miio.airhumidifier
import paho.mqtt.client
import urllib.parse
import sys
import yaml
import argparse
import os
import json
import copy
import typing
import datetime
# Command-line / configuration bootstrap: everything else is read from a
# YAML config file (default: ./config.yml).
_parser = argparse.ArgumentParser("Bridge python-miio xiaomi device to mqtt")
_parser.add_argument("--config", default=os.path.join(os.getcwd(), "config.yml"))
_args = _parser.parse_args()
_config = yaml.safe_load(open(_args.config, "rb").read())
def eprint(*args, **kwargs):
    """print() wrapper that writes to stderr instead of stdout."""
    print(*args, file=sys.stderr, **kwargs)
# simplify_dict(): strips noisy keys from status dicts, but only when the
# "simplify" option is enabled in the config; otherwise it is a no-op.
if _config["simplify"]:
    def simplify_dict(obj, prop: str):
        """Remove *prop* from *obj* when present."""
        obj.pop(prop, None)
else:
    def simplify_dict(obj, prop: str):
        """Simplification disabled: leave *obj* untouched."""
        pass
class StdoutBackend:
    """Output backend that prints each report to standard output."""

    def output(self, topic: str, value: dict):
        payload = json.dumps(value)
        print(f"{topic}: {payload}")
class PahoMqttBackend:
    """Output backend that publishes each report to MQTT (retained)."""

    def __init__(self, client: paho.mqtt.client.Client):
        self._client = client

    def output(self, topic: str, value: dict):
        payload = json.dumps(value)
        self._client.publish(topic, payload, retain=True)

    def subcribe_to_control(self, topic: str, command_handler):
        # NOTE(review): the (misspelled) method name is kept because
        # callers depend on it.
        self._client.message_callback_add(topic, command_handler)
        self._client.subscribe(topic)
# Registered output backends; _mqtt_backends is the subset whose network
# loop must be serviced and which can subscribe to control topics.
_all_backends = []
_mqtt_backends = []  # typing.List[PahoMqttBackend]
def prepare_backends(
    all_backends, mqtt_backends: typing.List[PahoMqttBackend], backends_config
):
    """Instantiate one backend per config entry.

    The literal "stdout" selects the stdout backend; any other entry is
    parsed as an mqtt:// style URL (with credentials) and connected
    immediately.
    """
    for entry in backends_config:
        if entry != "stdout":
            url = urllib.parse.urlparse(entry)
            client = paho.mqtt.client.Client()
            client.username_pw_set(url.username, url.password)
            client.connect(url.hostname)
            backend = PahoMqttBackend(client)
            mqtt_backends.append(backend)
        else:
            print("Adding stdout backend")
            backend = StdoutBackend()
        all_backends.append(backend)
# Connect every configured backend at startup.
prepare_backends(_all_backends, _mqtt_backends, _config["backends"])
class InterfacedDevice:
    """Base class tying one python-miio device to a set of MQTT topics."""

    def __init__(self, miio_device, config):
        self._miio_device = miio_device
        self._config = config
        # Watchdog timestamps, refreshed on every successful round-trip.
        self._last_succesful_report = datetime.datetime.now(datetime.timezone.utc)
        self._last_succesful_control = datetime.datetime.now(datetime.timezone.utc)

    def get_report(self):
        """Subclasses return a report dict, or None on failure."""
        raise NotImplementedError

    def topic(self):
        """Base MQTT topic: <global prefix>/<device topic>."""
        return os.path.join(_config["topic_prefix"], self._config["topic"])

    def _subtopic(self, leaf):
        return os.path.join(self.topic(), leaf)

    def status_topic(self):
        return self._subtopic("status")

    def control_topic(self):
        return self._subtopic("control")

    def error_topic(self):
        return self._subtopic("error")

    def handle_control(self, client, userdata, message: paho.mqtt.client.MQTTMessage):
        """Subclasses react to an incoming MQTT control message."""
        raise NotImplementedError
class InterfacedHumidifier(InterfacedDevice):
    """MQTT-facing wrapper around a Xiaomi air humidifier (CA1 driver).

    Caches the most recent device status in ``_last_status`` so control
    decisions can be made without an extra round-trip to the device.
    """

    def __init__(self, *args, **kwargs):
        super(InterfacedHumidifier, self).__init__(*args, **kwargs)
        # Last successfully fetched status; None until get_report() succeeds.
        self._last_status = None

    def get_report(self):
        """Poll the device and return a report dict, or None on failure."""
        try:
            status: miio.airhumidifier.AirHumidifierStatus = self._miio_device.status()
            self._last_status = status
            data = self.get_humidifier_report(status)
            # Attach the location labels from this device's config entry.
            data["location"] = self._config["location"]
            if "sublocation" in self._config:
                data["sublocation"] = self._config["sublocation"]
            self._last_succesful_report = datetime.datetime.now(datetime.timezone.utc)
            return data
        except (miio.exceptions.DeviceException, OSError) as e:
            # Communication failure: log it and report "no data".
            eprint(e)
            return None

    def get_humidifier_report(self, status):
        """Build the report dict from a raw device status object."""
        data = copy.deepcopy(status.data)
        data["temperature"] = status.temperature
        # Drop noisy fields (each call is a no-op unless the "simplify"
        # config option is enabled).
        simplify_dict(data, "hw_version")
        simplify_dict(data, "temp_dec")
        simplify_dict(data, "use_time")
        simplify_dict(data, "buzzer")
        simplify_dict(data, "child_lock")
        simplify_dict(data, "led_b")
        simplify_dict(data, "limit_hum")
        simplify_dict(data, "speed")
        simplify_dict(data, "dry")
        return data

    def apply_control(self, mdev: miio.airhumidifier.AirHumidifierCA1, control: dict):
        """Apply a control payload; expects {"speed": 0.0..1.0} (missing -> 0)."""
        target_speed = control.get("speed", 0.0)
        print(f"{self.control_topic()}: setting speed = {target_speed} from {control}")
        try:
            self.set_active_control(target_speed, mdev)
            self.set_passive_control(mdev)
            self._last_succesful_control = datetime.datetime.now(datetime.timezone.utc)
        except Exception as e:
            # Best effort: a failed control attempt is logged, not fatal.
            eprint(f"{self.control_topic()}: failed to apply control: ", e)

    def is_tank_empty(self):
        """True when the cached water level is below the configured minimum."""
        if self._last_status is not None:
            return self._last_status.depth < _config["minimal_water_depth"]
        else:
            # No status yet: optimistically assume the tank is not empty.
            return False

    def set_active_control(self, target_speed, mdev):
        # ~0 -> power off; otherwise power on (unless the tank is empty,
        # which forces off) and map thirds of the speed range onto the
        # Silent/Medium/High fan modes. Commands are only issued when the
        # cached state differs, to avoid redundant device traffic.
        if target_speed < 0.05:
            if self._last_status.is_on:
                mdev.off()
        else:
            if self.is_tank_empty():
                # Never run the humidifier dry: force it off instead.
                if self._last_status.is_on:
                    mdev.off()
            else:
                if not self._last_status.is_on:
                    mdev.on()
                if target_speed < 0.33:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.Silent
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.Silent)
                elif target_speed < 0.66:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.Medium
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.Medium)
                elif target_speed < 1.01:
                    if (
                        not self._last_status.mode
                        == miio.airhumidifier.OperationMode.High
                    ):
                        mdev.set_mode(miio.airhumidifier.OperationMode.High)
                else:
                    # Out-of-range speed (> 1.01): leave the mode alone.
                    pass

    def set_passive_control(self, mdev):
        # Enforce fixed housekeeping settings, again only when the cached
        # state differs from the desired value.
        if self._last_status.child_lock != True:
            mdev.set_child_lock(True)
        if (
            self._last_status.led_brightness is not None
            and self._last_status.led_brightness != miio.airhumidifier.LedBrightness.Dim
        ):
            mdev.set_led_brightness(miio.airhumidifier.LedBrightness.Dim)
        if self._last_status.target_humidity != 80:
            mdev.set_target_humidity(80)
        if self._last_status.buzzer != False:
            mdev.set_buzzer(False)
        if self._last_status.dry != False:
            mdev.set_dry(False)

    def handle_control(self, client, userdata, message: paho.mqtt.client.MQTTMessage):
        """MQTT callback: apply a JSON control payload.

        Ignored until a first status fetch has populated _last_status.
        """
        if self._last_status is None:
            return
        try:
            self.apply_control(self._miio_device, json.loads(message.payload))
        except miio.exceptions.DeviceError as e:
            eprint(e)
# All devices managed by this daemon; filled by prepare_devices().
_interfaced_devices = []
def prepare_devices(device_list, humidifiers_config):
    """Create an InterfacedHumidifier per config entry and wire its
    control topic into every MQTT backend."""
    for cfg in humidifiers_config:
        print(f"Configuring humidifer: {cfg['topic']} from {cfg['ip']}")
        device = miio.airhumidifier.AirHumidifierCA1(
            cfg["ip"], cfg["token"], lazy_discover=True
        )
        humidifier = InterfacedHumidifier(device, cfg)
        device_list.append(humidifier)
        for backend in _mqtt_backends:
            backend.subcribe_to_control(
                humidifier.control_topic(), humidifier.handle_control
            )
# Instantiate and subscribe every configured humidifier.
prepare_devices(_interfaced_devices, _config["humidifiers"])
def send_output(topic: str, data):
    """Publish *data* on *topic* via every backend, then service the MQTT
    network loop once so the publish actually goes out."""
    for backend in _all_backends:
        backend.output(topic, data)
    for backend in _mqtt_backends:
        backend._client.loop()
# Announce the daemon (re)start on every device's error topic so stale
# retained errors are visibly superseded.
for d in _interfaced_devices:
    send_output(d.error_topic(), "Deamon startup")
# Maximum silence tolerated before the watchdog reports a communication
# failure.
communication_time_delta = datetime.timedelta(
    minutes=_config["communication_error_timout_minutes"]
)
# ---------------------------------------------------------------------------
# Main loop: watchdog checks, then a status-report pass per device.
# ---------------------------------------------------------------------------
while True:
    now = datetime.datetime.now(datetime.timezone.utc)
    for d in _interfaced_devices:
        # Watchdog: complain when neither a report nor a control has
        # succeeded within the configured window.
        if (
            now - d._last_succesful_report > communication_time_delta
            or now - d._last_succesful_control > communication_time_delta
        ):
            send_output(
                d.error_topic(),
                # Fixed: ".days" of a minutes-sized timedelta is almost
                # always 0; report the full timedelta instead.
                f"Failed to communicate for {communication_time_delta}",
            )
        # Fixed: "d is InterfacedHumidifier" compared the instance with
        # the class object (always False), so the empty-tank warning
        # never fired; isinstance() is the correct check.
        if isinstance(d, InterfacedHumidifier) and d.is_tank_empty():
            send_output(d.error_topic(), "Water Tank is empty")
    for d in _interfaced_devices:
        report = d.get_report()
        if report is not None:
            send_output(d.status_topic(), report)
    for b in _mqtt_backends:
        b._client.loop(1)
import numpy as np
import matplotlib.pyplot as plt
import os
# Move into the solver sources before importing them (the OM-MADE modules
# are imported straight from that directory, hence the chdir-then-import
# order).
# Fixed: the Windows-style paths relied on invalid escape sequences
# ("\C", "\W" -- DeprecationWarning, future SyntaxError); raw strings
# produce byte-identical values without the warning.
os.chdir(r"..\Codes_OMMADE")
from readData import *
from timeLoops import *
from classDataPoint import *
from classParameters import *
from time import time
# One colour per plotted curve (repeated so main/secondary pairs cycle).
colors = ['k','b','r','g','c','m','y','k','b','r','g','c','m','y']
# =================================================================
#
# WSADE DATA READING
#
# =================================================================
os.chdir(r"..\Validations")
# The WSADE software is used to generate reference data
filename = r"Comparison_WSADE\WSADE_RefData.txt"
pfile = open(filename,'r')
# Reference concentrations: 101 time steps x 3 observation points, with
# tab-separated columns alternating main / secondary channel.
WSADE_main = np.zeros((101,3))
WSADE_sec = np.zeros((101,3))
WSADE_t = np.zeros(101)
it = -1
for ligne in pfile:
    it += 1
    mots = ligne.split('\t')
    WSADE_t[it] = float(mots[0])
    for ix in range(3):
        WSADE_main[it,ix] = float(mots[2*ix+1])
        WSADE_sec[it,ix] = float(mots[2*ix+2])
pfile.close()
# =================================================================
#
# NUMERICAL SIMULATION AND RESULT SAVING
#
# =================================================================
simfile = r"Comparison_WSADE\WSADE_simulation.txt"
datafile = r"Comparison_WSADE\WSADE_parameters.txt"
boundfile = r"Comparison_WSADE\WSADE_boundary.txt"
print("Initialisation...")
dx,nx,dt,tmax,c0,Xprt,Tprt,scheme = readGeneralData(simfile)
bound = readBound(boundfile)
dataset, C, points = initialise(datafile,dt,dx, c0, nx, scheme)
print("Start Computing...")
t0 = time()
dataobs = timeloop(points, C, dataset, nx, bound, dx, dt, tmax, Xprt, Tprt, scheme)
t1 = time()
print(t1-t0)  # wall-clock simulation time, in seconds
ne = len(dataset)-1
for ie in range(ne):
    np.save(r"Comparison_WSADE\WSADE_Results_C"+str(ie),dataobs[ie])
# =================================================================
#
# PLOTS OF RESULTS
#
# =================================================================
locs = list(Xprt)
ic = -1
rmse = []   # normalised RMSE, main channel, one value per location
rmse2 = []  # normalised RMSE, secondary channel
for ix in range(len(Xprt)):
    x = Xprt[ix]
    ic += 2
    # Plot of WSADE results
    plt.plot(WSADE_t,WSADE_main[:,ic//2],"o"+colors[ic])
    plt.plot(WSADE_t,WSADE_sec[:,ic//2],"o"+colors[ic+1])
    # Plot of simulation results
    plt.plot(Tprt,dataobs[0][ix,:],colors[ic%len(colors)]+"--",label="OM-MADE "+str(x)+"-C0")
    plt.plot(Tprt,dataobs[1][ix,:],colors[(ic+1)%len(colors)]+"--",label="OM-MADE "+str(x)+"-C1")
    # Normalised RMSE between WSADE and OM-MADE at this location.
    # NOTE(review): dataobs is sampled at index 2*i per WSADE step --
    # assumes Tprt is twice as dense as WSADE_t; confirm with the
    # simulation input files.
    somme = 0
    mean = 0
    somme2 = 0
    mean2 = 0
    for i in range(len(WSADE_t)):
        somme += (WSADE_main[i,ic//2] - dataobs[0][ix,2*i])**2
        mean += WSADE_main[i,ic//2]
        somme2 += (WSADE_sec[i,ic//2] - dataobs[1][ix,2*i])**2
        mean2 += WSADE_sec[i,ic//2]
    somme = somme**0.5 / mean
    somme2 = somme2**0.5 / mean2
    rmse.append(somme)
    rmse2.append(somme2)
plt.legend(loc='best')
plt.xlabel("Time (days)")
plt.ylabel("Concentration (micro-g/l)")
plt.title("Local values (o WSADE -- OM-MADE)")
plt.show()
plt.plot(Xprt, rmse, "k*", label = "Main Channel")
plt.plot(Xprt, rmse2, "r*", label = "Second Channel")
plt.yscale('log')
plt.xlabel("Distance (m)")
plt.ylabel("NRMSE")
plt.show() | OMMADE/Validations/_Main_WSADECompare.py | import numpy as np
import matplotlib.pyplot as plt
import os
# Move into the solver sources before importing them (the OM-MADE modules
# are imported straight from that directory, hence the chdir-then-import
# order).
# Fixed: the Windows-style paths relied on invalid escape sequences
# ("\C", "\W" -- DeprecationWarning, future SyntaxError); raw strings
# produce byte-identical values without the warning.
os.chdir(r"..\Codes_OMMADE")
from readData import *
from timeLoops import *
from classDataPoint import *
from classParameters import *
from time import time
# One colour per plotted curve (repeated so main/secondary pairs cycle).
colors = ['k','b','r','g','c','m','y','k','b','r','g','c','m','y']
# =================================================================
#
# WSADE DATA READING
#
# =================================================================
os.chdir(r"..\Validations")
# The WSADE software is used to generate reference data
filename = r"Comparison_WSADE\WSADE_RefData.txt"
pfile = open(filename,'r')
# Reference concentrations: 101 time steps x 3 observation points, with
# tab-separated columns alternating main / secondary channel.
WSADE_main = np.zeros((101,3))
WSADE_sec = np.zeros((101,3))
WSADE_t = np.zeros(101)
it = -1
for ligne in pfile:
    it += 1
    mots = ligne.split('\t')
    WSADE_t[it] = float(mots[0])
    for ix in range(3):
        WSADE_main[it,ix] = float(mots[2*ix+1])
        WSADE_sec[it,ix] = float(mots[2*ix+2])
pfile.close()
# =================================================================
#
# NUMERICAL SIMULATION AND RESULT SAVING
#
# =================================================================
simfile = r"Comparison_WSADE\WSADE_simulation.txt"
datafile = r"Comparison_WSADE\WSADE_parameters.txt"
boundfile = r"Comparison_WSADE\WSADE_boundary.txt"
print("Initialisation...")
dx,nx,dt,tmax,c0,Xprt,Tprt,scheme = readGeneralData(simfile)
bound = readBound(boundfile)
dataset, C, points = initialise(datafile,dt,dx, c0, nx, scheme)
print("Start Computing...")
t0 = time()
dataobs = timeloop(points, C, dataset, nx, bound, dx, dt, tmax, Xprt, Tprt, scheme)
t1 = time()
print(t1-t0)  # wall-clock simulation time, in seconds
ne = len(dataset)-1
for ie in range(ne):
    np.save(r"Comparison_WSADE\WSADE_Results_C"+str(ie),dataobs[ie])
# =================================================================
#
# PLOTS OF RESULTS
#
# =================================================================
locs = list(Xprt)
ic = -1
rmse = []   # normalised RMSE, main channel, one value per location
rmse2 = []  # normalised RMSE, secondary channel
for ix in range(len(Xprt)):
    x = Xprt[ix]
    ic += 2
    # Plot of WSADE results
    plt.plot(WSADE_t,WSADE_main[:,ic//2],"o"+colors[ic])
    plt.plot(WSADE_t,WSADE_sec[:,ic//2],"o"+colors[ic+1])
    # Plot of simulation results
    plt.plot(Tprt,dataobs[0][ix,:],colors[ic%len(colors)]+"--",label="OM-MADE "+str(x)+"-C0")
    plt.plot(Tprt,dataobs[1][ix,:],colors[(ic+1)%len(colors)]+"--",label="OM-MADE "+str(x)+"-C1")
    # Normalised RMSE between WSADE and OM-MADE at this location.
    # NOTE(review): dataobs is sampled at index 2*i per WSADE step --
    # assumes Tprt is twice as dense as WSADE_t; confirm with the
    # simulation input files.
    somme = 0
    mean = 0
    somme2 = 0
    mean2 = 0
    for i in range(len(WSADE_t)):
        somme += (WSADE_main[i,ic//2] - dataobs[0][ix,2*i])**2
        mean += WSADE_main[i,ic//2]
        somme2 += (WSADE_sec[i,ic//2] - dataobs[1][ix,2*i])**2
        mean2 += WSADE_sec[i,ic//2]
    somme = somme**0.5 / mean
    somme2 = somme2**0.5 / mean2
    rmse.append(somme)
    rmse2.append(somme2)
plt.legend(loc='best')
plt.xlabel("Time (days)")
plt.ylabel("Concentration (micro-g/l)")
plt.title("Local values (o WSADE -- OM-MADE)")
plt.show()
plt.plot(Xprt, rmse, "k*", label = "Main Channel")
plt.plot(Xprt, rmse2, "r*", label = "Second Channel")
plt.yscale('log')
plt.xlabel("Distance (m)")
plt.ylabel("NRMSE")
plt.show()
from karen_brain import Skill, dayPart
import logging, time
class TellDateTimeSkill(Skill):
    """
    Skill to give the date and time.
    """

    def __init__(self):
        """
        Tell Date and Time Skill Initialization
        """
        self._name = "TellDateTimeSkill"
        self.logger = logging.getLogger("SKILL")
        # Fixed: missing space between the skill name and the message.
        self.logger.debug(self._name + " loaded successfully.")

    def initialize(self):
        """
        Load intent files for Tell Date Time Skill

        Returns:
            (bool): True on success else raises an exception
        """
        self.register_intent_file("telltime.intent", self.handle_telltime_intent)
        self.register_intent_file("telldate.intent", self.handle_telldate_intent)
        return True

    def handle_telltime_intent(self, message, context=None):
        """
        Primary function for intent matches when a TIME intent is detected. Called by skill manager.

        Args:
            message (obj): text that triggered the intent
            context (KContext): Context surrounding the request. (optional)

        Returns:
            (bool): True on success or False on failure
        """
        if message.conf == 1.0:
            dp = dayPart().lower()
            if dp == "night":
                dp = " P M"
            else:
                dp = " in the " + dp
            # Fixed: "%l" (space-padded 12-hour) is a glibc extension and
            # not portable; "%I" with the leading zero stripped yields the
            # same hour everywhere (and avoids the stray double space).
            hour = time.strftime("%I").lstrip("0")
            text = "It is " + hour + ":" + time.strftime("%M") + dp
            return self.say(text, context=context)
        return False

    def handle_telldate_intent(self, message, context=None):
        """
        Primary function for intent matches when a DATE intent is detected. Called by skill manager.

        Args:
            message (str): text that triggered the intent
            context (KContext): Context surrounding the request. (optional)

        Returns:
            (bool): True on success or False on failure
        """
        if message.conf == 1.0:
            text = "It is " + time.strftime("%A, %B %d")
            return self.say(text, context=context)
        # Fixed: returned True even though nothing was said, contradicting
        # the documented contract and handle_telltime_intent's behaviour.
        return False

    def stop(self):
        """
        Method to stop any daemons created during startup/initialization for this skill.

        Returns:
            (bool): True on success and False on failure
        """
        # Nothing to tear down: this skill starts no background daemons.
        return True
def create_skill():
"""
Method to create the instance of this skill for delivering to the skill manager
Returns:
(object): TellDateTimeSkill instantiated class object
"""
return TellDateTimeSkill() | src/karen_brain/skills/TellDateTimeSkill/__init__.py | from karen_brain import Skill, dayPart
import logging, time
class TellDateTimeSkill(Skill):
"""
Skill to give the date and time.
"""
def __init__(self):
"""
Tell Date and Time Skill Initialization
"""
self._name = "TellDateTimeSkill"
self.logger = logging.getLogger("SKILL")
self.logger.debug(self._name + "loaded successfully.")
def initialize(self):
"""
Load intent files for Tell Date Time Skill
Returns:
(bool): True on success else raises an exception
"""
self.register_intent_file("telltime.intent", self.handle_telltime_intent)
self.register_intent_file("telldate.intent", self.handle_telldate_intent)
return True
def handle_telltime_intent(self, message, context=None):
"""
Primary function for intent matches when a TIME intent is detected. Called by skill manager.
Args:
message (obj): text that triggered the intent
context (KContext): Context surrounding the request. (optional)
Returns:
(bool): True on success or False on failure
"""
if message.conf == 1.0:
dp = dayPart().lower()
if dp == "night":
dp = " P M"
else:
dp = " in the " + dp
text = "It is " + time.strftime("%l") + ":" + time.strftime("%M") + dp
return self.say(text, context=context)
return False
def handle_telldate_intent(self, message, context=None):
"""
Primary function for intent matches when a DATE intent is detected. Called by skill manager.
Args:
message (str): text that triggered the intent
context (KContext): Context surrounding the request. (optional)
Returns:
(bool): True on success or False on failure
"""
if message.conf == 1.0:
text = "It is " + time.strftime("%A, %B %d")
return self.say(text, context=context)
return True
def stop(self):
"""
Method to stop any daemons created during startup/initialization for this skill.
Returns:
(bool): True on success and False on failure
"""
return True
def create_skill():
"""
Method to create the instance of this skill for delivering to the skill manager
Returns:
(object): TellDateTimeSkill instantiated class object
"""
return TellDateTimeSkill() | 0.767167 | 0.157622 |
import unittest
from struct_parse import FieldList, FieldType, ByteOrder, BYTE_ORDER, FIELD_TYPE
class TestFieldListConstructor(unittest.TestCase):
def test_empty(self):
field_list = FieldList()
self.assertEqual(len(field_list.fields), 0)
self.assertEqual(field_list.fields, [])
def test_basic(self):
field_list = FieldList([FieldType.BOOL])
self.assertEqual(len(field_list.fields), 1)
self.assertTrue(FieldType.BOOL in field_list)
self.assertEqual(field_list.fields, [FieldType.BOOL])
class TestFieldListFromString(unittest.TestCase):
def test_only_endianness(self):
for c in BYTE_ORDER.keys():
field_list = FieldList.from_string(c)
self.assertEqual(field_list.byte_order, ByteOrder.NATIVE)
self.assertEqual(len(field_list), 0)
self.assertEqual(field_list.fields, [])
def test_single_element(self):
for c, typ in FIELD_TYPE.items():
field_list = FieldList.from_string(c)
self.assertEqual(len(field_list), 1)
self.assertEqual(field_list.fields, [typ])
def test_single_element_with_endianness(self):
for order_c, order in BYTE_ORDER.items():
for c, typ in FIELD_TYPE.items():
field_list = FieldList.from_string(order_c + c)
self.assertEqual(len(field_list), 1)
self.assertEqual(field_list.byte_order, order)
self.assertEqual(field_list.fields, [typ])
def test_multiple_elements(self):
field_list = FieldList.from_string('xcbB?hHiIlLqQnNefdspP')
self.assertEqual(field_list.byte_order, ByteOrder.NATIVE)
self.assertEqual(field_list.fields, [
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
])
def test_multiple_elements_with_endianness(self):
for order_c, order in BYTE_ORDER.items():
field_list = FieldList.from_string(
order_c + 'xcbB?hHiIlLqQnNefdspP')
self.assertEqual(field_list.byte_order, order)
self.assertEqual(field_list.fields, [
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
])
def test_two_byte_orders_throws(self):
with self.assertRaises(KeyError):
FieldList.from_string('@<')
def test_invalid_format_char_throws(self):
with self.assertRaises(KeyError):
FieldList.from_string('z')
class TestFieldListToString(unittest.TestCase):
def test_basic(self):
self.assertEqual(FieldList([]).to_string(), '')
def test_single_element(self):
for typ_s, typ in FIELD_TYPE.items():
if typ_s == 'p':
continue
self.assertEqual(FieldList([typ]).to_string(), '@' + typ_s)
def test_single_element_with_endianness(self):
for order_s, order in BYTE_ORDER.items():
if order_s == '=':
continue
for typ_s, typ in FIELD_TYPE.items():
if typ_s == 'p':
continue
self.assertEqual(FieldList([typ], order).to_string(),
order_s + typ_s)
def test_multiple_elements(self):
self.assertEqual(FieldList([
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
]).to_string(), '@xcbB?hHiIlLqQnNefdsP')
def test_multiple_elements_with_endianness(self):
for order_s, order in BYTE_ORDER.items():
if order_s == '=':
continue
self.assertEqual(FieldList([
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
], order).to_string(), order_s + 'xcbB?hHiIlLqQnNefdsP')
if __name__ == '__main__':
unittest.main() | test/test_struct_parse.py | import unittest
from struct_parse import FieldList, FieldType, ByteOrder, BYTE_ORDER, FIELD_TYPE
class TestFieldListConstructor(unittest.TestCase):
def test_empty(self):
field_list = FieldList()
self.assertEqual(len(field_list.fields), 0)
self.assertEqual(field_list.fields, [])
def test_basic(self):
field_list = FieldList([FieldType.BOOL])
self.assertEqual(len(field_list.fields), 1)
self.assertTrue(FieldType.BOOL in field_list)
self.assertEqual(field_list.fields, [FieldType.BOOL])
class TestFieldListFromString(unittest.TestCase):
def test_only_endianness(self):
for c in BYTE_ORDER.keys():
field_list = FieldList.from_string(c)
self.assertEqual(field_list.byte_order, ByteOrder.NATIVE)
self.assertEqual(len(field_list), 0)
self.assertEqual(field_list.fields, [])
def test_single_element(self):
for c, typ in FIELD_TYPE.items():
field_list = FieldList.from_string(c)
self.assertEqual(len(field_list), 1)
self.assertEqual(field_list.fields, [typ])
def test_single_element_with_endianness(self):
for order_c, order in BYTE_ORDER.items():
for c, typ in FIELD_TYPE.items():
field_list = FieldList.from_string(order_c + c)
self.assertEqual(len(field_list), 1)
self.assertEqual(field_list.byte_order, order)
self.assertEqual(field_list.fields, [typ])
def test_multiple_elements(self):
field_list = FieldList.from_string('xcbB?hHiIlLqQnNefdspP')
self.assertEqual(field_list.byte_order, ByteOrder.NATIVE)
self.assertEqual(field_list.fields, [
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
])
def test_multiple_elements_with_endianness(self):
for order_c, order in BYTE_ORDER.items():
field_list = FieldList.from_string(
order_c + 'xcbB?hHiIlLqQnNefdspP')
self.assertEqual(field_list.byte_order, order)
self.assertEqual(field_list.fields, [
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
])
def test_two_byte_orders_throws(self):
with self.assertRaises(KeyError):
FieldList.from_string('@<')
def test_invalid_format_char_throws(self):
with self.assertRaises(KeyError):
FieldList.from_string('z')
class TestFieldListToString(unittest.TestCase):
def test_basic(self):
self.assertEqual(FieldList([]).to_string(), '')
def test_single_element(self):
for typ_s, typ in FIELD_TYPE.items():
if typ_s == 'p':
continue
self.assertEqual(FieldList([typ]).to_string(), '@' + typ_s)
def test_single_element_with_endianness(self):
for order_s, order in BYTE_ORDER.items():
if order_s == '=':
continue
for typ_s, typ in FIELD_TYPE.items():
if typ_s == 'p':
continue
self.assertEqual(FieldList([typ], order).to_string(),
order_s + typ_s)
def test_multiple_elements(self):
self.assertEqual(FieldList([
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
]).to_string(), '@xcbB?hHiIlLqQnNefdsP')
def test_multiple_elements_with_endianness(self):
for order_s, order in BYTE_ORDER.items():
if order_s == '=':
continue
self.assertEqual(FieldList([
FieldType.PAD,
FieldType.CHAR,
FieldType.SIGNED_CHAR,
FieldType.UNSIGNED_CHAR,
FieldType.BOOL,
FieldType.SHORT,
FieldType.UNSIGNED_SHORT,
FieldType.INT,
FieldType.UNSIGNED_INT,
FieldType.LONG,
FieldType.UNSIGNED_LONG,
FieldType.LONG_LONG,
FieldType.UNSIGNED_LONG_LONG,
FieldType.SSIZE_T,
FieldType.SIZE_T,
FieldType.HALF_PRECISION_FLOAT,
FieldType.FLOAT,
FieldType.DOUBLE,
FieldType.CHAR_ARRAY,
FieldType.VOID_POINTER,
], order).to_string(), order_s + 'xcbB?hHiIlLqQnNefdsP')
if __name__ == '__main__':
unittest.main() | 0.553264 | 0.499634 |
import json
from typing import MutableMapping, Union, Optional, Type, Any
from urlobject import URLObject
from spoffy.exceptions import (
SpotifyException,
SpotifyUnauthorized,
SpotifyPremiumRequired,
)
class Request:
"""
:param method: The request method (`"GET"/"POST"/"PUT"/"DELETE"`)
:param url: The request URL (absolute or relative to client base URL)
:param body: Optional request body, can be passed either as raw bytes
or in dict format (in which case it will be json encoded)
:param headers: Additional request headers
:param access_token: Will be added to Authorization header
"""
def __init__(
self,
method: str,
url: str,
params: Optional[MutableMapping] = None,
body: Optional[Union[bytes, MutableMapping[str, Any]]] = None,
headers: Optional[MutableMapping[str, str]] = None,
access_token: Optional[str] = None,
):
self.method = method
self.url = str(URLObject(url).add_query_params(**(params or {})))
self.headers = dict(headers or {})
self.body = body
if body is not None and isinstance(body, MutableMapping):
charset = "utf-8"
self.body = json.dumps(body).encode(charset)
self.headers[
"Content-Type"
] = f"application/json; charset={charset}"
self.headers["Content-Length"] = str(len(self.body))
elif body is not None and isinstance(body, bytes):
self.body = body
self.headers["Content-Length"] = str(len(self.body))
if access_token:
self.headers["Authorization"] = "Bearer " + access_token
def __repr__(self):
return "<{}(method={}, url={}, body={}, headers={})>".format(
self.__class__.__name__,
repr(self.method),
repr(self.url),
repr(self.body),
repr(self.headers),
)
def __str__(self):
return "Request: {} {}".format(self.method, self.url)
class Response:
def __init__(
self,
request: Request,
status_code: int,
headers: MutableMapping,
content: Optional[bytes] = None,
):
self.request = request
self.status_code = status_code
self.headers = headers
self.content = content
def raise_for_status(self):
"""
Raise a :class:`~spotify.exceptions.SpotifyException`
if response status code is an error code.
"""
if self.status_code < 400:
return
kwargs = dict(
status_code=self.status_code,
headers=self.headers,
request_method=self.request.method,
request_url=self.request.url,
)
try:
error_info = self.json
if isinstance(error_info["error"], dict):
error_info = error_info["error"]
if "error" in error_info and "message" not in error_info:
error_info["message"] = error_info["error"]
except Exception:
error_info = {"status": self.status_code, "message": self.text}
reason = error_info.get("reason")
exc_class: Type[SpotifyException]
if kwargs["status_code"] == 401:
exc_class = SpotifyUnauthorized
elif kwargs["status_code"] == 403 and reason == "PREMIUM_REQUIRED":
exc_class = SpotifyPremiumRequired
else:
exc_class = SpotifyException
kwargs["reason"] = reason
kwargs["error_description"] = error_info.get("error_description")
raise exc_class(error_info["message"], **kwargs) # type: ignore
@property
def json(self) -> Optional[dict]:
if not self.content:
return None
return json.loads(self.content)
@property
def text(self) -> Optional[str]:
if self.content is None:
return None
return self.content.decode() | spoffy/sansio.py | import json
from typing import MutableMapping, Union, Optional, Type, Any
from urlobject import URLObject
from spoffy.exceptions import (
SpotifyException,
SpotifyUnauthorized,
SpotifyPremiumRequired,
)
class Request:
"""
:param method: The request method (`"GET"/"POST"/"PUT"/"DELETE"`)
:param url: The request URL (absolute or relative to client base URL)
:param body: Optional request body, can be passed either as raw bytes
or in dict format (in which case it will be json encoded)
:param headers: Additional request headers
:param access_token: Will be added to Authorization header
"""
def __init__(
self,
method: str,
url: str,
params: Optional[MutableMapping] = None,
body: Optional[Union[bytes, MutableMapping[str, Any]]] = None,
headers: Optional[MutableMapping[str, str]] = None,
access_token: Optional[str] = None,
):
self.method = method
self.url = str(URLObject(url).add_query_params(**(params or {})))
self.headers = dict(headers or {})
self.body = body
if body is not None and isinstance(body, MutableMapping):
charset = "utf-8"
self.body = json.dumps(body).encode(charset)
self.headers[
"Content-Type"
] = f"application/json; charset={charset}"
self.headers["Content-Length"] = str(len(self.body))
elif body is not None and isinstance(body, bytes):
self.body = body
self.headers["Content-Length"] = str(len(self.body))
if access_token:
self.headers["Authorization"] = "Bearer " + access_token
def __repr__(self):
return "<{}(method={}, url={}, body={}, headers={})>".format(
self.__class__.__name__,
repr(self.method),
repr(self.url),
repr(self.body),
repr(self.headers),
)
def __str__(self):
return "Request: {} {}".format(self.method, self.url)
class Response:
def __init__(
self,
request: Request,
status_code: int,
headers: MutableMapping,
content: Optional[bytes] = None,
):
self.request = request
self.status_code = status_code
self.headers = headers
self.content = content
def raise_for_status(self):
"""
Raise a :class:`~spotify.exceptions.SpotifyException`
if response status code is an error code.
"""
if self.status_code < 400:
return
kwargs = dict(
status_code=self.status_code,
headers=self.headers,
request_method=self.request.method,
request_url=self.request.url,
)
try:
error_info = self.json
if isinstance(error_info["error"], dict):
error_info = error_info["error"]
if "error" in error_info and "message" not in error_info:
error_info["message"] = error_info["error"]
except Exception:
error_info = {"status": self.status_code, "message": self.text}
reason = error_info.get("reason")
exc_class: Type[SpotifyException]
if kwargs["status_code"] == 401:
exc_class = SpotifyUnauthorized
elif kwargs["status_code"] == 403 and reason == "PREMIUM_REQUIRED":
exc_class = SpotifyPremiumRequired
else:
exc_class = SpotifyException
kwargs["reason"] = reason
kwargs["error_description"] = error_info.get("error_description")
raise exc_class(error_info["message"], **kwargs) # type: ignore
@property
def json(self) -> Optional[dict]:
if not self.content:
return None
return json.loads(self.content)
@property
def text(self) -> Optional[str]:
if self.content is None:
return None
return self.content.decode() | 0.796134 | 0.121243 |
import datetime
import inspect
import os
import sys
import unittest
import numpy
import cf
class FieldTest(unittest.TestCase):
def setUp(self):
self.filename2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'test_file2.nc')
self.filename4 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'test_file4.nc')
self.chunk_sizes = (17, 50, 100, 300, 3000, 300000)[::-1]
self.original_chunksize = cf.CHUNKSIZE()
self.test_only = []
# self.test_only = ['nought']
# self.test_only = ['test_COLLAPSE_CLIMATOLOGICAL_TIME']
# self.test_only = ['test_COLLAPSE']
# self.test_only = ['test_COLLAPSE_GROUP_OPTIONS']
def test_COLLAPSE_CLIMATOLOGICAL_TIME(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.seasons(), _debug=False)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.M(), _debug=0)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f.collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: mean within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: mean within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
for key in f.cell_methods:
f.del_construct(key)
g = f.collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: max within years time: min over years', within_years=cf.M())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f.collapse('T: max within years time: minimum over years',
within_years=cf.seasons(), over_years=cf.Y(5))
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (8, 4, 5))
g = f[::-1, ...].collapse('T: max within years time: minimum over years',
within_years=cf.seasons(), over_years=cf.Y(5))
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (8, 4, 5))
#--- End: def
def test_COLLAPSE(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename2)[0]
g = f.collapse('mean')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 1, 1), g.shape)
g = f.collapse('mean', axes=['T', 'X'])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('mean', axes=[0, 2])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('mean', axes=[0, 1])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 1, 5))
g = f.collapse('mean', axes='domainaxis1')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=['domainaxis1'])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=[1])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=1)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('T: mean')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 5))
g = f.collapse('T: mean X: maximum')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('T: mean within years time: minimum over years',
within_years=cf.M(), _debug=0)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
for m in range(1, 13):
a = numpy.empty((5, 4, 5))
for i, year in enumerate(f.subspace(T=cf.month(m)).coord('T').year.unique()):
q = cf.month(m) & cf.year(year)
x = f.subspace(T=cf.month(m) & cf.year(year))
x.data.mean(axes=0, inplace=True)
a[i] = x.array
a = a.min(axis=0)
self.assertTrue(numpy.allclose(a, g.array[m % 12]))
#--- End: for
g = f.collapse('T: mean', group=360)
for group in (cf.M(12),
cf.M(12, month=12),
cf.M(12, day=16),
cf.M(12, month=11, day=27)):
g = f.collapse('T: mean', group=group)
bound = g.coord('T').bounds.datetime_array[0, 1]
self.assertTrue(bound.month == group.offset.month,
"{}!={}, group={}".format(bound.month, group.offset.month, group))
self.assertTrue(bound.day == group.offset.day,
"{}!={}, group={}".format(bound.day, group.offset.day, group))
#--- End: for
# for group in (cf.D(30),
# cf.D(30, month=12),
# cf.D(30, day=16),
# cf.D(30, month=11, day=27)):
# g = f.collapse('T: mean', group=group)
# bound = g.coord('T').bounds.datetime_array[0, 1]
# self.assertTrue(bound.day == group.offset.day,
# "{}!={}, bound={}, group={}".format(bound.day, group.offset.day, bound, group))
#--- End: def
def test_COLLAPSE_weights(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
if verbose:
print(f)
g = f.collapse('area: mean')
g = f.collapse('area: mean', weights='area')
if verbose:
print(g)
#--- End: def
def test_COLLAPSE_groups(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
g = f.collapse('T: mean', group=cf.M(12), group_span=cf.Y(), _debug=0)
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=12) , group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (10, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, day=16) , group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=11, day=27), group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (10, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=6, day=27), group_span=cf.Y(), _debug=0)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=2), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_span=cf.M(5), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=3), _debug=0)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5)) # TODO - look into month offset when M< 12
g = f.collapse('T: mean', group=cf.M(5, month=3), group_span=cf.M(5), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.M(3), group_span=True)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.seasons(), group_span=cf.M(3))
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
# g = f[::2].collapse('T: mean', group=cf.M(5, month=12),
# group_span=cf.M(5),group_contiguous=1)
# print (g)
# g = f.collapse('T: mean', group=cf.M(5, month= 3),
# group_contiguous=1)
# g = f.collapse('T: mean', group=cf.M(5, month=12),
# group_contiguous=2)
# g = f.collapse('T: mean', group=cf.M(5, month= 3),
# group_contiguous=2)
#--- End: def
#--- End: class
if __name__ == '__main__':
print('Run date:', datetime.datetime.now())
cf.environment()
print()
unittest.main(verbosity=2) | cf/test/test_collapse.py | import datetime
import inspect
import os
import sys
import unittest
import numpy
import cf
class FieldTest(unittest.TestCase):
def setUp(self):
self.filename2 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'test_file2.nc')
self.filename4 = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'test_file4.nc')
self.chunk_sizes = (17, 50, 100, 300, 3000, 300000)[::-1]
self.original_chunksize = cf.CHUNKSIZE()
self.test_only = []
# self.test_only = ['nought']
# self.test_only = ['test_COLLAPSE_CLIMATOLOGICAL_TIME']
# self.test_only = ['test_COLLAPSE']
# self.test_only = ['test_COLLAPSE_GROUP_OPTIONS']
def test_COLLAPSE_CLIMATOLOGICAL_TIME(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.seasons(), _debug=False)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.M(), _debug=0)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f.collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: mean within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: mean within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
for key in f.cell_methods:
f.del_construct(key)
g = f.collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: max within years time: min over years', within_years=cf.M())
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.seasons())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f[:12].collapse('T: max within years time: minimum over years', within_years=cf.M())
if verbose:
print('\n',f[:12])
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
g = f.collapse('T: max within years time: minimum over years',
within_years=cf.seasons(), over_years=cf.Y(5))
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (8, 4, 5))
g = f[::-1, ...].collapse('T: max within years time: minimum over years',
within_years=cf.seasons(), over_years=cf.Y(5))
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (8, 4, 5))
#--- End: def
def test_COLLAPSE(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename2)[0]
g = f.collapse('mean')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 1, 1), g.shape)
g = f.collapse('mean', axes=['T', 'X'])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('mean', axes=[0, 2])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('mean', axes=[0, 1])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 1, 5))
g = f.collapse('mean', axes='domainaxis1')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=['domainaxis1'])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=[1])
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('mean', axes=1)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1800, 1, 5))
g = f.collapse('T: mean')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 5))
g = f.collapse('T: mean X: maximum')
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (1, 4, 1))
g = f.collapse('T: mean within years time: minimum over years',
within_years=cf.M(), _debug=0)
if verbose:
print('\n',f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (12, 4, 5))
for m in range(1, 13):
a = numpy.empty((5, 4, 5))
for i, year in enumerate(f.subspace(T=cf.month(m)).coord('T').year.unique()):
q = cf.month(m) & cf.year(year)
x = f.subspace(T=cf.month(m) & cf.year(year))
x.data.mean(axes=0, inplace=True)
a[i] = x.array
a = a.min(axis=0)
self.assertTrue(numpy.allclose(a, g.array[m % 12]))
#--- End: for
g = f.collapse('T: mean', group=360)
for group in (cf.M(12),
cf.M(12, month=12),
cf.M(12, day=16),
cf.M(12, month=11, day=27)):
g = f.collapse('T: mean', group=group)
bound = g.coord('T').bounds.datetime_array[0, 1]
self.assertTrue(bound.month == group.offset.month,
"{}!={}, group={}".format(bound.month, group.offset.month, group))
self.assertTrue(bound.day == group.offset.day,
"{}!={}, group={}".format(bound.day, group.offset.day, group))
#--- End: for
# for group in (cf.D(30),
# cf.D(30, month=12),
# cf.D(30, day=16),
# cf.D(30, month=11, day=27)):
# g = f.collapse('T: mean', group=group)
# bound = g.coord('T').bounds.datetime_array[0, 1]
# self.assertTrue(bound.day == group.offset.day,
# "{}!={}, bound={}, group={}".format(bound.day, group.offset.day, bound, group))
#--- End: def
def test_COLLAPSE_weights(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
if verbose:
print(f)
g = f.collapse('area: mean')
g = f.collapse('area: mean', weights='area')
if verbose:
print(g)
#--- End: def
def test_COLLAPSE_groups(self):
if self.test_only and inspect.stack()[0][3] not in self.test_only:
return
verbose = False
f = cf.read(self.filename4)[0]
g = f.collapse('T: mean', group=cf.M(12), group_span=cf.Y(), _debug=0)
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=12) , group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (10, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, day=16) , group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=11, day=27), group_span=cf.Y())
if verbose:
print(f)
print(g)
print(g.constructs)
self.assertTrue(g.shape == (10, 4, 5))
g = f.collapse('T: mean', group=cf.M(12, month=6, day=27), group_span=cf.Y(), _debug=0)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (9, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=2), group_span=cf.M(5), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_span=cf.M(5), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=3), _debug=0)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5)) # TODO - look into month offset when M< 12
g = f.collapse('T: mean', group=cf.M(5, month=3), group_span=cf.M(5), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_contiguous=1)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month=12), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean', group=cf.M(5, month= 3), group_contiguous=2)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (24, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.M(3), group_span=True)
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
g = f.collapse('T: mean within years time: minimum over years', within_years=cf.seasons(), group_span=cf.M(3))
if verbose:
print(f)
print(g)
print(g.dimension_coordinates('T').value().bounds.data.datetime_array)
print(g.constructs)
self.assertTrue(g.shape == (4, 4, 5))
# g = f[::2].collapse('T: mean', group=cf.M(5, month=12),
# group_span=cf.M(5),group_contiguous=1)
# print (g)
# g = f.collapse('T: mean', group=cf.M(5, month= 3),
# group_contiguous=1)
# g = f.collapse('T: mean', group=cf.M(5, month=12),
# group_contiguous=2)
# g = f.collapse('T: mean', group=cf.M(5, month= 3),
# group_contiguous=2)
#--- End: def
#--- End: class
if __name__ == '__main__':
print('Run date:', datetime.datetime.now())
cf.environment()
print()
unittest.main(verbosity=2) | 0.318379 | 0.370709 |
import datetime
import io
import logging
import tempfile
import time
from stat import S_IFDIR, S_IFREG
import diskcache
from fuse import LoggingMixIn, Operations
from .core import _convert_time
class WholeTaleMetadataFS(LoggingMixIn, Operations):
"""
Class for handling Tale's metadata.
:param girder_cli: Authenticated instance of GirderClient
:type girder_cli: girder_client.GriderClient
"""
def __init__(self, tale_id, girder_cli):
super(WholeTaleMetadataFS, self).__init__()
self.girder_cli = girder_cli
self.tale_id = tale_id
self.fd = 0
self.cachedir = tempfile.mkdtemp(prefix="wtmeta")
self.cache = diskcache.Cache(self.cachedir)
def read(self, path, size, offset, fh):
logging.warning(
"path = {}\nsize = {}\noffset = {}\nfh = {}\n".format(
path, size, offset, fh
)
)
if path == "/metadata.json":
fp = self._get_manifest()
fp.seek(offset)
return fp.read(size)
raise NotImplementedError
def readdir(self, path, fh):
dirents = [".", "..", "metadata.json", "image_build.log"]
return dirents
def getinfo(self, path):
"""Pyfilesystem essential method."""
logging.debug("-> getinfo({})".format(path))
if not path.startswith("/"):
path = "/" + path
return self.getattr(path)
def getattr(self, path, fh=None):
logging.warning("-> getattr({})".format(path))
now = _convert_time(str(datetime.datetime.now()))
stat = dict(st_ctime=now, st_atime=now, st_mtime=now)
if path == "/":
stat.update(dict(st_mode=(S_IFDIR | 0o755), st_nlink=2))
elif path == "/metadata.json":
manifest = self._get_manifest()
stat.update(
dict(
st_mode=(S_IFREG | 0o644),
st_nlink=1,
st_blocks=1,
st_size=manifest.seek(0, io.SEEK_END),
st_atime=time.time(),
)
)
else:
stat.update(
dict(
st_mode=(S_IFREG | 0o644),
st_nlink=1,
st_blocks=1,
st_size=1024 ** 2,
st_atime=time.time(),
)
)
return stat
def _get_manifest(self):
fp = self.cache.get("manifest", read=True)
if not fp:
resp = self.girder_cli.sendRestRequest(
"get",
"tale/{}/manifest".format(self.tale_id),
stream=False,
jsonResp=False,
)
fp = io.BytesIO(resp.content)
self.cache.set("manifest", fp, read=True, expire=15)
return fp
def open(self, path, mode="r", **kwargs):
logging.debug("-> open({}, {})".format(path, self.fd))
self.fd += 1
return self.fd
def release(self, path, fh): # pylint: disable=unused-argument
logging.debug("-> release({}, {})".format(path, self.fd))
self.fd -= 1
return self.fd | girderfs/metadata.py | import datetime
import io
import logging
import tempfile
import time
from stat import S_IFDIR, S_IFREG
import diskcache
from fuse import LoggingMixIn, Operations
from .core import _convert_time
class WholeTaleMetadataFS(LoggingMixIn, Operations):
"""
Class for handling Tale's metadata.
:param girder_cli: Authenticated instance of GirderClient
:type girder_cli: girder_client.GriderClient
"""
def __init__(self, tale_id, girder_cli):
super(WholeTaleMetadataFS, self).__init__()
self.girder_cli = girder_cli
self.tale_id = tale_id
self.fd = 0
self.cachedir = tempfile.mkdtemp(prefix="wtmeta")
self.cache = diskcache.Cache(self.cachedir)
def read(self, path, size, offset, fh):
logging.warning(
"path = {}\nsize = {}\noffset = {}\nfh = {}\n".format(
path, size, offset, fh
)
)
if path == "/metadata.json":
fp = self._get_manifest()
fp.seek(offset)
return fp.read(size)
raise NotImplementedError
def readdir(self, path, fh):
dirents = [".", "..", "metadata.json", "image_build.log"]
return dirents
def getinfo(self, path):
"""Pyfilesystem essential method."""
logging.debug("-> getinfo({})".format(path))
if not path.startswith("/"):
path = "/" + path
return self.getattr(path)
def getattr(self, path, fh=None):
logging.warning("-> getattr({})".format(path))
now = _convert_time(str(datetime.datetime.now()))
stat = dict(st_ctime=now, st_atime=now, st_mtime=now)
if path == "/":
stat.update(dict(st_mode=(S_IFDIR | 0o755), st_nlink=2))
elif path == "/metadata.json":
manifest = self._get_manifest()
stat.update(
dict(
st_mode=(S_IFREG | 0o644),
st_nlink=1,
st_blocks=1,
st_size=manifest.seek(0, io.SEEK_END),
st_atime=time.time(),
)
)
else:
stat.update(
dict(
st_mode=(S_IFREG | 0o644),
st_nlink=1,
st_blocks=1,
st_size=1024 ** 2,
st_atime=time.time(),
)
)
return stat
def _get_manifest(self):
fp = self.cache.get("manifest", read=True)
if not fp:
resp = self.girder_cli.sendRestRequest(
"get",
"tale/{}/manifest".format(self.tale_id),
stream=False,
jsonResp=False,
)
fp = io.BytesIO(resp.content)
self.cache.set("manifest", fp, read=True, expire=15)
return fp
def open(self, path, mode="r", **kwargs):
logging.debug("-> open({}, {})".format(path, self.fd))
self.fd += 1
return self.fd
def release(self, path, fh): # pylint: disable=unused-argument
logging.debug("-> release({}, {})".format(path, self.fd))
self.fd -= 1
return self.fd | 0.417984 | 0.090053 |
from wifi import Cell, Scheme
import socket
from lib import jsonfile, network
global port
def testInternetConnection():
s = createConnection()
if s is not None:
s.send('testInternetConnection')
if s.recv(1024) == '1':
print 'Server connection successful.'
return True
else:
return False
else:
print 'Cannot connect to server.'
def createConnection():
defaultGateway = network.getDefaultGateway('wlan0')
socket.setdefaulttimeout(5)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print 'start connection'
if s.connect_ex((defaultGateway, port)) == 0:
return s
else:
return None
def createScheme(interface, cell, ssidName, passkey):
scheme = Scheme.for_cell(interface, ssidName, cell, passkey)
scheme.save()
return scheme
def connectNode():
jsonfile.open_file('/home/pi/TrailSafe/config/config.ini')
info = jsonfile.read()
print info
deviceSSID = info['device-SSID']
passkey = info['passkey']
interface = info['client-interface']
targetSSIDPrefix = info['target-SSIDPrefix']
port = info['port']
cellList = Cell.all(interface)
targetSSID = []
internetSSID = []
for cell in cellList:
if targetSSIDPrefix in cell.ssid:
targetSSID.append(cell)
print 'target amount %d' % len(targetSSID)
for x in range (0, len(targetSSID)):
print targetSSID[x].ssid
scheme = Scheme.find(interface, targetSSID[x].ssid)
if scheme is None:
print 'create scheme'
scheme = createScheme(interface, targetSSID[x], targetSSID[x].ssid, passkey)
scheme.activate()
if testInternetConnection() == True:
internetSSID.append(targetSSID[x])
print internetSSID
highSignal = internetSSID[0]
print 'find maximum high signal'
for ssid in internetSSID:
if ssid.signal > highSignal.signal:
highSignal = ssid
print 'connect'
scheme = Scheme.find(interface, highSignal.ssid)
scheme.activate()
jsonfile.update({'node-defaultGateway': network.getDefaultGateway(interface)}) | Device/files/connect.py | from wifi import Cell, Scheme
import socket
from lib import jsonfile, network
global port
def testInternetConnection():
s = createConnection()
if s is not None:
s.send('testInternetConnection')
if s.recv(1024) == '1':
print 'Server connection successful.'
return True
else:
return False
else:
print 'Cannot connect to server.'
def createConnection():
defaultGateway = network.getDefaultGateway('wlan0')
socket.setdefaulttimeout(5)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print 'start connection'
if s.connect_ex((defaultGateway, port)) == 0:
return s
else:
return None
def createScheme(interface, cell, ssidName, passkey):
scheme = Scheme.for_cell(interface, ssidName, cell, passkey)
scheme.save()
return scheme
def connectNode():
jsonfile.open_file('/home/pi/TrailSafe/config/config.ini')
info = jsonfile.read()
print info
deviceSSID = info['device-SSID']
passkey = info['passkey']
interface = info['client-interface']
targetSSIDPrefix = info['target-SSIDPrefix']
port = info['port']
cellList = Cell.all(interface)
targetSSID = []
internetSSID = []
for cell in cellList:
if targetSSIDPrefix in cell.ssid:
targetSSID.append(cell)
print 'target amount %d' % len(targetSSID)
for x in range (0, len(targetSSID)):
print targetSSID[x].ssid
scheme = Scheme.find(interface, targetSSID[x].ssid)
if scheme is None:
print 'create scheme'
scheme = createScheme(interface, targetSSID[x], targetSSID[x].ssid, passkey)
scheme.activate()
if testInternetConnection() == True:
internetSSID.append(targetSSID[x])
print internetSSID
highSignal = internetSSID[0]
print 'find maximum high signal'
for ssid in internetSSID:
if ssid.signal > highSignal.signal:
highSignal = ssid
print 'connect'
scheme = Scheme.find(interface, highSignal.ssid)
scheme.activate()
jsonfile.update({'node-defaultGateway': network.getDefaultGateway(interface)}) | 0.236516 | 0.085175 |
import os
import sys
project_folder = os.path.realpath('..')
sys.path.append(project_folder)
import tensorflow as tf
import numpy as np
from PIL import Image
from app.pipeline import generate_data_skeleton, data_pipe, multithreading
from app.settings import IMAGE_PATH, BATCH_SIZE, IMAGE_SHAPE
train_file_array, train_label_array, valid_file_array, valid_label_array =\
generate_data_skeleton(root_dir=os.path.join(project_folder,
IMAGE_PATH,
'train'),
valid_size=.15,
ext=('.png', '.csv'))
train_image_batch, train_label_batch = data_pipe(
train_file_array,
train_label_array,
num_epochs=None,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=True)
valid_image_batch, valid_label_batch = data_pipe(
valid_file_array,
valid_label_array,
num_epochs=None,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=True)
test_file_array, _ = generate_data_skeleton(
root_dir=os.path.join(project_folder,
IMAGE_PATH + 'test'),
valid_size=None,
ext=('.png', '.csv'))
# !!! no shuffling and only 1 epoch of test set.
test_image_batch, _ = data_pipe(
test_file_array,
_,
num_epochs=1,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=False)
sess = tf.Session()
init_op = tf.group(tf.local_variables_initializer(),
tf.global_variables_initializer())
sess.run(init_op)
@multithreading
def test_shuffle_queue():
whole_train_images = list()
for _ in range(3):
image_batch = valid_image_batch.eval()
whole_train_images.append(image_batch)
return [piece for blk in whole_train_images for piece in blk]
@multithreading
def test_unshuffle_queue():
whole_test_images = list()
while True:
try:
test_image = sess.run(test_image_batch)
whole_test_images.append(test_image)
print(test_image.shape)
except tf.errors.OutOfRangeError as e:
break
return [piece for blk in whole_test_images for piece in blk]
with sess:
total = test_shuffle_queue()
n = int(input('choose a image to test'))
print(total[n])
# Image.fromarray(np.array(total[n], dtype=np.uint8)).show() | tests/test_pipeline.py | import os
import sys
project_folder = os.path.realpath('..')
sys.path.append(project_folder)
import tensorflow as tf
import numpy as np
from PIL import Image
from app.pipeline import generate_data_skeleton, data_pipe, multithreading
from app.settings import IMAGE_PATH, BATCH_SIZE, IMAGE_SHAPE
train_file_array, train_label_array, valid_file_array, valid_label_array =\
generate_data_skeleton(root_dir=os.path.join(project_folder,
IMAGE_PATH,
'train'),
valid_size=.15,
ext=('.png', '.csv'))
train_image_batch, train_label_batch = data_pipe(
train_file_array,
train_label_array,
num_epochs=None,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=True)
valid_image_batch, valid_label_batch = data_pipe(
valid_file_array,
valid_label_array,
num_epochs=None,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=True)
test_file_array, _ = generate_data_skeleton(
root_dir=os.path.join(project_folder,
IMAGE_PATH + 'test'),
valid_size=None,
ext=('.png', '.csv'))
# !!! no shuffling and only 1 epoch of test set.
test_image_batch, _ = data_pipe(
test_file_array,
_,
num_epochs=1,
shape=IMAGE_SHAPE,
batch_size=BATCH_SIZE,
shuffle=False)
sess = tf.Session()
init_op = tf.group(tf.local_variables_initializer(),
tf.global_variables_initializer())
sess.run(init_op)
@multithreading
def test_shuffle_queue():
whole_train_images = list()
for _ in range(3):
image_batch = valid_image_batch.eval()
whole_train_images.append(image_batch)
return [piece for blk in whole_train_images for piece in blk]
@multithreading
def test_unshuffle_queue():
whole_test_images = list()
while True:
try:
test_image = sess.run(test_image_batch)
whole_test_images.append(test_image)
print(test_image.shape)
except tf.errors.OutOfRangeError as e:
break
return [piece for blk in whole_test_images for piece in blk]
with sess:
total = test_shuffle_queue()
n = int(input('choose a image to test'))
print(total[n])
# Image.fromarray(np.array(total[n], dtype=np.uint8)).show() | 0.212232 | 0.166947 |
import os
import pathlib
from typing import List
import boto3
import botocore
def s3_bucket_exists(name: str) -> bool:
s3 = boto3.client("s3")
try:
s3.head_bucket(Bucket=name)
except botocore.exceptions.ClientError as e:
print(e)
return False
return True
def s3_get_object_names_from_dir(
bucket_name: str, dir_name: str, file_type: str = None
) -> List[str]:
s3 = boto3.resource("s3")
bucket = s3.Bucket(bucket_name) # pylint: disable=no-member
object_names = [
object_summary.key for object_summary in bucket.objects.filter(Prefix=dir_name)
]
if file_type is not None:
object_names = [
object_name
for object_name in object_names
if object_name.lower().endswith(file_type.lower())
]
return object_names
def s3_download_files(
bucket_name: str,
s3_object_paths: List[str],
destination_dir: str,
notify_if_exists: bool = False,
) -> None:
s3_client = boto3.client("s3")
s3_resource = boto3.resource("s3")
object_summary_list = [
s3_resource.ObjectSummary( # pylint: disable=no-member
bucket_name, s3_object_path
)
for s3_object_path in s3_object_paths
]
if not os.path.isdir(destination_dir):
pathlib.Path(destination_dir).mkdir(parents=True, exist_ok=True)
for object_index, object_summary in enumerate(object_summary_list):
destination_file_path = os.path.join(
destination_dir, os.path.basename(object_summary.key)
)
if not os.path.isfile(destination_file_path):
try:
s3_client.download_file( # pylint: disable=no-member
object_summary.bucket_name,
object_summary.key,
destination_file_path,
)
except botocore.exceptions.ClientError as e:
print(e)
print(
"Downloading file from %s:%s, %i/%i"
% (
object_summary.bucket_name,
object_summary.key,
object_index + 1,
len(object_summary_list),
)
)
else:
if notify_if_exists:
print(
"File already downloaded: %s:%s, %i/%i"
% (
object_summary.bucket_name,
object_summary.key,
object_index + 1,
len(object_summary_list),
)
)
def file_exists(bucket_name: str, s3_object_path: str) -> None:
s3 = boto3.resource("s3")
try:
s3.Object(bucket_name, s3_object_path).load() # pylint: disable=no-member
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "404":
return False
else:
raise
else:
return True
def upload_files(
bucket_name,
files_to_send: List[str],
s3_destination_object_dir: str,
notify_if_exists: bool = False,
) -> None:
s3 = boto3.client("s3")
for file_index, file_to_send in enumerate(files_to_send):
s3_destination_object_path = os.path.join(
s3_destination_object_dir, os.path.basename(file_to_send)
)
try:
if file_exists(bucket_name, s3_destination_object_path):
if notify_if_exists:
print(
"S3 object already exists %s:%s, %i/%i"
% (
bucket_name,
s3_destination_object_dir,
file_index + 1,
len(files_to_send),
)
)
continue
s3.upload_file(file_to_send, bucket_name, s3_destination_object_path)
except botocore.exceptions.ClientError as e:
print(e)
continue
print(
"Uploading file to %s:%s, %i/%i"
% (
bucket_name,
s3_destination_object_path,
file_index + 1,
len(files_to_send),
)
) | ft/s3_util.py | import os
import pathlib
from typing import List
import boto3
import botocore
def s3_bucket_exists(name: str) -> bool:
s3 = boto3.client("s3")
try:
s3.head_bucket(Bucket=name)
except botocore.exceptions.ClientError as e:
print(e)
return False
return True
def s3_get_object_names_from_dir(
bucket_name: str, dir_name: str, file_type: str = None
) -> List[str]:
s3 = boto3.resource("s3")
bucket = s3.Bucket(bucket_name) # pylint: disable=no-member
object_names = [
object_summary.key for object_summary in bucket.objects.filter(Prefix=dir_name)
]
if file_type is not None:
object_names = [
object_name
for object_name in object_names
if object_name.lower().endswith(file_type.lower())
]
return object_names
def s3_download_files(
bucket_name: str,
s3_object_paths: List[str],
destination_dir: str,
notify_if_exists: bool = False,
) -> None:
s3_client = boto3.client("s3")
s3_resource = boto3.resource("s3")
object_summary_list = [
s3_resource.ObjectSummary( # pylint: disable=no-member
bucket_name, s3_object_path
)
for s3_object_path in s3_object_paths
]
if not os.path.isdir(destination_dir):
pathlib.Path(destination_dir).mkdir(parents=True, exist_ok=True)
for object_index, object_summary in enumerate(object_summary_list):
destination_file_path = os.path.join(
destination_dir, os.path.basename(object_summary.key)
)
if not os.path.isfile(destination_file_path):
try:
s3_client.download_file( # pylint: disable=no-member
object_summary.bucket_name,
object_summary.key,
destination_file_path,
)
except botocore.exceptions.ClientError as e:
print(e)
print(
"Downloading file from %s:%s, %i/%i"
% (
object_summary.bucket_name,
object_summary.key,
object_index + 1,
len(object_summary_list),
)
)
else:
if notify_if_exists:
print(
"File already downloaded: %s:%s, %i/%i"
% (
object_summary.bucket_name,
object_summary.key,
object_index + 1,
len(object_summary_list),
)
)
def file_exists(bucket_name: str, s3_object_path: str) -> None:
s3 = boto3.resource("s3")
try:
s3.Object(bucket_name, s3_object_path).load() # pylint: disable=no-member
except botocore.exceptions.ClientError as e:
if e.response["Error"]["Code"] == "404":
return False
else:
raise
else:
return True
def upload_files(
bucket_name,
files_to_send: List[str],
s3_destination_object_dir: str,
notify_if_exists: bool = False,
) -> None:
s3 = boto3.client("s3")
for file_index, file_to_send in enumerate(files_to_send):
s3_destination_object_path = os.path.join(
s3_destination_object_dir, os.path.basename(file_to_send)
)
try:
if file_exists(bucket_name, s3_destination_object_path):
if notify_if_exists:
print(
"S3 object already exists %s:%s, %i/%i"
% (
bucket_name,
s3_destination_object_dir,
file_index + 1,
len(files_to_send),
)
)
continue
s3.upload_file(file_to_send, bucket_name, s3_destination_object_path)
except botocore.exceptions.ClientError as e:
print(e)
continue
print(
"Uploading file to %s:%s, %i/%i"
% (
bucket_name,
s3_destination_object_path,
file_index + 1,
len(files_to_send),
)
) | 0.254972 | 0.086439 |
from structure.abstract_note import AbstractNote
from timemodel.duration import Duration
class Note(AbstractNote):
"""
Class representation for a musical note.
"""
STANDARD_NOTES = {'W': Duration(1),
'H': Duration(1, 2),
'Q': Duration(1, 4),
'E': Duration(1, 8),
'S': Duration(1, 16),
'T': Duration(1, 32),
'X': Duration(1, 64),
}
def __init__(self, diatonic_pitch, base_duration, num_dots=0):
"""
Constructor.
Args
diatontic_pitch: ref. class DiatonicPitch.
base_duration: either a Duration, or key in STANDARD_NOTES (upper or lower case).
num_dots: number of duration extension dots.
"""
AbstractNote.__init__(self)
self.__diatonic_pitch = diatonic_pitch
self.__num_dots = num_dots
if type(base_duration) == Duration:
self.__base_duration = base_duration
elif isinstance(base_duration, str):
if base_duration.upper() in Note.STANDARD_NOTES.keys():
self.__base_duration = Note.STANDARD_NOTES[base_duration.upper()]
else:
raise Exception('Base duration can only be a Duration or string in key set [w, h, q, e, s, t. x]')
self.__duration = self.base_duration.apply_dots(num_dots)
self.__tied_to = None
self.__tied_from = None
@property
def diatonic_pitch(self):
return self.__diatonic_pitch
@diatonic_pitch.setter
def diatonic_pitch(self, new_pitch):
self.__diatonic_pitch = new_pitch
@property
def duration(self):
return self.__duration
@property
def base_duration(self):
return self.__base_duration
@property
def num_dots(self):
return self.__num_dots
@property
def is_tied_to(self):
return self.__tied_to is not None
@property
def is_tied_from(self):
return self.__tied_from is not None
@property
def tied_to(self):
return self.__tied_to
@property
def tied_from(self):
return self.__tied_from
@property
def is_rest(self):
return self.diatonic_pitch is None
def get_all_notes(self):
return [self]
def tie(self):
"""
Tie this note to the next note.
"""
original_parent = self.get_original_parent()
if original_parent is None:
raise Exception('Cannot tie note that has no parent')
note = self.next_note()
if note is None:
raise Exception('No next note to tie to.')
# notes must have the same pitch
if note.diatonic_pitch != self.diatonic_pitch:
raise Exception(
'Tied notes require to have same pitch {0} != {1}'.format(self.diatonic_pitch, note.diatonic_pitch))
self.__tied_to = note
note.__tied_from = self
def untie(self):
if not self.is_tied_to:
return
self.__tied_to.__tied_from = None
self.__tied_to = None
def next_note(self):
"""
Determine the successor Note within the context of the note structure parentage.
Returns:
The successor Note, or None if there is none, e.g. this is the last note.
"""
child = self
p = child.parent
while True:
if p is None:
break
next_str = p.get_next_child(child)
if next_str is not None:
if isinstance(next_str, Note):
return next_str
else:
return next_str.get_first_note()
else:
child = p
p = p.parent
# At this point, we are the last note in the structure - there is no next
return None
def prior_note(self):
"""
Determine the Note prior to this one within the context of the note structure parentage.
Returns:
The prior Note, or None is there is none, e.g. this is the first note.
"""
child = self
p = child.parent
while True:
if p is None:
break
next_str = p.get_prior_child(child)
if next_str is not None:
if isinstance(next_str, Note):
return next_str
else:
return next_str.get_last_note()
else:
child = p
p = p.parent
# At this point, we are the last note in the structure - there is no next
return None
    def apply_factor(self, factor):
        """
        Scale this note's durations and positional attributes by a factor.

        Args:
            factor: multiplicative scale applied to base duration, effective
                duration, relative position, and contextual reduction factor.
        """
        # NOTE(review): `*=` presumably rebinds to a new Duration rather than
        # mutating a shared STANDARD_NOTES entry -- confirm Duration semantics.
        self.__base_duration *= factor
        self.__duration *= factor
        self.relative_position *= factor
        self.contextual_reduction_factor *= factor
def reverse(self):
return self
def __str__(self):
dot_string = str(self.base_duration) + self.num_dots * '@'
return '[{0}<{1}>-({2}){3}] off={4} f={5}'.format(
self.diatonic_pitch if self.diatonic_pitch is not None else 'R', dot_string, self.duration,
'T' if self.is_tied_to else '', self.relative_position, self.contextual_reduction_factor) | structure/note.py | from structure.abstract_note import AbstractNote
from timemodel.duration import Duration
class Note(AbstractNote):
    """
    Class representation for a musical note.

    A Note whose diatonic_pitch is None is interpreted as a rest.
    """

    # Letter codes for the standard power-of-two note durations.
    STANDARD_NOTES = {'W': Duration(1),
                      'H': Duration(1, 2),
                      'Q': Duration(1, 4),
                      'E': Duration(1, 8),
                      'S': Duration(1, 16),
                      'T': Duration(1, 32),
                      'X': Duration(1, 64),
                      }

    def __init__(self, diatonic_pitch, base_duration, num_dots=0):
        """
        Constructor.

        Args:
            diatonic_pitch: ref. class DiatonicPitch; None denotes a rest.
            base_duration: either a Duration, or a key in STANDARD_NOTES (upper or lower case).
            num_dots: number of duration extension dots.

        Raises:
            Exception: if base_duration is neither a Duration nor a valid letter code.
        """
        AbstractNote.__init__(self)
        self.__diatonic_pitch = diatonic_pitch
        self.__num_dots = num_dots
        # isinstance instead of `type(x) ==` tolerates Duration subclasses.
        if isinstance(base_duration, Duration):
            self.__base_duration = base_duration
        elif isinstance(base_duration, str) and base_duration.upper() in Note.STANDARD_NOTES:
            self.__base_duration = Note.STANDARD_NOTES[base_duration.upper()]
        else:
            # Bugfix: previously a non-str, non-Duration argument fell through
            # without raising and crashed later with AttributeError; fail fast.
            raise Exception('Base duration can only be a Duration or string in key set [w, h, q, e, s, t, x]')
        self.__duration = self.base_duration.apply_dots(num_dots)
        self.__tied_to = None
        self.__tied_from = None

    @property
    def diatonic_pitch(self):
        return self.__diatonic_pitch

    @diatonic_pitch.setter
    def diatonic_pitch(self, new_pitch):
        self.__diatonic_pitch = new_pitch

    @property
    def duration(self):
        # Base duration with dots applied (and any factors applied since).
        return self.__duration

    @property
    def base_duration(self):
        return self.__base_duration

    @property
    def num_dots(self):
        return self.__num_dots

    @property
    def is_tied_to(self):
        # True when this note ties forward into its successor.
        return self.__tied_to is not None

    @property
    def is_tied_from(self):
        # True when the prior note ties into this one.
        return self.__tied_from is not None

    @property
    def tied_to(self):
        return self.__tied_to

    @property
    def tied_from(self):
        return self.__tied_from

    @property
    def is_rest(self):
        return self.diatonic_pitch is None

    def get_all_notes(self):
        # A Note is a leaf of the note-structure tree.
        return [self]

    def tie(self):
        """
        Tie this note to the next note.

        Raises:
            Exception: if this note has no parent, no successor, or the
                successor has a different pitch.
        """
        original_parent = self.get_original_parent()
        if original_parent is None:
            raise Exception('Cannot tie note that has no parent')
        note = self.next_note()
        if note is None:
            raise Exception('No next note to tie to.')
        # notes must have the same pitch
        if note.diatonic_pitch != self.diatonic_pitch:
            raise Exception(
                'Tied notes require to have same pitch {0} != {1}'.format(self.diatonic_pitch, note.diatonic_pitch))
        self.__tied_to = note
        note.__tied_from = self

    def untie(self):
        """Break the forward tie from this note, if any."""
        if not self.is_tied_to:
            return
        self.__tied_to.__tied_from = None
        self.__tied_to = None

    def next_note(self):
        """
        Determine the successor Note within the context of the note structure parentage.

        Returns:
            The successor Note, or None if there is none, e.g. this is the last note.
        """
        child = self
        p = child.parent
        while p is not None:
            next_str = p.get_next_child(child)
            if next_str is not None:
                if isinstance(next_str, Note):
                    return next_str
                # A sub-structure: its first note is the successor.
                return next_str.get_first_note()
            # No later sibling at this level; climb one level and retry.
            child = p
            p = p.parent
        # We are the last note in the structure - there is no next.
        return None

    def prior_note(self):
        """
        Determine the Note prior to this one within the context of the note structure parentage.

        Returns:
            The prior Note, or None if there is none, e.g. this is the first note.
        """
        child = self
        p = child.parent
        while p is not None:
            prior_str = p.get_prior_child(child)
            if prior_str is not None:
                if isinstance(prior_str, Note):
                    return prior_str
                # A sub-structure: its last note is the predecessor.
                return prior_str.get_last_note()
            child = p
            p = p.parent
        # We are the first note in the structure - there is no prior.
        return None

    def apply_factor(self, factor):
        """Scale duration and positional attributes by a multiplicative factor."""
        self.__base_duration *= factor
        self.__duration *= factor
        self.relative_position *= factor
        self.contextual_reduction_factor *= factor

    def reverse(self):
        # A single note is its own reversal.
        return self

    def __str__(self):
        dot_string = str(self.base_duration) + self.num_dots * '@'
        return '[{0}<{1}>-({2}){3}] off={4} f={5}'.format(
            self.diatonic_pitch if self.diatonic_pitch is not None else 'R', dot_string, self.duration,
            'T' if self.is_tied_to else '', self.relative_position, self.contextual_reduction_factor)
from codecs import open
from os import path
from setuptools import setup, Extension
from setuptools.command.sdist import sdist as _sdist
import sys
try:
from Cython.Distutils import build_ext
except ImportError:
USE_CYTHON = False
else:
USE_CYTHON = True
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
# http://stackoverflow.com/a/4515279/1062499
cmdclass = { }
# http://stackoverflow.com/a/18418524/1062499
class sdist(_sdist):
    """sdist that regenerates the Cython translation before packaging.

    Ensures the compiled .cpp shipped in the source distribution is up to
    date (pattern from http://stackoverflow.com/a/18418524/1062499).
    """
    def run(self):
        # Make sure the compiled Cython files in the distribution are up-to-date
        from Cython.Build import cythonize
        # Bugfix: was 'cython/mycythonmodule.pyx' (left over from the SO
        # answer); the extension below is actually built from srilm.pyx.
        cythonize(['srilm.pyx'])
        _sdist.run(self)
cmdclass['sdist'] = sdist
if USE_CYTHON:
cmdclass.update({ 'build_ext': build_ext })
ext_modules=[
Extension("srilm",
sources=["srilm.pyx" if USE_CYTHON else "srilm.cpp",
'srilm/include/Array.cc',
'srilm/include/CachedMem.cc',
'srilm/include/IntervalHeap.cc',
'srilm/include/LHash.cc',
'srilm/include/Map.cc',
'srilm/include/Map2.cc',
#'srilm/include/NgramStats.cc',
'srilm/include/SArray.cc',
'srilm/include/Trellis.cc',
'srilm/include/Trie.cc',
'srilm/misc/src/Debug.cc',
'srilm/misc/src/File.cc',
'srilm/misc/src/MStringTokUtil.cc',
'srilm/misc/src/fake-rand48.c',
'srilm/misc/src/fcheck.c',
'srilm/misc/src/option.c',
#'srilm/misc/src/tclmain.cc',
#'srilm/misc/src/testFile.cc',
'srilm/misc/src/tls.cc',
'srilm/misc/src/tserror.cc',
#'srilm/misc/src/version.c',
'srilm/misc/src/zio.c',
#'srilm/misc/src/ztest.c',
'srilm/lm/src/matherr.c',
'srilm/lm/src/Prob.cc',
'srilm/lm/src/Counts.cc',
'srilm/lm/src/XCount.cc',
'srilm/lm/src/Vocab.cc',
'srilm/lm/src/VocabMap.cc',
'srilm/lm/src/VocabMultiMap.cc',
'srilm/lm/src/VocabDistance.cc',
'srilm/lm/src/SubVocab.cc',
'srilm/lm/src/MultiwordVocab.cc',
'srilm/lm/src/TextStats.cc',
'srilm/lm/src/LM.cc',
'srilm/lm/src/LMClient.cc',
'srilm/lm/src/LMStats.cc',
'srilm/lm/src/RefList.cc',
'srilm/lm/src/Bleu.cc',
'srilm/lm/src/NBest.cc',
'srilm/lm/src/NBestSet.cc',
'srilm/lm/src/NgramLM.cc',
'srilm/lm/src/NgramStatsInt.cc',
'srilm/lm/src/NgramStatsShort.cc',
'srilm/lm/src/NgramStatsLong.cc',
'srilm/lm/src/NgramStatsLongLong.cc',
'srilm/lm/src/NgramStatsFloat.cc',
'srilm/lm/src/NgramStatsDouble.cc',
'srilm/lm/src/NgramStatsXCount.cc',
'srilm/lm/src/NgramProbArrayTrie.cc',
'srilm/lm/src/NgramCountLM.cc',
'srilm/lm/src/MSWebNgramLM.cc',
'srilm/lm/src/Discount.cc',
'srilm/lm/src/ClassNgram.cc',
'srilm/lm/src/SimpleClassNgram.cc',
'srilm/lm/src/DFNgram.cc',
'srilm/lm/src/SkipNgram.cc',
'srilm/lm/src/HiddenNgram.cc',
'srilm/lm/src/HiddenSNgram.cc',
'srilm/lm/src/VarNgram.cc',
'srilm/lm/src/DecipherNgram.cc',
'srilm/lm/src/TaggedVocab.cc',
'srilm/lm/src/TaggedNgram.cc',
'srilm/lm/src/TaggedNgramStats.cc',
'srilm/lm/src/StopNgram.cc',
'srilm/lm/src/StopNgramStats.cc',
'srilm/lm/src/MultiwordLM.cc',
'srilm/lm/src/NonzeroLM.cc',
'srilm/lm/src/BayesMix.cc',
'srilm/lm/src/LoglinearMix.cc',
'srilm/lm/src/AdaptiveMix.cc',
'srilm/lm/src/AdaptiveMarginals.cc',
'srilm/lm/src/CacheLM.cc',
'srilm/lm/src/DynamicLM.cc',
'srilm/lm/src/HMMofNgrams.cc',
'srilm/lm/src/WordAlign.cc',
'srilm/lm/src/WordLattice.cc',
'srilm/lm/src/WordMesh.cc',
'srilm/lm/src/simpleTrigram.cc',
'srilm/lm/src/LMThreads.cc',
'srilm/lm/src/MEModel.cc',
'srilm/lm/src/hmaxent.cc',
'srilm/dstruct/src/Array.cc',
'srilm/dstruct/src/BlockMalloc.cc',
'srilm/dstruct/src/CachedMem.cc',
'srilm/dstruct/src/DStructThreads.cc',
'srilm/dstruct/src/IntervalHeap.cc',
'srilm/dstruct/src/LHash.cc',
'srilm/dstruct/src/LHashTrie.cc',
#'srilm/dstruct/src/Map.cc',
'srilm/dstruct/src/Map2.cc',
'srilm/dstruct/src/MemStats.cc',
'srilm/dstruct/src/SArray.cc',
'srilm/dstruct/src/SArrayTrie.cc',
'srilm/dstruct/src/Trie.cc',
#'srilm/dstruct/src/benchHash.cc',
'srilm/dstruct/src/maxalloc.c',
'srilm/dstruct/src/qsort.c',
#'srilm/dstruct/src/testArray.cc',
#'srilm/dstruct/src/testBlockMalloc.cc',
#'srilm/dstruct/src/testCachedMem.cc',
#'srilm/dstruct/src/testFloatMap.cc',
#'srilm/dstruct/src/testHash.cc',
#'srilm/dstruct/src/testMap.cc',
#'srilm/dstruct/src/testMap2.cc',
#'srilm/dstruct/src/testSizes.cc',
#'srilm/dstruct/src/testTrie.cc',
],
libraries=['z', 'iconv'],
include_dirs=['srilm/lm/src', 'srilm/include'],
language="c++",
define_macros=[
('HAVE_ZOPEN', 1),
('INSTANTIATE_TEMPLATES', 1),
#('NO_BLOCK_MALLOC', 1),
]
)
]
setup(
name='pysrilm',
version='0.0.1',
description='Python Interface to SRILM',
long_description=LONG_DESCRIPTION,
# The project's main homepage.
url='https://github.com/wroberts/pysrilm',
# Author details
author='<NAME>',
author_email='<EMAIL>',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Text Processing',
'Natural Language :: English',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='ngram statistics language model',
ext_modules = ext_modules,
cmdclass = cmdclass,
install_requires=[],
) | setup.py | from codecs import open
from os import path
from setuptools import setup, Extension
from setuptools.command.sdist import sdist as _sdist
import sys
try:
from Cython.Distutils import build_ext
except ImportError:
USE_CYTHON = False
else:
USE_CYTHON = True
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
# http://stackoverflow.com/a/4515279/1062499
cmdclass = { }
# http://stackoverflow.com/a/18418524/1062499
class sdist(_sdist):
    """sdist that regenerates the Cython translation before packaging.

    Ensures the compiled .cpp shipped in the source distribution is up to
    date (pattern from http://stackoverflow.com/a/18418524/1062499).
    """
    def run(self):
        # Make sure the compiled Cython files in the distribution are up-to-date
        from Cython.Build import cythonize
        # Bugfix: was 'cython/mycythonmodule.pyx' (left over from the SO
        # answer); the extension below is actually built from srilm.pyx.
        cythonize(['srilm.pyx'])
        _sdist.run(self)
cmdclass['sdist'] = sdist
if USE_CYTHON:
cmdclass.update({ 'build_ext': build_ext })
ext_modules=[
Extension("srilm",
sources=["srilm.pyx" if USE_CYTHON else "srilm.cpp",
'srilm/include/Array.cc',
'srilm/include/CachedMem.cc',
'srilm/include/IntervalHeap.cc',
'srilm/include/LHash.cc',
'srilm/include/Map.cc',
'srilm/include/Map2.cc',
#'srilm/include/NgramStats.cc',
'srilm/include/SArray.cc',
'srilm/include/Trellis.cc',
'srilm/include/Trie.cc',
'srilm/misc/src/Debug.cc',
'srilm/misc/src/File.cc',
'srilm/misc/src/MStringTokUtil.cc',
'srilm/misc/src/fake-rand48.c',
'srilm/misc/src/fcheck.c',
'srilm/misc/src/option.c',
#'srilm/misc/src/tclmain.cc',
#'srilm/misc/src/testFile.cc',
'srilm/misc/src/tls.cc',
'srilm/misc/src/tserror.cc',
#'srilm/misc/src/version.c',
'srilm/misc/src/zio.c',
#'srilm/misc/src/ztest.c',
'srilm/lm/src/matherr.c',
'srilm/lm/src/Prob.cc',
'srilm/lm/src/Counts.cc',
'srilm/lm/src/XCount.cc',
'srilm/lm/src/Vocab.cc',
'srilm/lm/src/VocabMap.cc',
'srilm/lm/src/VocabMultiMap.cc',
'srilm/lm/src/VocabDistance.cc',
'srilm/lm/src/SubVocab.cc',
'srilm/lm/src/MultiwordVocab.cc',
'srilm/lm/src/TextStats.cc',
'srilm/lm/src/LM.cc',
'srilm/lm/src/LMClient.cc',
'srilm/lm/src/LMStats.cc',
'srilm/lm/src/RefList.cc',
'srilm/lm/src/Bleu.cc',
'srilm/lm/src/NBest.cc',
'srilm/lm/src/NBestSet.cc',
'srilm/lm/src/NgramLM.cc',
'srilm/lm/src/NgramStatsInt.cc',
'srilm/lm/src/NgramStatsShort.cc',
'srilm/lm/src/NgramStatsLong.cc',
'srilm/lm/src/NgramStatsLongLong.cc',
'srilm/lm/src/NgramStatsFloat.cc',
'srilm/lm/src/NgramStatsDouble.cc',
'srilm/lm/src/NgramStatsXCount.cc',
'srilm/lm/src/NgramProbArrayTrie.cc',
'srilm/lm/src/NgramCountLM.cc',
'srilm/lm/src/MSWebNgramLM.cc',
'srilm/lm/src/Discount.cc',
'srilm/lm/src/ClassNgram.cc',
'srilm/lm/src/SimpleClassNgram.cc',
'srilm/lm/src/DFNgram.cc',
'srilm/lm/src/SkipNgram.cc',
'srilm/lm/src/HiddenNgram.cc',
'srilm/lm/src/HiddenSNgram.cc',
'srilm/lm/src/VarNgram.cc',
'srilm/lm/src/DecipherNgram.cc',
'srilm/lm/src/TaggedVocab.cc',
'srilm/lm/src/TaggedNgram.cc',
'srilm/lm/src/TaggedNgramStats.cc',
'srilm/lm/src/StopNgram.cc',
'srilm/lm/src/StopNgramStats.cc',
'srilm/lm/src/MultiwordLM.cc',
'srilm/lm/src/NonzeroLM.cc',
'srilm/lm/src/BayesMix.cc',
'srilm/lm/src/LoglinearMix.cc',
'srilm/lm/src/AdaptiveMix.cc',
'srilm/lm/src/AdaptiveMarginals.cc',
'srilm/lm/src/CacheLM.cc',
'srilm/lm/src/DynamicLM.cc',
'srilm/lm/src/HMMofNgrams.cc',
'srilm/lm/src/WordAlign.cc',
'srilm/lm/src/WordLattice.cc',
'srilm/lm/src/WordMesh.cc',
'srilm/lm/src/simpleTrigram.cc',
'srilm/lm/src/LMThreads.cc',
'srilm/lm/src/MEModel.cc',
'srilm/lm/src/hmaxent.cc',
'srilm/dstruct/src/Array.cc',
'srilm/dstruct/src/BlockMalloc.cc',
'srilm/dstruct/src/CachedMem.cc',
'srilm/dstruct/src/DStructThreads.cc',
'srilm/dstruct/src/IntervalHeap.cc',
'srilm/dstruct/src/LHash.cc',
'srilm/dstruct/src/LHashTrie.cc',
#'srilm/dstruct/src/Map.cc',
'srilm/dstruct/src/Map2.cc',
'srilm/dstruct/src/MemStats.cc',
'srilm/dstruct/src/SArray.cc',
'srilm/dstruct/src/SArrayTrie.cc',
'srilm/dstruct/src/Trie.cc',
#'srilm/dstruct/src/benchHash.cc',
'srilm/dstruct/src/maxalloc.c',
'srilm/dstruct/src/qsort.c',
#'srilm/dstruct/src/testArray.cc',
#'srilm/dstruct/src/testBlockMalloc.cc',
#'srilm/dstruct/src/testCachedMem.cc',
#'srilm/dstruct/src/testFloatMap.cc',
#'srilm/dstruct/src/testHash.cc',
#'srilm/dstruct/src/testMap.cc',
#'srilm/dstruct/src/testMap2.cc',
#'srilm/dstruct/src/testSizes.cc',
#'srilm/dstruct/src/testTrie.cc',
],
libraries=['z', 'iconv'],
include_dirs=['srilm/lm/src', 'srilm/include'],
language="c++",
define_macros=[
('HAVE_ZOPEN', 1),
('INSTANTIATE_TEMPLATES', 1),
#('NO_BLOCK_MALLOC', 1),
]
)
]
setup(
name='pysrilm',
version='0.0.1',
description='Python Interface to SRILM',
long_description=LONG_DESCRIPTION,
# The project's main homepage.
url='https://github.com/wroberts/pysrilm',
# Author details
author='<NAME>',
author_email='<EMAIL>',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Text Processing',
'Natural Language :: English',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
# What does your project relate to?
keywords='ngram statistics language model',
ext_modules = ext_modules,
cmdclass = cmdclass,
install_requires=[],
) | 0.234144 | 0.069038 |
from mutagenmonlib.local.run import *
from mutagenmonlib.local.file import *
global_session_config = {}
def session_config():
return global_session_config
def mutagen_sync_list():
st = run(
[cfg('MUTAGEN_PATH'), 'sync', 'list'],
shell=True,
interactive_error=False)
st = st.replace('Attempting to start Mutagen daemon...', '')
st = st.replace('Started Mutagen daemon in background (terminate with "mutagen daemon stop")', '')
st = st.replace('\n\t', '\n ')
st = re.sub(r"Labels: .*?\n", "", st)
st = st.strip()
st = st.strip('-')
st = format_current_datetime() + "\n" + st
return st
def stop_session(sname):
return run(
[cfg('MUTAGEN_PATH'), 'sync', 'terminate', sname],
shell = True,
interactive_error = False)
def start_session(sname):
ca = session_config()[sname].split()
ca[0] = cfg('MUTAGEN_PATH')
return run(
ca,
shell = True,
interactive_error = False)
def init_session_dict():
return {x: {} for x in session_config()}
def init_session_list():
return {x: [] for x in session_config()}
def init_session_default(dflt):
return {x: dflt for x in session_config()}
def get_session_status():
st = mutagen_sync_list()
sa = st.splitlines()
name = ''
aname = ''
astate = ''
session_status = init_session_dict()
conflicts = init_session_list()
side = 0
for s in sa:
s = s.strip()
if s.startswith('Name: '):
name = s[6:]
# Detect if there are duplicate sessions with same name
if session_status[name]:
session_status[name]['duplicate'] = "dupl"
else:
session_status[name]['duplicate'] = ''
session_status[name]['conflicts'] = 0
session_status[name]['problems'] = 0
if s.startswith('Identifier: '):
session_status[name]['id'] = s[12:]
if s.startswith('Status: '):
status = s[8:]
session_status[name]['status'] = status
if s.startswith('Alpha:'):
side = 1
if s.startswith('Beta:'):
side = 2
if s.startswith('URL: '):
session_status[name]['url' + str(side)] = s[5:]
if ':/' in s[5:]:
session_status[name]['transport' + str(side)] = 'ssh'
session_status[name]['server' + str(side)] = s[5:s.find(':/')]
session_status[name]['dir' + str(side)] = s[s.find(':/')+1:]
else:
session_status[name]['transport' + str(side)] = 'local'
if s.startswith('Conflicts:'):
session_status[name]['conflicts'] = 1
if s.startswith('Problems:'):
session_status[name]['problems'] = 1
if s.startswith('(alpha) '):
pos = get_matching_open_parenth(s, len(s) - 1)
aname = s[8:pos - 1]
astate = s[pos + 1:]
if s.startswith('(beta) '):
pos = get_matching_open_parenth(s, len(s) - 1)
bname = s[8:pos - 1]
bstate = s[pos + 1:]
conflicts[name].append({
'aname': aname,
'bname': bname,
'astate': astate,
'bstate': bstate,
'autoresolved': False
})
return st, session_status, conflicts
def get_sessions():
global global_session_config
fa = file_to_list_strip(cfg('MUTAGEN_SESSIONS_BAT_FILE'))
for s in fa:
if s.startswith('rem '):
continue
result = re.search(r'--name=(.*?) ', s)
if result is None:
continue
sname = result.group(1)
if sname:
if sname in global_session_config:
dlg = wx.MessageDialog(
None,
sname + ' session name is duplicate in ' + cfg('MUTAGEN_SESSIONS_BAT_FILE'),
'MutagenMon',
wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
global_session_config[sname] = s
def restart_session(sname):
try:
stop_session(sname)
except Exception as e:
pass
try:
start_session(sname)
except Exception as e:
pass
def get_worst_code(session_code):
worst_code = 100
for sname in session_config():
worst_code = min(worst_code, session_code[sname])
return worst_code
def get_conflict_names(conflicts, session_code):
cnames = set()
for sname in session_config():
if session_code[sname] and conflicts[sname]:
for conflict in conflicts[sname]:
if conflict['autoresolved']:
continue
cnames.add(sname + ':' + conflict['aname'])
return cnames | mutagenmonlib/remote/mutagen.py | from mutagenmonlib.local.run import *
from mutagenmonlib.local.file import *
global_session_config = {}
def session_config():
return global_session_config
def mutagen_sync_list():
    """Run `mutagen sync list` and return its cleaned-up output.

    Strips daemon-startup chatter and Labels lines, normalizes indentation,
    and prefixes the current date/time so the text can be shown as a status
    snapshot.
    """
    st = run(
        [cfg('MUTAGEN_PATH'), 'sync', 'list'],
        shell=True,
        interactive_error=False)
    # Drop the daemon auto-start notices mutagen may prepend on first run.
    st = st.replace('Attempting to start Mutagen daemon...', '')
    st = st.replace('Started Mutagen daemon in background (terminate with "mutagen daemon stop")', '')
    st = st.replace('\n\t', '\n ')
    # Labels lines are noise for monitoring purposes.
    st = re.sub(r"Labels: .*?\n", "", st)
    st = st.strip()
    st = st.strip('-')
    st = format_current_datetime() + "\n" + st
    return st
def stop_session(sname):
return run(
[cfg('MUTAGEN_PATH'), 'sync', 'terminate', sname],
shell = True,
interactive_error = False)
def start_session(sname):
ca = session_config()[sname].split()
ca[0] = cfg('MUTAGEN_PATH')
return run(
ca,
shell = True,
interactive_error = False)
def init_session_dict():
return {x: {} for x in session_config()}
def init_session_list():
return {x: [] for x in session_config()}
def init_session_default(dflt):
return {x: dflt for x in session_config()}
def get_session_status():
    """Parse `mutagen sync list` output into per-session status structures.

    Returns:
        A tuple (st, session_status, conflicts) where st is the raw listing
        text, session_status maps session name -> dict of parsed fields
        ('status', 'id', 'url1'/'url2', per-side transport/server/dir,
        'duplicate', 'conflicts', 'problems'), and conflicts maps session
        name -> list of conflict records.

    NOTE(review): assumes every 'Name:' in the listing is a configured
    session -- an unknown name would raise KeyError on session_status[name];
    confirm against how session_config() is populated.
    """
    st = mutagen_sync_list()
    sa = st.splitlines()
    name = ''
    aname = ''
    astate = ''
    session_status = init_session_dict()
    conflicts = init_session_list()
    # side tracks which endpoint section we are in: 1 = Alpha, 2 = Beta.
    side = 0
    for s in sa:
        s = s.strip()
        if s.startswith('Name: '):
            name = s[6:]
            # Detect if there are duplicate sessions with same name
            if session_status[name]:
                session_status[name]['duplicate'] = "dupl"
            else:
                session_status[name]['duplicate'] = ''
            session_status[name]['conflicts'] = 0
            session_status[name]['problems'] = 0
        if s.startswith('Identifier: '):
            session_status[name]['id'] = s[12:]
        if s.startswith('Status: '):
            status = s[8:]
            session_status[name]['status'] = status
        if s.startswith('Alpha:'):
            side = 1
        if s.startswith('Beta:'):
            side = 2
        if s.startswith('URL: '):
            session_status[name]['url' + str(side)] = s[5:]
            # A ':/' in the URL marks a remote endpoint of the form host:/path.
            if ':/' in s[5:]:
                session_status[name]['transport' + str(side)] = 'ssh'
                session_status[name]['server' + str(side)] = s[5:s.find(':/')]
                session_status[name]['dir' + str(side)] = s[s.find(':/')+1:]
            else:
                session_status[name]['transport' + str(side)] = 'local'
        if s.startswith('Conflicts:'):
            session_status[name]['conflicts'] = 1
        if s.startswith('Problems:'):
            session_status[name]['problems'] = 1
        if s.startswith('(alpha) '):
            # Conflict entry for the alpha side: '(alpha) path (state)'.
            pos = get_matching_open_parenth(s, len(s) - 1)
            aname = s[8:pos - 1]
            astate = s[pos + 1:]
        if s.startswith('(beta) '):
            # The beta line completes the conflict record begun by '(alpha)'.
            # NOTE(review): slice starts at 8 although '(beta) ' is 7 chars;
            # looks like it skips one extra character -- confirm against
            # actual mutagen output formatting.
            pos = get_matching_open_parenth(s, len(s) - 1)
            bname = s[8:pos - 1]
            bstate = s[pos + 1:]
            conflicts[name].append({
                'aname': aname,
                'bname': bname,
                'astate': astate,
                'bstate': bstate,
                'autoresolved': False
            })
    return st, session_status, conflicts
def get_sessions():
global global_session_config
fa = file_to_list_strip(cfg('MUTAGEN_SESSIONS_BAT_FILE'))
for s in fa:
if s.startswith('rem '):
continue
result = re.search(r'--name=(.*?) ', s)
if result is None:
continue
sname = result.group(1)
if sname:
if sname in global_session_config:
dlg = wx.MessageDialog(
None,
sname + ' session name is duplicate in ' + cfg('MUTAGEN_SESSIONS_BAT_FILE'),
'MutagenMon',
wx.OK | wx.ICON_INFORMATION)
dlg.ShowModal()
dlg.Destroy()
global_session_config[sname] = s
def restart_session(sname):
    """Best-effort restart of a sync session: terminate it, then start it.

    Failures in either step are deliberately swallowed -- e.g. the session
    may not be running when we try to stop it, and the caller only cares
    that a start was attempted.
    """
    try:
        stop_session(sname)
    except Exception:
        # Removed unused `as e` binding; intentionally ignore stop failures.
        pass
    try:
        start_session(sname)
    except Exception:
        pass
def get_worst_code(session_code):
    """Return the lowest status code over all configured sessions.

    Seeds the minimum with 100, so the result is 100 when no sessions
    are configured.
    """
    return min([100] + [session_code[sname] for sname in session_config()])
def get_conflict_names(conflicts, session_code):
    """Collect 'session:alpha-path' labels for unresolved conflicts.

    Only sessions with a truthy status code and a non-empty conflict list
    contribute; auto-resolved conflicts are skipped.
    """
    labels = set()
    for sname in session_config():
        if not (session_code[sname] and conflicts[sname]):
            continue
        labels.update(
            sname + ':' + record['aname']
            for record in conflicts[sname]
            if not record['autoresolved'])
    return labels
from functools import partial
from PySide2.QtCore import *
from PySide2.QtWidgets import *
from ..editor import EditorScene
from .layerlist import LayerListWidget
class InspectorWidget(QWidget):
image_changed = Signal()
scene_changed = Signal(EditorScene)
def __init__(self):
super().__init__()
self.scene = None
self.current_image = 0
self._slider_down_value = 0
self._setup_ui()
self.slider_box.hide()
self.layer_box.hide()
def set_scene(self, scene):
self.scene = scene
self.current_image = 0
if not scene:
self.slider_box.hide()
self.layer_box.hide()
self._remove_tool_inspector()
return
self._add_layer_widgets()
self.slider.setValue(0)
self.slider.setMaximum(self.scene.image_count-1)
self.scene_changed.emit(scene)
self.slider_box.show()
self.layer_box.show()
def show_next(self):
if self.current_image < self.scene.image_count-1:
command = ChangeImageCommand(
self.slider, self.current_image, self.current_image + 1)
command.setText("Next Image")
self.scene.undo_stack.push(command)
def show_previous(self):
if self.current_image > 0:
command = ChangeImageCommand(
self.slider, self.current_image, self.current_image - 1)
command.setText("Previous Image")
self.scene.undo_stack.push(command)
def change_image(self, idx):
self.current_image = idx
active_layer = self.scene.active_layer
self.scene.load(idx)
self.slider_box.setTitle("Image {0}/{1}".format(idx+1, self.scene.image_count))
self._activate_layer(active_layer)
self.image_changed.emit()
def show_tool_inspector(self):
self._remove_tool_inspector()
self._add_tool_inspector()
def _activate_layer(self, idx):
self.scene.active_layer = idx
self.scene.update()
self.show_tool_inspector()
def _slider_pressed(self):
self._slider_down_value = self.slider.value()
def _slider_released(self):
command = ChangeImageCommand(
self.slider, self._slider_down_value, self.slider.value())
command.setText("Change Image")
self.scene.undo_stack.push(command)
def _add_tool_inspector(self):
idx = self.scene.active_layer
widget = self.scene.layers.tool_widget
if widget:
self.dock_layout.insertWidget(1, widget)
def _remove_tool_inspector(self):
if self.dock_layout.count() <= 3:
return
widget = self.dock_layout.itemAt(1).widget()
if widget:
widget.deleteLater()
def _add_layer_widgets(self):
self.layer_box.clear()
for index, name in enumerate(self.scene.data_store.folders):
self.layer_box.add(name.title())
def _change_layer_opacity(self, idx, value):
self.scene.set_layer_opacity(idx, value)
def _setup_ui(self):
self.dock_layout = QVBoxLayout(self)
self.dock_layout.setContentsMargins(4, 4, 4, 0)
self.slider_box = QGroupBox("Images")
self.slider_box.setObjectName("imageSlider")
hlayout = QHBoxLayout(self.slider_box)
arrow_left = QToolButton(self)
arrow_left.setMaximumSize(25, 25)
arrow_left.setArrowType(Qt.LeftArrow)
left_action = QAction()
left_action.triggered.connect(self.show_previous)
arrow_left.setDefaultAction(left_action)
hlayout.addWidget(arrow_left)
self.slider = QSlider(Qt.Horizontal)
self.slider.setValue(0)
self.slider.valueChanged.connect(self.change_image)
self.slider.sliderPressed.connect(self._slider_pressed)
self.slider.sliderReleased.connect(self._slider_released)
hlayout.addWidget(self.slider)
arrow_right = QToolButton()
arrow_right.setMaximumSize(25, 25)
arrow_right.setArrowType(Qt.RightArrow)
right_action = QAction()
right_action.triggered.connect(self.show_next)
arrow_right.setDefaultAction(right_action)
hlayout.addWidget(arrow_right)
self.dock_layout.addWidget(self.slider_box)
self.layer_box = LayerListWidget()
self.layer_box.opacity_changed.connect(self._change_layer_opacity)
self.layer_box.layer_activated.connect(self._activate_layer)
self.dock_layout.addWidget(self.layer_box)
self.dock_layout.addItem(
QSpacerItem(1, 1, QSizePolicy.Minimum, QSizePolicy.Expanding))
class ChangeImageCommand(QUndoCommand):
def __init__(self, slider, old_value, new_value):
super().__init__()
self.slider = slider
self.old_value = old_value
self.new_value = new_value
def undo(self):
if self.slider:
self.slider.setValue(self.old_value)
def redo(self):
if self.slider:
self.slider.setValue(self.new_value) | segmate/widgets/inspector.py | from functools import partial
from PySide2.QtCore import *
from PySide2.QtWidgets import *
from ..editor import EditorScene
from .layerlist import LayerListWidget
class InspectorWidget(QWidget):
image_changed = Signal()
scene_changed = Signal(EditorScene)
def __init__(self):
super().__init__()
self.scene = None
self.current_image = 0
self._slider_down_value = 0
self._setup_ui()
self.slider_box.hide()
self.layer_box.hide()
def set_scene(self, scene):
self.scene = scene
self.current_image = 0
if not scene:
self.slider_box.hide()
self.layer_box.hide()
self._remove_tool_inspector()
return
self._add_layer_widgets()
self.slider.setValue(0)
self.slider.setMaximum(self.scene.image_count-1)
self.scene_changed.emit(scene)
self.slider_box.show()
self.layer_box.show()
def show_next(self):
if self.current_image < self.scene.image_count-1:
command = ChangeImageCommand(
self.slider, self.current_image, self.current_image + 1)
command.setText("Next Image")
self.scene.undo_stack.push(command)
def show_previous(self):
if self.current_image > 0:
command = ChangeImageCommand(
self.slider, self.current_image, self.current_image - 1)
command.setText("Previous Image")
self.scene.undo_stack.push(command)
def change_image(self, idx):
self.current_image = idx
active_layer = self.scene.active_layer
self.scene.load(idx)
self.slider_box.setTitle("Image {0}/{1}".format(idx+1, self.scene.image_count))
self._activate_layer(active_layer)
self.image_changed.emit()
def show_tool_inspector(self):
self._remove_tool_inspector()
self._add_tool_inspector()
def _activate_layer(self, idx):
self.scene.active_layer = idx
self.scene.update()
self.show_tool_inspector()
def _slider_pressed(self):
self._slider_down_value = self.slider.value()
def _slider_released(self):
command = ChangeImageCommand(
self.slider, self._slider_down_value, self.slider.value())
command.setText("Change Image")
self.scene.undo_stack.push(command)
def _add_tool_inspector(self):
idx = self.scene.active_layer
widget = self.scene.layers.tool_widget
if widget:
self.dock_layout.insertWidget(1, widget)
def _remove_tool_inspector(self):
if self.dock_layout.count() <= 3:
return
widget = self.dock_layout.itemAt(1).widget()
if widget:
widget.deleteLater()
def _add_layer_widgets(self):
self.layer_box.clear()
for index, name in enumerate(self.scene.data_store.folders):
self.layer_box.add(name.title())
def _change_layer_opacity(self, idx, value):
self.scene.set_layer_opacity(idx, value)
def _setup_ui(self):
self.dock_layout = QVBoxLayout(self)
self.dock_layout.setContentsMargins(4, 4, 4, 0)
self.slider_box = QGroupBox("Images")
self.slider_box.setObjectName("imageSlider")
hlayout = QHBoxLayout(self.slider_box)
arrow_left = QToolButton(self)
arrow_left.setMaximumSize(25, 25)
arrow_left.setArrowType(Qt.LeftArrow)
left_action = QAction()
left_action.triggered.connect(self.show_previous)
arrow_left.setDefaultAction(left_action)
hlayout.addWidget(arrow_left)
self.slider = QSlider(Qt.Horizontal)
self.slider.setValue(0)
self.slider.valueChanged.connect(self.change_image)
self.slider.sliderPressed.connect(self._slider_pressed)
self.slider.sliderReleased.connect(self._slider_released)
hlayout.addWidget(self.slider)
arrow_right = QToolButton()
arrow_right.setMaximumSize(25, 25)
arrow_right.setArrowType(Qt.RightArrow)
right_action = QAction()
right_action.triggered.connect(self.show_next)
arrow_right.setDefaultAction(right_action)
hlayout.addWidget(arrow_right)
self.dock_layout.addWidget(self.slider_box)
self.layer_box = LayerListWidget()
self.layer_box.opacity_changed.connect(self._change_layer_opacity)
self.layer_box.layer_activated.connect(self._activate_layer)
self.dock_layout.addWidget(self.layer_box)
self.dock_layout.addItem(
QSpacerItem(1, 1, QSizePolicy.Minimum, QSizePolicy.Expanding))
class ChangeImageCommand(QUndoCommand):
    """Undoable change of the displayed image index, applied via the slider."""

    def __init__(self, slider, old_value, new_value):
        super().__init__()
        self.slider = slider
        self.old_value = old_value
        self.new_value = new_value

    def undo(self):
        """Restore the slider to its previous position."""
        if not self.slider:
            return
        self.slider.setValue(self.old_value)

    def redo(self):
        """Move the slider to the new position."""
        if not self.slider:
            return
        self.slider.setValue(self.new_value)
import hashlib
import logging
import os
import boto3
from botocore.exceptions import ClientError
class S3CasClient:
    """A content-addressable storage (CAS) interface backed by an S3 bucket.

    Objects are stored under the hex digest of their content (optionally below
    a key prefix), and the original filename is kept in object metadata.

    Fix over the original: a stray debug ``print(...)`` in ``download_file``
    is routed through the debug logger instead.
    """

    # Metadata key under which the original filename is stored.
    FILENAME_KEY = 'filename'
    # Hash constructor used to address content.
    HASH_FN = hashlib.sha256

    def __init__(self, bucket, prefix=None, client=None, debug=True):
        """
        A content-addressable storage interface to an s3 bucket.
        :param bucket: the name of the s3 bucket to use as the backing store
        :param prefix: a subdirectory in the bucket (optional)
        :param client: an optional s3 client (one will be created if not passed)
        :param debug: whether to print debug info
        """
        self.client = client or boto3.client('s3')
        self.bucket = bucket
        self.prefix = prefix
        self.debug = debug
        if debug:
            self.logger = logging.getLogger('s3cas')
            logging.basicConfig()
            self.logger.setLevel(logging.INFO)

    def _hash_file(self, file_name):
        """Return the hex digest of the file's content, read in bounded chunks."""
        h = self.HASH_FN()
        buf_size = 65536  # 64 KiB chunks keep memory bounded for large files
        with open(file_name, 'rb') as f:
            while True:
                data = f.read(buf_size)
                if not data:
                    break
                h.update(data)
        return h.hexdigest()

    def _object_name(self, object_hash):
        """Return the S3 key for a content hash, honoring the optional prefix."""
        if self.prefix is None:
            return object_hash
        return f'{self.prefix}/{object_hash}'

    def _get_existing_filename(self, object_name):
        """Return the stored filename for an existing object, or None if absent."""
        try:
            return self.client.head_object(Bucket=self.bucket, Key=object_name)['Metadata'][self.FILENAME_KEY]
        except (ClientError, KeyError, TypeError):
            # Missing object, missing metadata key, or malformed response all
            # mean "not present" for our purposes.
            return None

    def _log(self, s):
        """Log *s* at INFO level when debug mode is on; otherwise do nothing."""
        if not self.debug:
            return
        self.logger.info(s)

    def upload_file(self, file_name):
        """
        Upload a file to the backing store and index by hash, storing the filename in metadata.
        Skips uploading if it's already present.
        :param file_name: the name of the local file
        :return: the hash of the stored file
        """
        object_hash = self._hash_file(file_name)
        object_name = self._object_name(object_hash)
        existing_filename = self._get_existing_filename(object_name)
        if existing_filename is not None:
            self._log(f'File already exists with hash {object_hash}, name {existing_filename}, not uploading.')
        else:
            m_filename = os.path.basename(file_name)
            self.client.upload_file(file_name, self.bucket, object_name,
                                    ExtraArgs={'Metadata': {self.FILENAME_KEY: m_filename}})
            self._log(f'Uploaded file to s3://{self.bucket}/{object_name}')
        return object_hash

    def download_file(self, object_hash, download_dir=None, file_name=None):
        """
        Download a file indexed by hash. At least one of `download_dir` or `file_name` must be present.
        If only the download_dir parameter is present, then the file's metadata is used to determine
        the file name.
        :param object_hash: the object's hash
        :param download_dir: the directory to download to
        :param file_name: the file name to download to
        :return: the filename of the downloaded file, or None if there was an existing file at that name with
        a different hash.
        """
        if download_dir is None and file_name is None:
            raise ValueError('At least download_dir or file_name must be specified.')
        object_name = self._object_name(object_hash)
        existing_filename = self._get_existing_filename(object_name)
        if existing_filename is None:
            raise KeyError(f'No file found with hash {object_hash}')
        if file_name is None:
            download_to = os.path.join(download_dir, existing_filename)
        elif download_dir is None:
            download_to = file_name
        else:
            download_to = os.path.join(download_dir, file_name)
        if not os.path.exists(download_to):
            # Fix: the original left a bare debug print() here.
            self._log(f'Fetching s3://{self.bucket}/{object_name} -> {download_to}')
            self.client.download_file(Bucket=self.bucket, Key=object_name, Filename=download_to)
            self._log(f'Downloaded file to {download_to}')
        elif self._hash_file(download_to) == object_hash:
            self._log(f'File with correct hash already exists at {download_to}')
        else:
            self._log(f'Not overwriting existing file with different hash at {download_to}')
            return None
        return download_to
import logging
import os
import boto3
from botocore.exceptions import ClientError
class S3CasClient:
FILENAME_KEY = 'filename'
HASH_FN = hashlib.sha256
def __init__(self, bucket, prefix=None, client=None, debug=True):
"""
A content-addressable storage interface to an s3 bucket.
:param bucket: the name of the s3 bucket to use as the backing store
:param prefix: a subdirectory in the bucket (optional)
:param client: an optional s3 client (one will be created if not passed)
:param debug: whether to print debug info
"""
self.client = client or boto3.client('s3')
self.bucket = bucket
self.prefix = prefix
self.debug = debug
if debug:
self.logger = logging.getLogger('s3cas')
logging.basicConfig()
self.logger.setLevel(logging.INFO)
def _hash_file(self, file_name):
h = self.HASH_FN()
buf_size = 65536
with open(file_name, 'rb') as f:
while True:
data = f.read(buf_size)
if not data:
break
h.update(data)
return h.hexdigest()
def _object_name(self, object_hash):
if self.prefix is None:
return object_hash
return f'{self.prefix}/{object_hash}'
def _get_existing_filename(self, object_name):
try:
return self.client.head_object(Bucket=self.bucket, Key=object_name)['Metadata'][self.FILENAME_KEY]
except (ClientError, KeyError, TypeError):
return None
def _log(self, s):
if not self.debug:
return
self.logger.info(s)
def upload_file(self, file_name):
"""
Upload a file to the backing store and index by hash, storing the filename in metadata.
Skips uploading if it's already present.
:param file_name: the name of the local file
:return: the hash of the stored file
"""
object_hash = self._hash_file(file_name)
object_name = self._object_name(object_hash)
existing_filename = self._get_existing_filename(object_name)
if existing_filename is not None:
self._log(f'File already exists with hash {object_hash}, name {existing_filename}, not uploading.')
else:
m_filename = os.path.basename(file_name)
self.client.upload_file(file_name, self.bucket, object_name, ExtraArgs={'Metadata': {self.FILENAME_KEY: m_filename}})
self._log(f'Uploaded file to s3://{self.bucket}/{object_name}')
return object_hash
def download_file(self, object_hash, download_dir=None, file_name=None):
"""
Download a file indexed by hash. At least one of `download_dir` or `file_name` must be present.
If only the download_dir parameter is present, then the file's metadata is used to determine
the file name.
:param object_hash: the object's hash
:param download_dir: the directory to download to
:param file_name: the file name to download to
:return: the filename of the downloaded file, or None if there was an existing file at that name with
a different hash.
"""
if download_dir is None and file_name is None:
raise ValueError('At least download_dir or file_name must be specified.')
object_name = self._object_name(object_hash)
existing_filename = self._get_existing_filename(object_name)
if existing_filename is None:
raise KeyError(f'No file found with hash {object_hash}')
if file_name is None:
download_to = os.path.join(download_dir, existing_filename)
elif download_dir is None:
download_to = file_name
else:
download_to = os.path.join(download_dir, file_name)
if not os.path.exists(download_to):
print(self.bucket, object_name, download_to)
self.client.download_file(Bucket=self.bucket, Key=object_name, Filename=download_to)
self._log(f'Downloaded file to {download_to}')
elif self._hash_file(download_to) == object_hash:
self._log(f'File with correct hash already exists at {download_to}')
else:
self._log(f'Not overwriting existing file with different hash at {download_to}')
return None
return download_to | 0.604866 | 0.130175 |
import click
import foolbox
from counter_attack import defenses, tests, utils
from counter_attack.cli import definitions, options, parsing
@click.group(name='preprocessor')
def preprocessor_defense():
    """
    Defends using a "preprocessor", which modifies
    the image before passing it to the standard model.
    """
    # Group container only; subcommands register themselves via decorators below.
    pass
@preprocessor_defense.command(name='shallow')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/shallow')
@options.preprocessor_options
@options.attack_options(definitions.supported_attacks)
def shallow_preprocessor(options):
    """
    Simply evaluates the effectiveness of the preprocessor defense, without additional
    attack strategies.
    Adversarial samples are generated to fool the undefended model.
    """
    foolbox_model = options['foolbox_model']
    attack_p = options['attack_p']

    # Wrap the standard model with the preprocessor defense; the attack itself
    # is still run against the undefended model.
    defended_model = defenses.PreprocessorDefenseModel(
        foolbox_model, options['preprocessor'])
    attack = parsing.parse_attack(
        options['attack_name'], attack_p, foolbox.criteria.Misclassification())

    samples_count, correct_count, successful_attack_count, distances = tests.shallow_defense_test(
        foolbox_model, options['loader'], attack, attack_p,
        defended_model, options['cuda'], options['attack_workers'],
        name='Shallow Preprocessor Attack')

    info = utils.attack_statistics_info(
        samples_count, correct_count, successful_attack_count, distances)
    utils.save_results(options['results_path'], table=[distances],
                       command=options['command'], info=info, header=['Distances'])
@preprocessor_defense.command(name='substitute')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/substitute')
@options.preprocessor_options
@options.attack_options(definitions.supported_attacks)
@options.substitute_options
def substitute_preprocessor(options):
    """
    Uses BPDA with a substitute model to evade the preprocessor defense.
    BPDA uses predictions from the defended model and gradients
    from the substitute model.
    """
    # Unpack the option dict assembled by the decorators above.
    attack_p = options['attack_p']
    attack_name = options['attack_name']
    attack_workers = options['attack_workers']
    command = options['command']
    cuda = options['cuda']
    foolbox_model = options['foolbox_model']
    loader = options['loader']
    results_path = options['results_path']
    preprocessor = options['preprocessor']
    substitute_foolbox_model = options['substitute_foolbox_model']

    # Wrap the standard model with the preprocessor defense.
    defended_model = defenses.PreprocessorDefenseModel(
        foolbox_model, preprocessor)

    # BPDA requires matching output spaces between the two models.
    if substitute_foolbox_model.num_classes() != defended_model.num_classes():
        raise click.BadArgumentUsage('The substitute model ({} classes) must have the same '
                                     'number of classes as the defended model ({} classes)'.format(
                                         substitute_foolbox_model.num_classes(), defended_model.num_classes()))

    # Forward passes come from the defended model, gradients from the substitute.
    composite_model = foolbox.models.CompositeModel(defended_model, substitute_foolbox_model)

    criterion = foolbox.criteria.Misclassification()

    # The attack will be against the defended model with estimated gradients
    attack = parsing.parse_attack(
        attack_name, attack_p, criterion)

    samples_count, correct_count, successful_attack_count, distances, _, _ = tests.attack_test(
        composite_model, loader, attack, attack_p,
        cuda, attack_workers, name='Substitute Preprocessor Attack')

    info = utils.attack_statistics_info(samples_count, correct_count, successful_attack_count, distances)
    header = ['Distances']
    utils.save_results(results_path, table=[distances], command=command,
                       info=info, header=header)
@preprocessor_defense.command(name='black-box')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/black-box')
@options.preprocessor_options
@options.attack_options(definitions.black_box_attacks)
def black_box_preprocessor(options):
    """
    Uses a black box attack to evade the preprocessor defense.
    Adversarial samples are generated to fool the defended model,
    which only provides the labels when queried.
    """
    attack_p = options['attack_p']

    # Wrap the standard model with the preprocessor; the attacker only ever
    # queries this defended model.
    defended_model = defenses.PreprocessorDefenseModel(
        options['foolbox_model'], options['preprocessor'])

    # The attack will be against the defended model
    attack = parsing.parse_attack(
        options['attack_name'], attack_p, foolbox.criteria.Misclassification())

    samples_count, correct_count, successful_attack_count, distances, _, _ = tests.attack_test(
        defended_model, options['loader'], attack, attack_p,
        options['cuda'], options['attack_workers'],
        name='Black-Box Preprocessor Attack')

    info = utils.attack_statistics_info(
        samples_count, correct_count, successful_attack_count, distances)
    utils.save_results(options['results_path'], table=[distances],
                       command=options['command'], info=info, header=['Distances'])
import foolbox
from counter_attack import defenses, tests, utils
from counter_attack.cli import definitions, options, parsing
@click.group(name='preprocessor')
def preprocessor_defense():
"""
Defends using a "preprocessor", which modifies
the image before passing it to the standard model.
"""
pass
@preprocessor_defense.command(name='shallow')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/shallow')
@options.preprocessor_options
@options.attack_options(definitions.supported_attacks)
def shallow_preprocessor(options):
"""
Simply evaluates the effectiveness of the preprocessor defense, without additional
attack strategies.
Adversarial samples are generated to fool the undefended model.
"""
attack_p = options['attack_p']
attack_name = options['attack_name']
attack_workers = options['attack_workers']
command = options['command']
cuda = options['cuda']
foolbox_model = options['foolbox_model']
loader = options['loader']
results_path = options['results_path']
preprocessor = options['preprocessor']
criterion = foolbox.criteria.Misclassification()
# The attack will be against the undefended model
attack = parsing.parse_attack(
attack_name, attack_p, criterion)
defended_model = defenses.PreprocessorDefenseModel(
foolbox_model, preprocessor)
samples_count, correct_count, successful_attack_count, distances = tests.shallow_defense_test(foolbox_model, loader, attack, attack_p,
defended_model, cuda, attack_workers,
name='Shallow Preprocessor Attack')
info = utils.attack_statistics_info(samples_count, correct_count, successful_attack_count, distances)
header = ['Distances']
utils.save_results(results_path, table=[distances], command=command,
info=info, header=header)
@preprocessor_defense.command(name='substitute')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/substitute')
@options.preprocessor_options
@options.attack_options(definitions.supported_attacks)
@options.substitute_options
def substitute_preprocessor(options):
"""
Uses BPDA with a substitute model to evade the preprocessor defense.
BPDA uses predictions from the defended model and gradients
from the substitute model.
"""
attack_p = options['attack_p']
attack_name = options['attack_name']
attack_workers = options['attack_workers']
command = options['command']
cuda = options['cuda']
foolbox_model = options['foolbox_model']
loader = options['loader']
results_path = options['results_path']
preprocessor = options['preprocessor']
substitute_foolbox_model = options['substitute_foolbox_model']
defended_model = defenses.PreprocessorDefenseModel(
foolbox_model, preprocessor)
if substitute_foolbox_model.num_classes() != defended_model.num_classes():
raise click.BadArgumentUsage('The substitute model ({} classes) must have the same '
'number of classes as the defended model ({} classes)'.format(
substitute_foolbox_model.num_classes(), defended_model.num_classes()))
composite_model = foolbox.models.CompositeModel(defended_model, substitute_foolbox_model)
criterion = foolbox.criteria.Misclassification()
# The attack will be against the defended model with estimated gradients
attack = parsing.parse_attack(
attack_name, attack_p, criterion)
samples_count, correct_count, successful_attack_count, distances, _, _ = tests.attack_test(composite_model, loader, attack, attack_p,
cuda, attack_workers, name='Substitute Preprocessor Attack')
info = utils.attack_statistics_info(samples_count, correct_count, successful_attack_count, distances)
header = ['Distances']
utils.save_results(results_path, table=[distances], command=command,
info=info, header=header)
@preprocessor_defense.command(name='black-box')
@options.global_options
@options.dataset_options('test', 'test')
@options.standard_model_options
@options.pretrained_model_options
@options.test_options('defense/preprocessor/black-box')
@options.preprocessor_options
@options.attack_options(definitions.black_box_attacks)
def black_box_preprocessor(options):
"""
Uses a black box attack to evade the preprocessor defense.
Adversarial samples are generated to fool the defended model,
which only provides the labels when queried.
"""
attack_p = options['attack_p']
attack_name = options['attack_name']
attack_workers = options['attack_workers']
command = options['command']
cuda = options['cuda']
foolbox_model = options['foolbox_model']
loader = options['loader']
results_path = options['results_path']
preprocessor = options['preprocessor']
defended_model = defenses.PreprocessorDefenseModel(
foolbox_model, preprocessor)
criterion = foolbox.criteria.Misclassification()
# The attack will be against the defended model
attack = parsing.parse_attack(
attack_name, attack_p, criterion)
samples_count, correct_count, successful_attack_count, distances, _, _ = tests.attack_test(defended_model, loader, attack, attack_p,
cuda, attack_workers, name='Black-Box Preprocessor Attack')
info = utils.attack_statistics_info(samples_count, correct_count, successful_attack_count, distances)
header = ['Distances']
utils.save_results(results_path, table=[distances], command=command,
info=info, header=header) | 0.579995 | 0.270546 |
import os
import inspect
import matplotlib.pyplot as plt
# Line styles
LINE_COLOR = '#0060ff'

# Default marker styles
MARKER_SIZE = 200
MARKER_FACE_COLOR = '#bcd5fdaa'  # RGBA hex: light-blue fill with alpha
MARKER_EDGE_COLOR = '#0060ff'
MARKER_EDGE_WIDTH = 1.5
def set_plot_style():
    """Set the global matplotlib rcParams style (fonts, sizes, legend, grid).

    Fix over the original: ``grid.color`` was assigned twice ('#cccccc' and
    then '#555555'); only the last assignment took effect, so the dead first
    assignment is removed and the effective value kept.
    """
    plt.rcParams['font.family'] = 'serif'

    TINY_SIZE = 13
    SMALL_SIZE = 18
    NORMAL_SIZE = 20
    LARGE_SIZE = 23

    # Title size
    plt.rcParams['axes.titlesize'] = LARGE_SIZE
    # Axes label size
    plt.rcParams['axes.labelsize'] = SMALL_SIZE
    # Tick label size
    plt.rcParams['xtick.labelsize'] = TINY_SIZE
    plt.rcParams['ytick.labelsize'] = TINY_SIZE
    # Text size
    plt.rcParams['font.size'] = NORMAL_SIZE

    # Legend location and frame
    plt.rcParams["legend.loc"] = 'upper right'
    plt.rcParams["legend.framealpha"] = 0.9
    plt.rcParams["legend.edgecolor"] = '#000000'
    # Legend text size
    plt.rcParams['legend.fontsize'] = SMALL_SIZE

    # Define plot size
    plt.rcParams['figure.figsize'] = [9, 6]

    # Lines
    plt.rcParams['lines.color'] = 'red'
    plt.rcParams['lines.linewidth'] = 2

    # Grid (single, effective assignment)
    plt.rcParams['grid.color'] = '#555555'
    plt.rcParams['grid.alpha'] = 0.2
def save_plot(plt, file_name=None, suffix=None,
              extensions=None, subdir='plots', dpi=300,
              silent=False):
    """
    Saves a plot to an image file. The name of the
    the image file is constructed from file name of the python script
    that called this function, with an added `suffix`.

    Fix over the original: the mutable default argument ``extensions=['pdf']``
    is replaced with the None-sentinel idiom (same default behavior).

    Parameters
    -----------
    plt :
        Matplotlib's plot object (anything exposing ``savefig``)
    file_name: str
        Base name (name without extension) for the plot file.
        If None, the name of Python script that called this function
        will be used.
    suffix : str
        File name suffix for the output image file name.
        No suffix is used if None.
    extensions : list of str
        The output image file extensions which will be used to save the plot.
        Defaults to ['pdf'].
    subdir : str
        Directory where the plot will be placed.
    dpi : int
        Resolution passed through to ``savefig``.
    silent : bool
        If True will not print out the path the image is saved to.
    """
    if extensions is None:
        extensions = ['pdf']

    # Locate the caller's module so the plot dir sits next to the caller.
    frame = inspect.stack()[1]
    module = inspect.getmodule(frame[0])
    codefile = module.__file__
    this_dir = os.path.dirname(codefile)
    plot_dir = os.path.join(this_dir, subdir)
    os.makedirs(plot_dir, exist_ok=True)

    if file_name is None:
        # Caller's script name without its extension.
        file_name = os.path.basename(codefile).rsplit('.', 1)[0]

    if suffix is None:
        suffix = ''
    else:
        suffix = f'_{suffix}'

    for extension in extensions:
        filename = f'{file_name}{suffix}.{extension}'
        figure_path = os.path.join(plot_dir, filename)
        plt.savefig(figure_path, dpi=dpi)
        printed_path = os.path.join(subdir, filename)
        if not silent:
            print(f"Figure saved to {printed_path}")
import os
import inspect
import matplotlib.pyplot as plt
# Line styles
LINE_COLOR = '#0060ff'
# Default marker styles
MARKER_SIZE = 200
MARKER_FACE_COLOR = '#bcd5fdaa'
MARKER_EDGE_COLOR = '#0060ff'
MARKER_EDGE_WIDTH = 1.5
def set_plot_style():
"""Set global style"""
plt.rcParams['font.family'] = 'serif'
TINY_SIZE = 13
SMALL_SIZE = 18
NORMAL_SIZE = 20
LARGE_SIZE = 23
# Title size
plt.rcParams['axes.titlesize'] = LARGE_SIZE
# Axes label size
plt.rcParams['axes.labelsize'] = SMALL_SIZE
# Tick label size
plt.rcParams['xtick.labelsize'] = TINY_SIZE
plt.rcParams['ytick.labelsize'] = TINY_SIZE
# Text size
plt.rcParams['font.size'] = NORMAL_SIZE
# Legend location
plt.rcParams["legend.loc"] = 'upper right'
plt.rcParams["legend.framealpha"] = 0.9
plt.rcParams["legend.edgecolor"] = '#000000'
# Legend text size
plt.rcParams['legend.fontsize'] = SMALL_SIZE
# Grid color
plt.rcParams['grid.color'] = '#cccccc'
# Define plot size
plt.rcParams['figure.figsize'] = [9, 6]
# Lines
plt.rcParams['lines.color'] = 'red'
plt.rcParams['lines.linewidth'] = 2
# Grid
plt.rcParams['grid.color'] = '#555555'
plt.rcParams['grid.alpha'] = 0.2
def save_plot(plt, file_name=None, suffix=None,
extensions=['pdf'], subdir='plots', dpi=300,
silent=False):
"""
Saves a plot to an image file. The name of the
the image file is constructed from file name of the python script
that called `plot_to_image` with an added `suffix`.
The plot is saved to a
Parameters
-----------
plt :
Matplotlib's plot object
file_name: str
Base name (name without extension) for the plot file.
If None, the name of Python script that called this function
will be used.
suffix : str
File name suffix for the output image file name.
No suffix is used if None.
extensions : list of str
The output image file extensions which will be used to save the plot.
subdir : str
Directory where the plot will be placed.
silent : bool
If True will not print out the path the image is save to.
"""
frame = inspect.stack()[1]
module = inspect.getmodule(frame[0])
codefile = module.__file__
this_dir = os.path.dirname(codefile)
plot_dir = os.path.join(this_dir, subdir)
os.makedirs(plot_dir, exist_ok=True)
if file_name is None:
file_name = os.path.basename(codefile).rsplit('.', 1)[0]
if suffix is None:
suffix = ''
else:
suffix = f'_{suffix}'
for extension in extensions:
filename = f'{file_name}{suffix}.{extension}'
figure_path = os.path.join(plot_dir, filename)
plt.savefig(figure_path, dpi=dpi)
printed_path = os.path.join(subdir, filename)
if not silent:
print(f"Figure saved to {printed_path}") | 0.693265 | 0.523725 |
import time, copy, os  # Import required modules
# Define the landscape for the apocalypse.
# Cell legend (from how zombieBite treats them):
#   Z = zombie (spreads to all 8 neighbours each cycle)
#   C = blocking cell — never infected (presumably cover/walls)
#   H = infectable cell (presumably humans)
apocalypse = [
    ['H','H','H','H','H','C','H','C','H','C','H','H',],
    ['H','H','H','H','H','C','H','C','H','C','H','H',],
    ['H','H','H','H','H','C','C','C','C','C','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','Z','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','H','H','H','H',],
    ['H','H','H','H','H','H','H','C','C','C','C','H',],
    ['H','H','H','H','H','H','H','H','H','H','H','H',]]
def clearScreen():
    """Clear the terminal between animation frames.

    Generalized from the original Windows-only 'cls': uses 'cls' on Windows
    and 'clear' on POSIX systems. Note this will not work inside the
    interactive Python terminal.
    """
    os.system('cls' if os.name == 'nt' else 'clear')
def printGrid(gridToPrint):
    """Print the landscape, one row per line, cells separated by spaces.

    Output format matches the original: a leading space, then each cell
    followed by a space.
    """
    width = len(gridToPrint[0])
    for row in gridToPrint:
        print(' ' + ''.join(row[col] + ' ' for col in range(width)))
def zombieBite(gridToBite):
    """Return a new grid after one infection step.

    Every cell in the 8-neighbourhood of a 'Z' becomes 'Z' unless it is a
    'C' cell, which blocks the bite. The input grid is not modified.

    Improvement over the original: the eight copy-pasted neighbour branches
    are replaced by a single offset loop with one bounds check.
    """
    rows = len(gridToBite)
    cols = len(gridToBite[0])
    updatedGridToBite = copy.deepcopy(gridToBite)
    for i in range(rows):
        for j in range(cols):
            if gridToBite[i][j] != 'Z':
                continue
            # Visit all 8 neighbours (incl. diagonals) of this zombie.
            for di in (-1, 0, 1):
                for dj in (-1, 0, 1):
                    if di == 0 and dj == 0:
                        continue
                    ni, nj = i + di, j + dj
                    if 0 <= ni < rows and 0 <= nj < cols and gridToBite[ni][nj] != 'C':
                        updatedGridToBite[ni][nj] = 'Z'
    return updatedGridToBite
# Draw the initial frame before the simulation starts.
clearScreen()
printGrid(apocalypse)
gameCycle = 0
print(' Game Cycle: {}'.format(gameCycle))  # Print what game cycle we are on

# Game loop: advance the infection every half second, forever (Ctrl+C to stop).
while True:
    time.sleep(0.5)
    gameCycle += 1
    apocalypse = zombieBite(apocalypse)
    clearScreen()
    printGrid(apocalypse)
    print(' Game Cycle: {}'.format(gameCycle))
#Define the landscape for the apocolypse
apocalypse = [
['H','H','H','H','H','C','H','C','H','C','H','H',],
['H','H','H','H','H','C','H','C','H','C','H','H',],
['H','H','H','H','H','C','C','C','C','C','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','Z','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','H','H','H','H',],
['H','H','H','H','H','H','H','C','C','C','C','H',],
['H','H','H','H','H','H','H','H','H','H','H','H',]]
def clearScreen():
os.system('cls')#CLear the screen. Note this will not work in the python terminal
def printGrid(gridToPrint):#Print each line of the landscape
for i in range(0,len(gridToPrint)):#Iterate through each row
line = " "
for j in range(0,len(gridToPrint[0])):#Iterate through each character
line = line + gridToPrint[i][j] + ' '#Add character to line
print(line)#Print line
def zombieBite(gridToBite):
updatedGridToBite = copy.deepcopy(gridToBite)
for i in range(0,len(gridToBite)):
for j in range(0,len(gridToBite[0])):#Iterate for the legnth of the row
if gridToBite[i][j] == 'Z':
if i > 0 and gridToBite[i - 1][j] != 'C': #Change character to top
updatedGridToBite[i - 1][j] = 'Z'
if i < len(gridToBite) - 1 and gridToBite[i + 1][j] != 'C':#Change character to bottom
updatedGridToBite[i + 1][j] = 'Z'
if j > 0 and gridToBite[i][j - 1] != 'C':#Change character to left
updatedGridToBite[i][j - 1] = 'Z'
if j < len(gridToBite[0]) - 1 and gridToBite[i][j + 1] != 'C':#Change character to right
updatedGridToBite[i][j + 1] = 'Z'
#Diagonals
if j > 0 and i > 0 and gridToBite[i-1][j-1] != 'C':#Change character to top left
updatedGridToBite[i-1][j-1] = 'Z'
if j < len(gridToBite[0]) - 1 and i > 0 and gridToBite[i-1][j+1] != 'C':#Change character to top right
updatedGridToBite[i-1][j+1] = 'Z'
if j > 0 and i < len(gridToBite) - 1 and gridToBite[i+1][j-1] != 'C':#Change character to bottom left
updatedGridToBite[i+1][j-1] = 'Z'
if j < len(gridToBite[0]) - 1 and i < len(gridToBite) - 1 and gridToBite[i+1][j+1] != 'C':#Change character to bottom right
updatedGridToBite[i+1][j+1] = 'Z'
return updatedGridToBite#Return the new landscape
clearScreen()
printGrid(apocalypse)
gameCycle = 0
print(' Game Cycle: {}'.format(gameCycle))#Print what game cycle we are on
#Game loop
while True:
time.sleep(0.5)
gameCycle +=1
apocalypse = zombieBite(apocalypse)
clearScreen()
printGrid(apocalypse)
print(' Game Cycle: {}'.format(gameCycle)) | 0.187021 | 0.224714 |
import numpy as np
import datetime
import umap
import matplotlib.pyplot as plt
import os
import pickle
import time
from matplotlib.colors import ListedColormap
from sklearn.datasets import make_moons, make_circles, make_classification
from matplotlib.ticker import NullFormatter
print('setting up logging...')
# Timestamped output directory, e.g. ./outputs/2024-01-01_12:00:00
# NOTE(review): ':' is not a valid path character on Windows — confirm target OS.
dt = datetime.datetime.fromtimestamp(time.time())
logdir = os.path.join('./outputs/', dt.strftime('%Y-%m-%d_%H:%M:%S'))
print(f'Logging to {logdir}')
if not os.path.exists(logdir):
    os.makedirs(logdir)
def construct_datasets(n_samples):
    """Build three toy 2-D binary-classification datasets of *n_samples* points.

    Returns [moons, circles, linearly-separable], each as an (X, y) pair.
    All generators use fixed seeds so results are reproducible.
    """
    features, labels = make_classification(
        n_samples=n_samples, n_features=2, n_redundant=0, n_informative=2,
        random_state=1, n_clusters_per_class=1)
    # Jitter the linearly separable blob with a fixed-seed uniform noise.
    features = features + 2 * np.random.RandomState(2).uniform(size=features.shape)
    return [
        make_moons(n_samples=n_samples, noise=0.3, random_state=0),
        make_circles(n_samples=n_samples, noise=0.2, factor=0.5, random_state=1),
        (features, labels),
    ]
# Materialize the datasets; `names` must stay in the same order as `datasets`.
datasets = construct_datasets(100)
names = ['moons', 'circles', 'linearly seperable']  # (sic: runtime label, typo kept)
def plot_umap_hyperparam_sweep(ds, name, min_dists, n_neighbors, umap_seed, verbose=False):
    """Plot a grid of UMAP embeddings over (n_neighbors x min_dist) settings.

    The first subplot shows the raw input data; each remaining cell shows the
    2-D UMAP embedding for one hyper-parameter combination. The figure is
    saved under ``logdir``.

    Bug fixes vs. the original:
    - the save path was built with a bare ``join`` (a NameError: only ``os``
      is imported) — use ``os.path.join``
    - ``tight_layout`` is now applied before ``savefig`` so the saved file
      actually gets the adjusted layout
    """
    fig = plt.figure(figsize=(27, 9))
    k = 1
    X, y = ds
    ax = plt.subplot(len(n_neighbors), len(min_dists) + 1, k)
    # just plot the dataset first
    cm_bright = ListedColormap(["#FF0000", "#0000FF"])
    ax.set_title("input data")
    ax.scatter(x=X[:, 0], y=X[:, 1], c=y, cmap=cm_bright, edgecolors="k")
    k += 1
    for n in n_neighbors:
        for min_dist in min_dists:
            if k % (len(min_dists) + 1) == 1:  # don't plot below input data subplot
                k += 1
            ax = plt.subplot(len(n_neighbors), len(min_dists) + 1, k)
            # NOTE(review): `log_losses` is not a standard umap-learn argument —
            # presumably a patched fork; confirm before upgrading umap.
            umapper = umap.UMAP(random_state=umap_seed, min_dist=min_dist,
                                n_neighbors=n, verbose=verbose, n_epochs=10000,
                                log_losses="after")
            umap_proj = umapper.fit_transform(X)
            ax.scatter(umap_proj[:, 0], umap_proj[:, 1], c=y, cmap=cm_bright, edgecolors="k")
            ax.set_title(f' n_neighbors={n}, min_dist={min_dist}')
            # Hide tick labels; embedding coordinates are not meaningful.
            ax.xaxis.set_major_formatter(NullFormatter())
            ax.yaxis.set_major_formatter(NullFormatter())
            k += 1
    fig.suptitle(f'rand_seed={umap_seed}')
    fig.tight_layout(rect=[0, 0.07, 1, 0.95])
    fig.savefig(os.path.join(
        logdir, f'umap_hyperparams_{name}_{len(n_neighbors)}n_{len(min_dists)}_md'))
    plt.show()
# Run the sweep; the `break` limits it to the first dataset (moons) for now.
for name, ds in zip(names, datasets):
    # plot_umap_hyperparam_sweep(ds, name, np.arange(0.01, 0.5, 0.01), np.arange(5, 30, 5), 42)
    plot_umap_hyperparam_sweep(ds, name, [0.1], [2], 42, True)
    break
import datetime
import umap
import matplotlib.pyplot as plt
import os
import pickle
import time
from matplotlib.colors import ListedColormap
from sklearn.datasets import make_moons, make_circles, make_classification
from matplotlib.ticker import NullFormatter
print('setting up logging...')
dt = datetime.datetime.fromtimestamp(time.time())
logdir = os.path.join('./outputs/' ,dt.strftime('%Y-%m-%d_%H:%M:%S'))
print(f'Logging to {logdir}')
if not os.path.exists(logdir):
os.makedirs(logdir)
def construct_datasets(n_samples):
X, y = make_classification(
n_samples=n_samples, n_features=2, n_redundant=0, n_informative=2, random_state=1, n_clusters_per_class=1
)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)
linearly_separable = (X, y)
return [
make_moons(n_samples=n_samples, noise=0.3, random_state=0),
make_circles(n_samples=n_samples, noise=0.2, factor=0.5, random_state=1),
linearly_separable,
]
datasets = construct_datasets(100)
names = ['moons', 'circles', 'linearly seperable']
def plot_umap_hyperparam_sweep(ds, name, min_dists, n_neighbors, umap_seed, verbose=False):
    """Plot a grid of UMAP embeddings over (n_neighbors x min_dists).

    Parameters
    ----------
    ds : tuple
        ``(X, y)`` pair of 2-D features and integer class labels.
    name : str
        Dataset name; used in the saved figure's filename.
    min_dists : sequence of float
        UMAP ``min_dist`` values (grid columns).
    n_neighbors : sequence of int
        UMAP ``n_neighbors`` values (grid rows).
    umap_seed : int
        ``random_state`` passed to UMAP for reproducibility.
    verbose : bool
        Forwarded to ``umap.UMAP``.

    The first subplot of the top row shows the raw input data; the figure
    is saved into the module-level ``logdir`` and shown interactively.
    """
    fig = plt.figure(figsize=(27, 9))
    k = 1
    X, y = ds
    ax = plt.subplot(len(n_neighbors), len(min_dists) + 1, k)
    # Just plot the input dataset first.
    cm_bright = ListedColormap(["#FF0000", "#0000FF"])
    ax.set_title("input data")
    ax.scatter(x=X[:, 0], y=X[:, 1], c=y, cmap=cm_bright, edgecolors="k")
    k += 1
    for j, n in enumerate(n_neighbors):
        for i, min_dist in enumerate(min_dists):
            if k % (len(min_dists) + 1) == 1:  # don't plot below the input-data subplot
                k += 1
            ax = plt.subplot(len(n_neighbors), len(min_dists) + 1, k)
            # NOTE(review): ``log_losses`` is not a standard umap-learn
            # parameter — presumably a patched/forked UMAP; confirm.
            umapper = umap.UMAP(random_state=umap_seed, min_dist=min_dist, n_neighbors=n, verbose=verbose, n_epochs=10000, log_losses="after",)
            umap_proj = umapper.fit_transform(X)
            ax.scatter(umap_proj[:, 0], umap_proj[:, 1], c=y, cmap=cm_bright, edgecolors="k")
            ax.set_title(f' n_neighbors={n}, min_dist={min_dist}')
            ax.xaxis.set_major_formatter(NullFormatter())
            ax.yaxis.set_major_formatter(NullFormatter())
            k += 1
    fig.suptitle(f'rand_seed={umap_seed}')
    # Bug fixes: apply the layout *before* saving (previously the saved file
    # missed tight_layout), and use os.path.join — the bare ``join`` was a
    # NameError since only ``os`` is imported in this module.
    fig.tight_layout(rect=[0, 0.07, 1, 0.95])
    fig.savefig(os.path.join(logdir, f'umap_hyperparams_{name}_{len(n_neighbors)}n_{len(min_dists)}_md'))
    plt.show()
# Run the sweep on the first dataset only, with a single hyperparameter
# combination; the commented-out call is the full grid version.
for ds_name, ds_pair in zip(names, datasets):
    # plot_umap_hyperparam_sweep(ds_pair, ds_name, np.arange(0.01, 0.5, 0.01), np.arange(5, 30, 5), 42)
    plot_umap_hyperparam_sweep(ds_pair, ds_name, [0.1], [2], 42, True)
    break
from pygame import *
import os
import pyganim
# Tile geometry and the colour treated as transparent by set_colorkey().
PLATFORM_WIDTH = 32
PLATFORM_HEIGHT = 32
PLATFORM_COLOR = "#000000"
ICON_DIR = os.path.dirname(__file__)  # absolute path to the directory holding the image assets
# Frame image paths for the teleporter animation (cycled by pyganim).
ANIMATION_BLOCKTELEPORT = [
    ('%s/blocks/portal2.png' % ICON_DIR),
    ('%s/blocks/portal1.png' % ICON_DIR)]
# Frame image paths for the princess animation.
ANIMATION_PRINCESS = [
    ('%s/blocks/princess_l.png' % ICON_DIR),
    ('%s/blocks/princess_r.png' % ICON_DIR)]
class Platform(sprite.Sprite):
    """Static 32x32 level tile drawn from blocks/platform.png."""

    def __init__(self, x, y):
        """Create a platform whose top-left corner is at pixel (x, y)."""
        sprite.Sprite.__init__(self)
        # Dead-code fix: the original built a blank Surface and filled it,
        # only to immediately overwrite it with the loaded image — load
        # the sprite image directly instead.
        self.image = image.load("%s/blocks/platform.png" % ICON_DIR)
        # Treat pure black as transparent when blitting.
        self.image.set_colorkey(Color(PLATFORM_COLOR))
        self.rect = Rect(x, y, PLATFORM_WIDTH, PLATFORM_HEIGHT)
class BlockDie(Platform):
    """Tile drawn from dieBlock.png with a reduced collision rect
    (presumably a hazard — the name suggests it kills on contact)."""

    def __init__(self, x, y):
        Platform.__init__(self, x, y)
        self.image = image.load("%s/blocks/dieBlock.png" % ICON_DIR)
        # Shrink the hitbox to the central half of the tile.
        inset_x = PLATFORM_WIDTH / 4
        inset_y = PLATFORM_HEIGHT / 4
        self.rect = Rect(x + inset_x, y + inset_y, PLATFORM_WIDTH - 2 * inset_x, PLATFORM_HEIGHT - 2 * inset_y)
class BlockTeleport(Platform):
    """Animated portal tile carrying a destination coordinate pair."""

    def __init__(self, x, y, goX, goY):
        Platform.__init__(self, x, y)
        # Destination coordinates of the teleport.
        self.goX = goX
        self.goY = goY
        frames = [(frame, 0.3) for frame in ANIMATION_BLOCKTELEPORT]
        self.boltAnim = pyganim.PygAnimation(frames)
        self.boltAnim.play()

    def update(self):
        """Wipe with the transparent key colour, then draw the current frame."""
        self.image.fill(Color(PLATFORM_COLOR))
        self.boltAnim.blit(self.image, (0, 0))
class Princess(Platform):
    """Animated princess sprite (flips between left/right frames)."""

    def __init__(self, x, y):
        Platform.__init__(self, x, y)
        frames = [(frame, 0.8) for frame in ANIMATION_PRINCESS]
        self.boltAnim = pyganim.PygAnimation(frames)
        self.boltAnim.play()

    def update(self):
        """Wipe with the transparent key colour, then draw the current frame."""
        self.image.fill(Color(PLATFORM_COLOR))
        self.boltAnim.blit(self.image, (0, 0))
from pygame import *
import os
import pyganim
# Tile geometry and the colour treated as transparent by set_colorkey().
PLATFORM_WIDTH = 32
PLATFORM_HEIGHT = 32
PLATFORM_COLOR = "#000000"
ICON_DIR = os.path.dirname(__file__)  # absolute path to the directory holding the image assets
# Frame image paths for the teleporter animation (cycled by pyganim).
ANIMATION_BLOCKTELEPORT = [
    ('%s/blocks/portal2.png' % ICON_DIR),
    ('%s/blocks/portal1.png' % ICON_DIR)]
# Frame image paths for the princess animation.
ANIMATION_PRINCESS = [
    ('%s/blocks/princess_l.png' % ICON_DIR),
    ('%s/blocks/princess_r.png' % ICON_DIR)]
class Platform(sprite.Sprite):
    """Static 32x32 level tile drawn from blocks/platform.png."""

    def __init__(self, x, y):
        sprite.Sprite.__init__(self)
        # NOTE(review): this Surface and the fill below are immediately
        # replaced by the loaded image two lines down — dead work.
        self.image = Surface((PLATFORM_WIDTH, PLATFORM_HEIGHT))
        self.image.fill(Color(PLATFORM_COLOR))
        self.image = image.load("%s/blocks/platform.png" % ICON_DIR)
        self.image.set_colorkey(Color(PLATFORM_COLOR))  # black is transparent
        self.rect = Rect(x, y, PLATFORM_WIDTH, PLATFORM_HEIGHT)
class BlockDie(Platform):
    """Tile drawn from dieBlock.png with a reduced collision rect
    (presumably a hazard — the name suggests it kills on contact)."""

    def __init__(self, x, y):
        Platform.__init__(self, x, y)
        self.image = image.load("%s/blocks/dieBlock.png" % ICON_DIR)
        # Hitbox shrunk to the central half of the 32x32 tile.
        self.rect = Rect(x + PLATFORM_WIDTH / 4, y + PLATFORM_HEIGHT / 4, PLATFORM_WIDTH - PLATFORM_WIDTH / 2, PLATFORM_HEIGHT - PLATFORM_HEIGHT / 2)
class BlockTeleport(Platform):
    """Animated portal tile carrying a destination coordinate pair."""

    def __init__(self, x, y, goX,goY):
        Platform.__init__(self, x, y)
        self.goX = goX # destination X coordinate of the teleport
        self.goY = goY # destination Y coordinate of the teleport
        boltAnim = []
        for anim in ANIMATION_BLOCKTELEPORT:
            boltAnim.append((anim, 0.3)) # 0.3 s per frame
        self.boltAnim = pyganim.PygAnimation(boltAnim)
        self.boltAnim.play()

    def update(self):
        # Wipe with the transparent key colour, then draw the current frame.
        self.image.fill(Color(PLATFORM_COLOR))
        self.boltAnim.blit(self.image, (0, 0))
class Princess(Platform):
    """Animated princess sprite (flips between left/right frames)."""

    def __init__(self, x, y):
        Platform.__init__(self, x, y)
        frames = [(frame, 0.8) for frame in ANIMATION_PRINCESS]
        self.boltAnim = pyganim.PygAnimation(frames)
        self.boltAnim.play()

    def update(self):
        """Wipe with the transparent key colour, then draw the current frame."""
        self.image.fill(Color(PLATFORM_COLOR))
        self.boltAnim.blit(self.image, (0, 0))
import sys
import time
import socket
import tkinter
import winsound
import platform
import threading
from tkinter import *
import tkinter.simpledialog
import tkinter.font as tkFont
from tkinter import messagebox
from random import randint, choices
from tkinter.scrolledtext import ScrolledText
from Modules import ChiffrementRSA, Fonctions, LecteurSauvegarde, Paramètres, Sauvegarde, Serveur, Kripiti
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
Index
I. Définition de AfficherMenu().......................................................128
La fonction qui affiche le menu principal de l'application. Elle est appellée au
démarrage de l'application et quand l'utilisateur retourne au menu.
II. Hôte et clients...................................................................150
Les fonctions qui servent à afficher le menus de connexion pour le client et celles
qui servent à démarrer le serveur.
A. Travail spécifique à l'hôte....................................................150
1. Définition de DevenirHôte()................................................150
Cette fonction affiche le menu qui permet à l'hôte de configurer le mode de
connexion au serveur (Ip, Port et nom d'utilisateur)
2. Définition de DémarrerServeur()............................................222
Cette fonction lance le thread du serveur, en récupérant les informations
données sur l'interface de connexion.
B. Fonctions spécifiques au client................................................266
1. Définition de DevenirClient()..............................................266
Cette fonction affiche l'interface qui permet choisir à quel serveur se
connecter
2. Définition de SeConnecter()................................................367
Fonction qui récupere les informations saisies par l'utilisateur dans la
fonction DevenirClient() et qui initie une connexion avec le serveur.
III. Connexion et envoi de messages...................................................314
Les fonctions dédiées à l'envoi et à la réception de messages au serveur
A. Connexion......................................................................314
1. Définition de Connexion()..................................................314
Cette fonction sert à se connecter au serveur et à Envoyer le nom
d'utilisateur, la clé publique, le module de chiffrement au serveur, et on
recoit les informations de chiffrement du serveur, la clé publique et le
module de chiffrement. Si le serveur demande un mot de passe, c'est cette
fonction qui le récupére auprès de l'utilisateur, le chiffre et l'envoi au
serveur.
B. Définition de AffichageConversations().........................................381
Cette fonction sert à générer l'interface de la conversation
C.Envoyer et Recevoir.............................................................481
1. Définition de Envoyer()....................................................481
Fonctions qui fonctionne avec deux mode :
- Le mode "automatique": La fonction récupere la valeur du champ de
saisie et l'envoi au serveur
- Le mode "manuel": La fonction est appellée et envoie le message au
serveur
2. Définition de Réception()..................................................607
Cette fonction est un thread (Suite d'instructions qui s'exécutent arrière
plan de l'application). Il permet de recevoir des messages du serveur.
IV. Barre d'application...............................................................687
A. Définition de RetournerMenu()..................................................687
Fonction qui efface le contenu de la fenêtre et affiche le menuPrincipal
B. Définition de InfosServeur()...................................................743
La fenêtre qui affiche les informations sur le serveur
C. Définition de Aide()...........................................................787
Fenêtre qui affiche de l'aide
D. Activer et désactiver le son...................................................828
Fonctions triviales
1. Définition de ActiverSon().................................................828
2. Définition de CouperSon()..................................................837
E. Définition de Contact()........................................................847
Fonction qui permet à l'utilisateur de reporter un bug via les Issues GitHub
avec notre bot "Kripiti"
V. Définition de fermeture()..........................................................900
Fonctions appelée quand l'utilisateur ferme la fenêtre
VI.Lancement du programme.............................................................912
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
def AfficherMenu():
    """Render the application's main menu: logo, greeting and role buttons."""
    global MessageBienvenue, CadreBouttons, Logo
    # Logo, greeting and button frame are module globals so the host/client
    # sub-menus can hide them later with pack_forget().
    Logo = Label(fen, bg="grey", image=ImageLogo)
    Logo.pack()
    MessageBienvenue = Label(fen, text="Bienvenue dans Kripto. Pour démarrez, dites-nous \nsi vous voulez être hôte ou bien client.", bg="grey", font=PoliceTitre)
    MessageBienvenue.pack()
    CadreBouttons = Frame(fen, bg="grey")
    CadreBouttons.pack(pady=60)
    # Role-selection buttons, side by side inside the frame.
    for caption, action in (("Être hôte", DevenirHôte), ("Être client", DevenirClient)):
        Button(CadreBouttons, text=caption, font=PoliceBoutton, command=action).pack(side=LEFT, padx=7)
def DevenirHôte():
    """Show the host-side setup form: server IP, port and username.

    The IP field is pre-filled with this machine's address; port and
    username get placeholder suggestions (saved preference when set,
    otherwise a random high port / random name).
    """
    global InputIp, IP, InputPort, InputNom, CadreParamètres, SousMenuCliqué
    SousMenuCliqué = True
    # If the user later asks to return to the menu, we know they were in a sub-menu.
    MessageBienvenue.pack_forget()
    CadreBouttons.pack_forget()
    Machine = socket.gethostname()
    IP = socket.gethostbyname(Machine)
    CadreParamètres = Frame(fen, bg="grey")
    CadreParamètres.pack()
    # IP address label.
    Label(CadreParamètres, text="Votre Adresse IP", bg="Grey").pack(anchor=CENTER, pady=7)
    # No need to keep the labels in variables; they are never looked up again.
    InputIp = Entry(CadreParamètres)
    InputIp.insert("end", IP) # pre-fill with the IP resolved above
    InputIp.pack(anchor=CENTER)
    # Port label.
    Label(CadreParamètres, text="Port", bg="Grey").pack(anchor=CENTER, pady=7)
    InputPort = Entry(CadreParamètres)
    InputPort.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["PortPréféré"] != "Inconnu":
        # The user configured a default port.
        Fonctions.placeholder(InputPort, Paramètres.DicoParamètres["PortPréféré"], True)
        # placeholder() mimics the HTML attribute of the same name: it shows
        # a suggestion that is cleared from the entry when it is clicked.
    else:
        PortRecommandé = randint(49152, 65535)
        # Recommend a port from the dynamic/private range (least used).
        Fonctions.placeholder(InputPort, PortRecommandé, True)
    # Username label.
    Label(CadreParamètres, text="Votre nom d'utilisateur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputNom = Entry(CadreParamètres)
    InputNom.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["NomUserDéfaut"] != "Inconnu":
        # The user configured a default username.
        Fonctions.placeholder(InputNom, Paramètres.DicoParamètres["NomUserDéfaut"], True)
    else:
        SuggestionNom = choices(ListeNoms)
        Fonctions.placeholder(InputNom, SuggestionNom[0], True)
    InputNom.bind("<Button-1>", lambda z: Fonctions.placeholder(InputNom, "", False))
    # A lambda lets us bind the click to a call that takes arguments: on
    # left-click the entry is cleared if it still holds the suggestion.
    InputPort.bind("<Button-1>", lambda z: Fonctions.placeholder(InputPort, "", False))
    BouttonDémarrer = Button(CadreParamètres, text="Démarrer", command=DémarrerServeur)
    BouttonDémarrer.pack(pady=20)
def DémarrerServeur():
    """Read the host form, start the server thread, then join as host.

    Validates the username length and the port, launches Serveur.Démarrer,
    and connects as the first client.  If saving is enabled, prompts for
    (and confirms) a save-file password before opening the conversation.
    Returns False on any validation/connection failure.
    """
    global InputIp, IP, InputPort, Port, Rôle, InputNom, FichierSauvegarde, MotDePasse, NomUser, SauvegardeUtilisée
    if len(InputNom.get()) > 16:
        tkinter.messagebox.showerror(title="Nom d'utilisateur trop long", message="Votre nom d'utilisateur doit faire moins de 16 caractères")
        # Stop the function here.
        return False
    Rôle = "Hôte"
    IP = InputIp.get()
    try: Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    Serveur.Démarrer(IP, Port, Paramètres.DicoParamètres["NombreUsersMax"], Paramètres.DicoParamètres["MotDePasse"])
    # Give the server thread time to start listening.
    time.sleep(0.2)
    if Connexion() == True:
        # Connection succeeded: optional save setup, then show the chat.
        if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
            SauvegardeUtilisée = True
            MotDePasse = tkinter.simpledialog.askstring("Mot de passe", "Veuillez saisir le mot de passe de la sauvegarde", show="•")
            if MotDePasse == None or MotDePasse == "":
                # The user cancelled: we must disconnect from the server.
                Envoyer(ModeManuel = True, MessageManuel = "/stop")
                ConnexionSocket.close()
                return False
            ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Veuillez confirmer le mot de passe", show="•")
            if ConfirmationMotDePasse == None or ConfirmationMotDePasse == "":
                Envoyer(ModeManuel = True, MessageManuel = "/stop")
                ConnexionSocket.close()
                return False
            while ConfirmationMotDePasse != MotDePasse:
                ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation erroné", "Les deux mots de passe ne correspondent pas. Veuillez confirmer le mot de passe", show="•")
            FichierSauvegarde = Sauvegarde.InitialisationSauvegarde(MotDePasse)
        AffichageConversations()
def Connexion():
    """Open the socket to the server and perform the crypto handshake.

    Sends our username, public key and modulus; receives the server's
    authorisation plus its public key, modulus, password flag and current
    user count.  If the server requires a password, prompts the user,
    encrypts it with the server's key and retries until accepted or
    cancelled.  Returns True on success, False otherwise.
    """
    global IP, Port, NomUser, InputNom, ConnexionSocket, InputIp, Rôle, CléPublique, CléPubliqueServeur, ModuleServeur, NombreConnectés
    IP = InputIp.get()
    try: Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    NomUser = InputNom.get()
    # AF_INET => IPv4; SOCK_STREAM => TCP (a continuous stream of data).
    ConnexionSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # If there is no answer within 5 seconds (more than enough for plain
    # TCP packets), an exception is raised.
    ConnexionSocket.settimeout(5)
    try: ConnexionSocket.connect((IP, Port))
    except (ConnectionRefusedError, socket.timeout):
        # Could not reach the server.
        if Rôle != "Hôte":
            # The host already received the error from the server itself,
            # so display nothing in that case.
            MessageErreur = "IL semblerait que les coordonées du serveur ne soit pas valides. Réferez vous à l'Aide pour régler ce problème."
            tkinter.messagebox.showerror(title = "Problème de coordonnées", message = MessageErreur)
        return False
    else:
        # Format, then send our encryption material to the server.
        InfosChiffrement = f"{NomUser}\n{CléPublique}\n{Module}"
        InfosChiffrement = InfosChiffrement.encode('utf-8')
        ConnexionSocket.send(bytes(InfosChiffrement))
        # Receive the connection authorisation and the server's encryption material.
        AutorisationEtDonnées = ConnexionSocket.recv(4096)
        AutorisationEtDonnées = AutorisationEtDonnées.decode("utf-8")
        if AutorisationEtDonnées != "False":
            # The server authorised the connection; unpack the "|"-separated fields.
            AutorisationEtDonnées = AutorisationEtDonnées.split("|")
            CléPubliqueServeur = int(AutorisationEtDonnées[0])
            ModuleServeur = int(AutorisationEtDonnées[1])
            PrésenceMotDePasse = AutorisationEtDonnées[2]
            NombreConnectés = int(AutorisationEtDonnées[3])
            if PrésenceMotDePasse == "True" and Rôle != "Hôte":
                # The host does not need to log in.
                ConnexionEnAttente = True
                while ConnexionEnAttente:
                    MotDePasseServeur= tkinter.simpledialog.askstring("Mot de passe du serveur", "Ce serveur demande un mot de passe pour se connecter", show="•")
                    if MotDePasseServeur == None or MotDePasseServeur == "":
                        # The user cancelled: we must disconnect from the server.
                        ConnexionSocket.close()
                        return False
                    else:
                        MotDePasseServeurChiffré = ChiffrementRSA.chiffrement(MotDePasseServeur, CléPubliqueServeur, ModuleServeur)
                        ConnexionSocket.send(bytes(MotDePasseServeurChiffré, "utf-8"))
                        Autorisation = ConnexionSocket.recv(4096)
                        Autorisation = Autorisation.decode("utf-8")
                        if Autorisation == "OK":
                            ConnexionEnAttente = False
                        else:
                            tkinter.messagebox.showwarning(title="Mot de passe incorrect", message="Le mot de passe est incorrect")
            # Switch to non-blocking mode (see Réception for the rationale).
            ConnexionSocket.setblocking(0)
            # Report that the connection was accepted.
            return True
        else:
            # The server refused: receive the reason and show it.
            motif = ConnexionSocket.recv(4096)
            tkinter.messagebox.showerror(title="Connexion refusée par le serveur", message=motif.decode("utf-8"))
            return False
def DevenirClient():
    """Show the connection form where a client picks a server to join."""
    global InputIp, InputPort, InputNom, CadreParamètres, SousMenuCliqué
    # Remember that the user entered a sub-menu (used by RetournerMenu).
    SousMenuCliqué = True
    MessageBienvenue.pack_forget()
    CadreBouttons.pack_forget()
    CadreParamètres = Frame(fen, bg="grey")
    CadreParamètres.pack()
    # Server IP field, pre-filled with the usual LAN prefix.
    Label(CadreParamètres, text="Adresse IP du serveur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputIp = Entry(CadreParamètres)
    InputIp.insert("end", "192.168.1.")
    InputIp.pack(anchor=CENTER)
    # Server port field.
    Label(CadreParamètres, text="Port du serveur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputPort = Entry(CadreParamètres)
    InputPort.pack(anchor=CENTER)
    # Username field: saved default if configured, else a random suggestion.
    Label(CadreParamètres, text="Votre nom d'utilisateur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputNom = Entry(CadreParamètres)
    InputNom.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["NomUserDéfaut"] != "Inconnu":
        Fonctions.placeholder(InputNom, Paramètres.DicoParamètres["NomUserDéfaut"], True)
    else:
        Fonctions.placeholder(InputNom, choices(ListeNoms)[0], True)
    # A lambda lets us bind the click to a call that takes arguments.
    InputNom.bind("<Button-1>", lambda b: Fonctions.placeholder(InputNom, "", False))
    Button(CadreParamètres, text="Se connecter", command=SeConnecter).pack(pady=20)
def SeConnecter():
    """Validate the client form, connect, and open the chat screen on success.

    If saving is enabled, prompts for (and confirms) a save-file password
    first; cancelling any prompt closes the socket and aborts.
    """
    global InputIp, IP, InputPort, Port, Rôle, FichierSauvegarde, MotDePasse, SauvegardeUtilisée
    Rôle = "Client"
    IP = InputIp.get()
    try: Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    if Connexion() == True:
        if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
            SauvegardeUtilisée = True
            MotDePasse = tkinter.simpledialog.askstring("Mot de passe", "Veuillez saisir le mot de passe de la sauvegarde", show="•")
            if MotDePasse == None or MotDePasse == "":
                # The user cancelled: we must disconnect from the server.
                ConnexionSocket.close()
                return False
            ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Veuillez confirmer le mot de passe", show="•")
            if ConfirmationMotDePasse == None or ConfirmationMotDePasse == "":
                # The user cancelled: we must disconnect from the server.
                ConnexionSocket.close()
                return False
            while ConfirmationMotDePasse != MotDePasse:
                ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Confirmation erronée. Veuillez confirmer le mot de passe", show="•")
            FichierSauvegarde = Sauvegarde.InitialisationSauvegarde(MotDePasse)
        AffichageConversations()
def AffichageConversations():
    """Build the chat screen: menu entries, message list, entry and send button.

    Also starts the daemon thread (Réception) that receives server messages.
    """
    global CadreParamètres, SaisieMessage, NomUser, FilsMessages, BouttonEnvoyer, ConnexionEnCours, ThreadRéception
    Logo.pack_forget()
    CadreParamètres.pack_forget()
    BarreMenu.delete(1)
    # Re-insert "Menu" so its callback captures an up-to-date
    # ConversationEnCours argument.
    BarreMenu.insert_command(1, label="Menu", command= lambda : RetournerMenu(DemandeConfirmation = True, ConversationEnCours = True))
    BarreMenu.insert_command(2, label = "Couper Son", command = CouperSon)
    BarreMenu.insert_command(4, label = "Infos du serveur", command = InfosServeur)
    FilsMessages = Listbox(fen, width="70", height="20")
    FilsMessages.pack(pady=15)
    SaisieMessage = Entry(fen, width="60")
    SaisieMessage.pack()
    BouttonEnvoyer = Button(fen, text="Envoyer", command=Envoyer)
    BouttonEnvoyer.pack(pady=15)
    # Clicking the entry clears the placeholder text if still present.
    SaisieMessage.bind("<Button-1>", lambda a: Fonctions.placeholder(SaisieMessage, "", False))
    # Bind Return to Envoyer through a lambda so no event argument is forwarded.
    fen.bind_all('<Return>', lambda c: Envoyer())
    # The receive thread keeps running while this flag stays True.
    ConnexionEnCours = True
    ThreadRéception = threading.Thread(target=Réception)
    ThreadRéception.daemon = True # when only this thread remains, the program exits
    ThreadRéception.start()
    Fonctions.placeholder(SaisieMessage, "Saisissez votre message ici", True)
def Envoyer(ModeManuel = False, MessageManuel = None):
    """Encrypt and send a message or command to the server.

    ModeManuel=True sends MessageManuel instead of reading the entry field
    (used internally, e.g. to send "/stop" on shutdown).  Commands start
    with "/" and are permission-checked against the local Rôle before
    being sent; chat messages are echoed into the local message list,
    saved when saving is enabled, and rate-limited via EnvoiPossible.
    """
    global SaisieMessage, NomUser, FilsMessages, ConnexionSocket, NombreErreurs, CléPubliqueServeur, ModuleServeur, SonActivé, EnvoiPossible
    if ModeManuel == True: message = MessageManuel
    else: message = SaisieMessage.get()
    if len(message) > 1000: tkinter.messagebox.showerror(title="Attention au spam !", message="Afin d'éviter de surcharger le serveur, les messages de plus de 1000 caractères sont interdits")
    elif message == "": pass
    elif message[0] == "/":
        # It is a command.
        PremierArgument = Fonctions.ParserCommande(message)
        RéponseUser = None
        stop = False
        Permission = True
        if PremierArgument == "/stop" and ModeManuel == False and Rôle == "Hôte":
            RéponseUser = tkinter.messagebox.askokcancel("Kripto","Voulez vraiment arrêter le serveur ?")
            stop = True
        elif PremierArgument == "/stop" and ModeManuel == False and Rôle != "Hôte":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas arrêter le serveur, vous n'êtes pas l'hôte de la disscusion")
            Permission = False
        elif PremierArgument == "/lock" and Rôle == "Client" or message == "/unlock" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas verrouiler/déverrouiller le serveur, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/ban" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas bannir un client, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/kick" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas kicker un client, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/op" and Rôle != "Hôte":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas utiliser cette commande, vous n'êtes pas l'hôte de la disscusion")
            Permission = False
        # Send when: the host confirmed a /stop, OR this is an internal
        # (manual-mode) send, OR it is any permitted non-/stop command.
        # (Relies on Python's and-before-or precedence.)
        if RéponseUser == True and Rôle == "Hôte" or ModeManuel == True or PremierArgument != "/stop" and Permission == True:
            message = Fonctions.formaterPaquet("Commande", message)
            message = ChiffrementRSA.chiffrement(message, CléPubliqueServeur, ModuleServeur)
            messageFinal = f"{len(message)}-{message}"
            messageFinal = messageFinal.encode('utf-8')
            try: ConnexionSocket.send(bytes(messageFinal))
            except (ConnectionResetError, ConnectionAbortedError):
                # The server did not answer.
                if NombreErreurs < 3:
                    tkinter.messagebox.showerror(title="Erreur de serveur", message="Impossible de joindre le serveur. Veuillez réessayer.")
                    NombreErreurs += 1
                else:
                    # After three failures, stop and invite the user to reconnect.
                    messsageErreur = "Le serveur est injoignable pour le moment. Veuillez vous reconnecter ou bien référez vous à l'Aide"
                    tkinter.messagebox.showerror(title="Aïe...", message=messsageErreur)
                    RetournerMenu(DemandeConfirmation = False, ConversationEnCours = True)
            if stop == True: RetournerMenu(DemandeConfirmation = None, ConversationEnCours = True, DemandeArrêt = False)
            SaisieMessage.delete(0, 'end')
    elif len(message) != 0 and EnvoiPossible:
        EnvoiPossible = False
        # UI-formatted copy, displayed only if the send succeeds.
        messageInterface = f"[{time.strftime('%H:%M:%S')}] {NomUser} → {message}"
        message = Fonctions.formaterPaquet("Message", message)
        message = ChiffrementRSA.chiffrement(message, CléPubliqueServeur, ModuleServeur)
        # Prefix a header carrying the total message length.
        messageFinal = f"{len(message)}-{message}"
        messageFinal = messageFinal.encode('utf-8')
        try: ConnexionSocket.send(bytes(messageFinal))
        except (ConnectionResetError, ConnectionAbortedError):
            # The server did not answer.
            if NombreErreurs < 3:
                tkinter.messagebox.showerror(title="Aïe...", message="Impossible de joindre le serveur. Veuillez réessayer.")
                NombreErreurs += 1
            else:
                # After three failures, stop and invite the user to reconnect.
                messsageErreur = "Le serveur est injoignable pour le moment. Veuillez vous reconnecter ou bien référez vous à l'Aide"
                tkinter.messagebox.showerror(title="Aïe...", message=messsageErreur)
                RetournerMenu(DemandeConfirmation = False, ConversationEnCours = True)
        else:
            # No exception occurred: echo the message locally.
            if len(messageInterface) > 70:
                # Wrap lines longer than the 70-character Listbox width.
                LignesMessages = Fonctions.couperPhrases(messageInterface)
                for ligne in LignesMessages:
                    FilsMessages.insert(END, ligne)
                    if Paramètres.DicoParamètres["Sauvegarde"] == "Activée" and SauvegardeUtilisée:
                        Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, ligne)
            else:
                FilsMessages.insert(END, messageInterface)
                if Paramètres.DicoParamètres["Sauvegarde"] == "Activée" and SauvegardeUtilisée:
                    Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, messageInterface)
            # Scroll to the most recent message.
            FilsMessages.yview(END)
            if SonActivé == True:
                if Paramètres.DicoParamètres["SonEnvoi"] != "Inconnu":
                    winsound.PlaySound("Sons/" + Paramètres.DicoParamètres["SonEnvoi"], winsound.SND_ASYNC)
                else:
                    winsound.PlaySound("Sons/Pop.wav", winsound.SND_ASYNC)
            SaisieMessage.delete(0, 'end')
def RéactivationEnvoi():
    """Re-enable sending, then re-arm itself every 500 ms (crude rate limiter)."""
    global EnvoiPossible
    EnvoiPossible = True
    # after() runs this callback again asynchronously 500 ms from now, which
    # is what restores EnvoiPossible after Envoyer() clears it on each send.
    fen.after(500, RéactivationEnvoi)
def Réception():
    """Background receive loop (runs on the ThreadRéception daemon thread).

    Polls the non-blocking socket, reassembles length-prefixed messages,
    decrypts them with our private key, interprets control words
    (ban/kick/connexion/déconnexion/promotion/rétrogradé) and appends chat
    lines to the UI, saving them when saving is enabled.
    """
    global FilsMessages, ConnexionSocket, CléPrivée, Module, SonActivé, ConnexionEnCours, NombreConnectés, Rôle
    while ConnexionEnCours == True:
        # True when the received message must not be displayed or notified
        # (connection/disconnection bookkeeping, ban/kick, ...).
        NotifSilencieuse = False
        # The socket is non-blocking (setblocking(0) in Connexion), so recv
        # raises BlockingIOError instead of waiting — this keeps the loop
        # asynchronous.  32768 is the receive buffer size.
        try: MessageReçu = ConnexionSocket.recv(32768)
        except BlockingIOError:
            # Nothing was sent; sleep briefly to spare the machine's resources.
            time.sleep(0.1)
        except (ConnectionAbortedError, ConnectionResetError):
            # The server crashed.
            tkinter.messagebox.showerror(title="Problème de serveur", message="Le serveur a crashé...")
            RetournerMenu(ConversationEnCours = True)
        else:
            # A message was received.
            MessageReçu = MessageReçu.decode("utf-8")
            if MessageReçu != "":
                # Messages carry a small header: "<total length>-<payload>",
                # e.g. "564-6646464/65656/...".
                MessageReçu = MessageReçu.split("-")
                LongeurMessage = int(MessageReçu[0])
                while len(MessageReçu[1]) < LongeurMessage:
                    # Keep receiving until the whole payload has arrived.
                    SuiteDuMessage = ConnexionSocket.recv(32768)
                    SuiteDuMessage = SuiteDuMessage.decode("utf-8")
                    MessageReçu[1] += SuiteDuMessage
                # Only index 1 (the payload) is encrypted; index 0 is its length.
                MessageReçu = ChiffrementRSA.déchiffrement(MessageReçu[1], CléPrivée, Module)
                if MessageReçu == "ban":
                    tkinter.messagebox.showinfo(title = "Vous avez été banni", message = "Vous avez été banni du serveur, vous ne pouvez plus vous reconnecter.")
                    ConnexionEnCours = False
                    RetournerMenu(ConversationEnCours = True)
                    NotifSilencieuse = True
                elif MessageReçu == "kick":
                    tkinter.messagebox.showinfo(title = "Vous avez été kické", message = "Vous avez été kické du serveur.")
                    ConnexionEnCours = False
                    RetournerMenu(ConversationEnCours = True)
                    NotifSilencieuse = True
                # Bug fix: this used to restart with a bare ``if``, so a
                # "ban"/"kick" word fell through into the display branch
                # below after the UI had already been torn down.
                elif MessageReçu == "connexion":
                    NombreConnectés += 1
                    NotifSilencieuse = True
                elif MessageReçu == "déconnexion":
                    NombreConnectés -= 1
                    NotifSilencieuse = True
                elif MessageReçu == "promotion":
                    Rôle = "Admin"
                    NotifSilencieuse = True
                elif MessageReçu == "rétrogradé":
                    Rôle = "Client"
                    NotifSilencieuse = True
                elif len(MessageReçu) > 70:
                    # Wrap lines longer than the 70-character Listbox width.
                    LignesMessages = Fonctions.couperPhrases(MessageReçu)
                    for ligne in LignesMessages:
                        FilsMessages.insert(END, ligne)
                        if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
                            # Bug fix: was a bare ``NouvelleLigne(...)``
                            # (NameError) — the helper lives in Sauvegarde,
                            # as the sibling branch below already does.
                            Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, ligne)
                else:
                    FilsMessages.insert(END, MessageReçu)
                    if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
                        Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, MessageReçu)
                # Scroll to the most recent message.
                # NOTE(review): after ban/kick, RetournerMenu may already have
                # destroyed this widget — confirm its teardown behaviour.
                FilsMessages.yview(END)
                if FenêtreALeFocus == False and NotifSilencieuse == False and Paramètres.DicoParamètres["Notification"] == "Activée":
                    Fonctions.AfficherNotification("Kripto", MessageReçu)
                if SonActivé == True and NotifSilencieuse == False:
                    if Paramètres.DicoParamètres["SonRéception"] != "Inconnu":
                        winsound.PlaySound("Sons/" + Paramètres.DicoParamètres["SonRéception"], winsound.SND_ASYNC)
                    else:
                        winsound.PlaySound("Sons/Dong.wav", winsound.SND_ASYNC)
def RetournerMenu(DemandeConfirmation = None, ConversationEnCours = None, DepuisMenu = None, DemandeArrêt = True):
    """Tear down the current screen and return to the main menu.

    Parameters:
        DemandeConfirmation: if True, ask the user to confirm before leaving.
        ConversationEnCours: True when leaving the conversation screen.
        DepuisMenu: True when invoked from the menu bar "Menu" entry.
        DemandeArrêt: if True and the local user is the host, send "/stop" to
            the server before leaving so it shuts down cleanly.
    """
    global FilsMessages, SaisieMessage, BouttonEnvoyer, SousMenuCliqué, ConnexionEnCours
    Confirmation = None
    if DemandeConfirmation == True:
        Confirmation = messagebox.askquestion (f"Vous partez déja {NomUser} ?","Vous voulez vraiment retourner au menu ?", icon = "warning")
    if Confirmation == "yes" or DemandeConfirmation == None:
        if ConversationEnCours:
            # The user was on the conversation screen
            SousMenuCliqué = False
            if Rôle == "Hôte" and DemandeArrêt == True:
                Envoyer(True, "/stop") # Sending "/stop" keeps the server from crashing / spinning idle
                time.sleep(0.3)
            BarreMenu.delete(1)
            BarreMenu.insert_command(1, label="Menu", command= lambda : RetournerMenu(DepuisMenu = True))
            # Re-create the "Menu" entry so its callback carries an up-to-date
            # "ConversationEnCours" argument
            FilsMessages.pack_forget()
            SaisieMessage.pack_forget()
            BouttonEnvoyer.pack_forget()
            # NOTE(review): ALL is the tkinter canvas tag "all", not an event
            # sequence; presumably meant to remove the <Return> binding added
            # in AffichageConversations — verify the intended event name.
            fen.unbind_all(ALL)
            fen.bind("<FocusIn>", lambda x: PasserEnTrue())
            fen.bind("<FocusOut>", lambda x: PasserEnFalse())
            # Remove the "Couper Son" and "Infos Serveur" menu entries.
            # NOTE(review): after delete(2), later entries shift up one index,
            # so delete(3) removes what was originally at index 4 — looks
            # intentional given the insertions in AffichageConversations, but
            # confirm.
            BarreMenu.delete(2)
            BarreMenu.delete(3)
            ConnexionEnCours = False # stops the reception thread's loop
            ConnexionSocket.close()
        if DepuisMenu:
            # The user was on the menu screen
            if SousMenuCliqué:
                # The user was in a sub-menu (server start or connection form)
                Logo.pack_forget()
                CadreParamètres.pack_forget()
        if SousMenuCliqué or ConversationEnCours:
            # The user is not on the main menu: rebuild it
            if SousMenuCliqué:
                SousMenuCliqué = False
            AfficherMenu()
def InfosServeur():
    """Show the server information (address, port, connected users) in a Toplevel window."""
    global IP, Port, NombreConnectés
    popup = Toplevel()
    popup.geometry("300x280")
    popup.configure(bg="grey")
    popup.resizable(width=False, height=False)
    popup.iconbitmap(bitmap="Médias/information.ico")
    popup.title("Infos du serveur")
    # Each (caption, value) pair becomes a bold title followed by its value.
    for caption, value in (
        ("Adresse du serveur", IP),
        ("Port du serveur", Port),
        ("Utiliseurs connectées", str(NombreConnectés)),
    ):
        Label(popup, text=caption, bg="Grey", font=PoliceTitre).pack(pady=10)
        Label(popup, text=value, bg="Grey", font=PoliceSousTitre).pack()
    Button(popup, text="Fermer", command=popup.destroy).pack(pady=20, side=BOTTOM)
    popup.focus_force()  # bring the window to the foreground
    popup.mainloop()
def Aide():
    """Show the help window (IP / port troubleshooting tips) in a Toplevel."""
    popup = Toplevel()
    popup.geometry("300x280")
    popup.configure(bg="grey")
    popup.resizable(width=False, height=False)
    popup.iconbitmap(bitmap="Médias/information.ico")
    popup.title("Aide")
    # --- IP help section ---
    Label(popup, text="Si votre IP n'est pas valide", bg="Grey", font=PoliceTitre).pack(pady=10)
    Label(popup, text="Entrez vous même l'adresse IPv4.\nPour la trouver :", bg="Grey", font=PoliceSousTitre).pack()
    tutorial_link = Label(popup, text="le-routeur-wifi.com/adresse-ip-mac/", bg="Grey", font=PoliceSousTitre, fg="blue")
    tutorial_link.pack()
    # Clicking the blue label opens the tutorial page in the browser.
    tutorial_link.bind("<Button-1>", lambda e: Fonctions.callback("https://le-routeur-wifi.com/adresse-ip-mac/"))
    # --- Port help section ---
    Label(popup, text="Si votre port n'est pas valide", bg="Grey", font=PoliceTitre).pack(pady=10)
    Label(popup, text="Veillez à choisir un nombre entier\nentre 0 et 65535", bg="Grey", font=PoliceSousTitre).pack()
    Button(popup, text="Fermer", command=popup.destroy).pack(pady=20, side=BOTTOM)
    popup.focus_force()  # bring the window to the foreground
    popup.mainloop()
def ActiverSon():
    """Turn notification sounds back on and flip the menu entry to the opposite action."""
    global SonActivé
    SonActivé = True
    # Replace the entry at menu index 2 with "Couper le son" at the same spot.
    BarreMenu.delete(2)
    BarreMenu.insert_command(2, label="Couper le son", command=CouperSon)
def CouperSon():
    """Mute notification sounds and flip the menu entry to the opposite action."""
    global SonActivé
    SonActivé = False
    # Replace the entry at menu index 2 with "Activer le son" at the same spot.
    BarreMenu.delete(2)
    BarreMenu.insert_command(2, label="Activer le son", command=ActiverSon)
def Contact():
    """Bug-report window: collects a title and a description and files a
    GitHub issue through the Kripiti helper module."""
    def EnvoiAPI():
        # Gather the form content and the OS fingerprint, then submit the issue.
        TitreIssue = InputObjet.get()
        Message = InputMessage.get("1.0", tkinter.END)
        Plateforme = platform.system() + " " + platform.release() + " " + platform.version()
        if Kripiti.CréerUneIssue(TitreIssue, Message, Plateforme) == True:
            tkinter.messagebox.showinfo(
                title = "Succès !",
                message = "Votre bug a été reporté à nos équipes avec succès. Merci de votre contribution !"
            )
            fenContact.destroy()
        else:
            tkinter.messagebox.showerror(
                title = "Oups...",
                message = "Impossible de reporter le bug pour le moment. Merci de retenter."
            )
    fenContact = Toplevel()
    fenContact.geometry("300x280")
    fenContact.configure(bg="grey")
    fenContact.resizable(width=False, height=False)
    fenContact.iconbitmap(bitmap="Médias/information.ico")
    fenContact.title("Contact")
    # Subject label + single-line input
    Label(fenContact, text="Quel est le problème ?", bg="Grey", font=PoliceTitre).pack(pady=10)
    InputObjet = Entry(fenContact, width = 50, bg="White", font=PoliceSousTitre)
    InputObjet.pack(padx=20)
    # Details label + multi-line input
    Label(fenContact, text="Un peu plus de détails ?", bg="Grey", font=PoliceTitre).pack(pady=10)
    InputMessage = ScrolledText(fenContact, width = 50, height = 5, bg="White", font=PoliceSousTitre)
    InputMessage.pack(padx=20)
    Button(fenContact, text="Envoyer votre message", command=EnvoiAPI).pack(pady=20, side=BOTTOM)
    fenContact.focus_force()
    # Bring the window to the foreground
    fenContact.mainloop()
def fermeture():
    """Window-close handler: ask for confirmation, then exit the whole process."""
    if tkinter.messagebox.askokcancel("Kripto","Vous partez déja ?") == True:
        # sys.exit() rather than exit() so background threads don't keep running
        sys.exit()
def PasserEnTrue():
    """Record that the main window currently has focus (used to silence notifications)."""
    global FenêtreALeFocus
    FenêtreALeFocus = True
def PasserEnFalse():
    """Record that the main window lost focus (notifications may then be shown)."""
    global FenêtreALeFocus
    FenêtreALeFocus = False
# Code executed when the application starts
Paramètres.LectureParamètres()  # load the saved user preferences
ListeNoms = ["Autruche", "Bob", "AmiralBenson", "TomNook", "Karamazov", "PatéEnCroute", "Risitas", "Clown"]
# Usernames suggested to the user as entry placeholders.
FichierSauvegarde = None
MotDePasse = None
# Save-file handle and save password; filled in when a conversation starts
Module, CléPublique, CléPrivée = ChiffrementRSA.génération(16)
# Generate our RSA key pair and keep the encryption modulus
NombreErreurs = 0  # consecutive send failures tolerated before giving up
NombreConnectés = 1 # count ourselves
EnvoiPossible = True  # anti-spam flag toggled by Envoyer()
SonActivé = True
SousMenuCliqué = False
SauvegardeUtilisée = None # whether the save feature will be used is unknown at this point
FenêtreALeFocus = True
# Lets us show notifications only while the window is in the background
fen = Tk()
fen.geometry("550x460")
fen.title("Kripto - Un chat chiffré")
fen.configure(bg="grey")
fen.resizable(width=False, height=False)
fen.iconbitmap(bitmap="Médias/icone.ico")
fen.bind("<FocusIn>", lambda x: PasserEnTrue())
fen.bind("<FocusOut>", lambda x: PasserEnFalse())
fen.protocol("WM_DELETE_WINDOW", fermeture)  # confirm before quitting
BarreMenu = Menu(fen)
BarreMenu.add_command(label="Menu", command= lambda: RetournerMenu(DepuisMenu = True))
BarreMenu.add_command(label="Aide", command=Aide)
BarreMenu.add_command(label="Sauvegardes", command=LecteurSauvegarde.LecteurSauvegarde)
BarreMenu.add_command(label="Paramètres", command=Paramètres.InterfaceParamètres)
BarreMenu.add_command(label="Contact", command=Contact)
fen.configure(menu=BarreMenu)
# Shared fonts used across all screens
PoliceTitreBienvenue = tkFont.Font(family="Verdanna",size=16,weight="bold")
PoliceBoutton = tkFont.Font(family="Arial",size=12,weight="bold")
PoliceTitre = tkFont.Font(size=14,weight="bold")
PoliceSousTitre = tkFont.Font(size=12)
ImageLogo = PhotoImage(file="Médias/Logo.png")
AfficherMenu()
fen.mainloop()
import sys
import time
import socket
import tkinter
import winsound
import platform
import threading
from tkinter import *
import tkinter.simpledialog
import tkinter.font as tkFont
from tkinter import messagebox
from random import randint, choices
from tkinter.scrolledtext import ScrolledText
from Modules import ChiffrementRSA, Fonctions, LecteurSauvegarde, Paramètres, Sauvegarde, Serveur, Kripiti
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
Index
I. Définition de AfficherMenu().......................................................128
La fonction qui affiche le menu principal de l'application. Elle est appellée au
démarrage de l'application et quand l'utilisateur retourne au menu.
II. Hôte et clients...................................................................150
Les fonctions qui servent à afficher le menus de connexion pour le client et celles
qui servent à démarrer le serveur.
A. Travail spécifique à l'hôte....................................................150
1. Définition de DevenirHôte()................................................150
Cette fonction affiche le menu qui permet à l'hôte de configurer le mode de
connexion au serveur (Ip, Port et nom d'utilisateur)
2. Définition de DémarrerServeur()............................................222
Cette fonction lance le thread du serveur, en récupérant les informations
données sur l'interface de connexion.
B. Fonctions spécifiques au client................................................266
1. Définition de DevenirClient()..............................................266
Cette fonction affiche l'interface qui permet choisir à quel serveur se
connecter
2. Définition de SeConnecter()................................................367
Fonction qui récupere les informations saisies par l'utilisateur dans la
fonction DevenirClient() et qui initie une connexion avec le serveur.
III. Connexion et envoi de messages...................................................314
Les fonctions dédiées à l'envoi et à la réception de messages au serveur
A. Connexion......................................................................314
1. Définition de Connexion()..................................................314
Cette fonction sert à se connecter au serveur et à Envoyer le nom
d'utilisateur, la clé publique, le module de chiffrement au serveur, et on
recoit les informations de chiffrement du serveur, la clé publique et le
module de chiffrement. Si le serveur demande un mot de passe, c'est cette
fonction qui le récupére auprès de l'utilisateur, le chiffre et l'envoi au
serveur.
B. Définition de AffichageConversations().........................................381
Cette fonction sert à générer l'interface de la conversation
C.Envoyer et Recevoir.............................................................481
1. Définition de Envoyer()....................................................481
Fonctions qui fonctionne avec deux mode :
- Le mode "automatique": La fonction récupere la valeur du champ de
saisie et l'envoi au serveur
- Le mode "manuel": La fonction est appellée et envoie le message au
serveur
2. Définition de Réception()..................................................607
Cette fonction est un thread (Suite d'instructions qui s'exécutent arrière
plan de l'application). Il permet de recevoir des messages du serveur.
IV. Barre d'application...............................................................687
A. Définition de RetournerMenu()..................................................687
Fonction qui efface le contenu de la fenêtre et affiche le menuPrincipal
B. Définition de InfosServeur()...................................................743
La fenêtre qui affiche les informations sur le serveur
C. Définition de Aide()...........................................................787
Fenêtre qui affiche de l'aide
D. Activer et désactiver le son...................................................828
Fonctions triviales
1. Définition de ActiverSon().................................................828
2. Définition de CouperSon()..................................................837
E. Définition de Contact()........................................................847
Fonction qui permet à l'utilisateur de reporter un bug via les Issues GitHub
avec notre bot "Kripiti"
V. Définition de fermeture()..........................................................900
Fonctions appelée quand l'utilisateur ferme la fenêtre
VI.Lancement du programme.............................................................912
"""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
def AfficherMenu():
    """Build the main menu screen: logo, welcome text, and the host/client buttons."""
    global MessageBienvenue, CadreBouttons, Logo
    Logo = Label(fen, bg="grey", image=ImageLogo)
    Logo.pack()
    MessageBienvenue = Label(fen, text="Bienvenue dans Kripto. Pour démarrez, dites-nous \nsi vous voulez être hôte ou bien client.", bg="grey", font=PoliceTitre)
    MessageBienvenue.pack()
    CadreBouttons = Frame(fen, bg="grey")
    CadreBouttons.pack(pady=60)
    # The two role buttons sit side by side inside the frame.
    for caption, action in (("Être hôte", DevenirHôte), ("Être client", DevenirClient)):
        Button(CadreBouttons, text=caption, font=PoliceBoutton, command=action).pack(side=LEFT, padx=7)
def DevenirHôte():
    """Show the host configuration form: the IP, port and username that will
    be used to start the server."""
    global InputIp, IP, InputPort, InputNom, CadreParamètres, SousMenuCliqué
    SousMenuCliqué = True
    # If the user returns to the menu we know they were in a sub-menu
    MessageBienvenue.pack_forget()
    CadreBouttons.pack_forget()
    Machine = socket.gethostname()
    IP = socket.gethostbyname(Machine)  # local IPv4, suggested as default
    CadreParamètres = Frame(fen, bg="grey")
    CadreParamètres.pack()
    # IP address label
    Label(CadreParamètres, text="Votre Adresse IP", bg="Grey").pack(anchor=CENTER, pady=7)
    # No need to keep the labels in variables: they are never accessed again
    InputIp = Entry(CadreParamètres)
    InputIp.insert("end", IP) # pre-fill with the address detected above
    InputIp.pack(anchor=CENTER)
    # Port label
    Label(CadreParamètres, text="Port", bg="Grey").pack(anchor=CENTER, pady=7)
    InputPort = Entry(CadreParamètres)
    InputPort.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["PortPréféré"] != "Inconnu":
        # The user configured a default port
        Fonctions.placeholder(InputPort, Paramètres.DicoParamètres["PortPréféré"], True)
        # placeholder() mimics the HTML attribute of the same name: it shows a
        # suggestion that is cleared when the field is clicked.
    else:
        PortRecommandé = randint(49152, 65535)
        # Suggest a port from the dynamic/private range (the least used one)
        Fonctions.placeholder(InputPort, PortRecommandé, True)
    # Username label
    Label(CadreParamètres, text="Votre nom d'utilisateur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputNom = Entry(CadreParamètres)
    InputNom.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["NomUserDéfaut"] != "Inconnu":
        # The user configured a default username
        Fonctions.placeholder(InputNom, Paramètres.DicoParamètres["NomUserDéfaut"], True)
    else:
        SuggestionNom = choices(ListeNoms)
        Fonctions.placeholder(InputNom, SuggestionNom[0], True)
    InputNom.bind("<Button-1>", lambda z: Fonctions.placeholder(InputNom, "", False))
    # lambdas let us bind callbacks that take arguments: clicking a field
    # clears it if it still holds the original suggestion
    InputPort.bind("<Button-1>", lambda z: Fonctions.placeholder(InputPort, "", False))
    BouttonDémarrer = Button(CadreParamètres, text="Démarrer", command=DémarrerServeur)
    BouttonDémarrer.pack(pady=20)
def DémarrerServeur():
    """Read the host form (IP, port, username), start the server thread,
    connect to it, and optionally set up the encrypted save file before
    showing the conversation screen.

    Returns False (and aborts) on invalid input, failed connection, or user
    cancellation of any dialog.
    """
    global InputIp, IP, InputPort, Port, Rôle, InputNom, FichierSauvegarde, MotDePasse, NomUser, SauvegardeUtilisée
    if len(InputNom.get()) > 16:
        tkinter.messagebox.showerror(title="Nom d'utilisateur trop long", message="Votre nom d'utilisateur doit faire moins de 16 caractères")
        return False
        # Stop here: the username is too long for the protocol
    Rôle = "Hôte"
    IP = InputIp.get()
    try:
        Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    Serveur.Démarrer(IP, Port, Paramètres.DicoParamètres["NombreUsersMax"], Paramètres.DicoParamètres["MotDePasse"])
    time.sleep(0.2)
    # Give the server thread a moment to start listening
    if Connexion() == True:
        # Connection succeeded: optionally set up the encrypted conversation save
        if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
            SauvegardeUtilisée = True
            MotDePasse = tkinter.simpledialog.askstring("Mot de passe", "Veuillez saisir le mot de passe de la sauvegarde", show="•")
            if MotDePasse == None or MotDePasse == "":
                # The user cancelled: shut the server down and disconnect
                Envoyer(ModeManuel = True, MessageManuel = "/stop")
                ConnexionSocket.close()
                return False
            ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Veuillez confirmer le mot de passe", show="•")
            while ConfirmationMotDePasse != MotDePasse:
                if ConfirmationMotDePasse == None or ConfirmationMotDePasse == "":
                    # BUGFIX: cancelling (or leaving empty) a confirmation
                    # dialog used to re-prompt forever; treat it like the
                    # other cancellation paths instead.
                    Envoyer(ModeManuel = True, MessageManuel = "/stop")
                    ConnexionSocket.close()
                    return False
                ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation erroné", "Les deux mots de passe ne correspondent pas. Veuillez confirmer le mot de passe", show="•")
            FichierSauvegarde = Sauvegarde.InitialisationSauvegarde(MotDePasse)
        AffichageConversations()
def Connexion():
    """Open the socket to the server and perform the handshake.

    Sends our username, public key and modulus; receives the server's public
    key, modulus, whether a password is required, and the connected-user
    count. If a password is required (and we are not the host), prompts the
    user, RSA-encrypts the password with the server's key and sends it.

    Returns True when the connection is accepted, False otherwise.
    """
    global IP, Port, NomUser, InputNom, ConnexionSocket, InputIp, Rôle, CléPublique, CléPubliqueServeur, ModuleServeur, NombreConnectés
    IP = InputIp.get()
    try: Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    NomUser = InputNom.get()
    ConnexionSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Socket configuration:
    # - AF_INET => IPv4
    # - SOCK_STREAM => TCP, a continuous stream of data
    ConnexionSocket.settimeout(5)
    # If there is no answer within 5 seconds (more than enough for plain TCP
    # packets), an exception is raised
    try: ConnexionSocket.connect((IP, Port))
    except (ConnectionRefusedError, socket.timeout):
        # Could not reach the server
        if Rôle != "Hôte":
            # The host already received the error from the server itself, so
            # nothing is shown in that case
            MessageErreur = "IL semblerait que les coordonées du serveur ne soit pas valides. Réferez vous à l'Aide pour régler ce problème."
            tkinter.messagebox.showerror(title = "Problème de coordonnées", message = MessageErreur)
        return False
    else:
        InfosChiffrement = f"{NomUser}\n{CléPublique}\n{Module}"
        InfosChiffrement = InfosChiffrement.encode('utf-8')
        ConnexionSocket.send(bytes(InfosChiffrement))
        # Format, then send our encryption parameters to the server
        AutorisationEtDonnées = ConnexionSocket.recv(4096)
        AutorisationEtDonnées = AutorisationEtDonnées.decode("utf-8")
        # The server replies with the connection authorization and its own
        # encryption parameters
        if AutorisationEtDonnées != "False":
            # The server accepts the connection
            AutorisationEtDonnées = AutorisationEtDonnées.split("|")
            # Fields: "key|modulus|password_required|connected_count"
            CléPubliqueServeur = int(AutorisationEtDonnées[0])
            ModuleServeur = int(AutorisationEtDonnées[1])
            PrésenceMotDePasse = AutorisationEtDonnées[2]
            NombreConnectés = int(AutorisationEtDonnées[3])
            if PrésenceMotDePasse == "True" and Rôle != "Hôte":
                # The host does not need to authenticate
                ConnexionEnAttente = True
                while ConnexionEnAttente:
                    MotDePasseServeur= tkinter.simpledialog.askstring("Mot de passe du serveur", "Ce serveur demande un mot de passe pour se connecter", show="•")
                    if MotDePasseServeur == None or MotDePasseServeur == "":
                        # The user cancelled: disconnect from the server
                        ConnexionSocket.close()
                        return False
                    else:
                        MotDePasseServeurChiffré = ChiffrementRSA.chiffrement(MotDePasseServeur, CléPubliqueServeur, ModuleServeur)
                        ConnexionSocket.send(bytes(MotDePasseServeurChiffré, "utf-8"))
                        Autorisation = ConnexionSocket.recv(4096)
                        Autorisation = Autorisation.decode("utf-8")
                        if Autorisation == "OK":
                            ConnexionEnAttente = False
                        else:
                            tkinter.messagebox.showwarning(title="Mot de passe incorrect", message="Le mot de passe est incorrect")
            ConnexionSocket.setblocking(0)
            # Switch to non-blocking mode (see the Réception() function)
            return True
            # Report that the connection was accepted
        else:
            # The server refuses the connection
            motif = ConnexionSocket.recv(4096)
            # The server sends the reason for the refusal
            tkinter.messagebox.showerror(title="Connexion refusée par le serveur", message=motif.decode("utf-8"))
            return False
def DevenirClient():
    """Show the client connection form: server IP, server port and username."""
    global InputIp, InputPort, InputNom, CadreParamètres, SousMenuCliqué
    SousMenuCliqué = True  # remembered so RetournerMenu() knows we left a sub-menu
    MessageBienvenue.pack_forget()
    CadreBouttons.pack_forget()
    CadreParamètres = Frame(fen, bg="grey")
    CadreParamètres.pack()
    # Server IP field, pre-filled with the common LAN prefix
    Label(CadreParamètres, text="Adresse IP du serveur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputIp = Entry(CadreParamètres)
    InputIp.insert("end", "192.168.1.")
    InputIp.pack(anchor=CENTER)
    # Server port field
    Label(CadreParamètres, text="Port du serveur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputPort = Entry(CadreParamètres)
    InputPort.pack(anchor=CENTER)
    # Username field, seeded with either the configured default or a random suggestion
    Label(CadreParamètres, text="Votre nom d'utilisateur", bg="Grey").pack(anchor=CENTER, pady=7)
    InputNom = Entry(CadreParamètres)
    InputNom.pack(anchor=CENTER)
    if Paramètres.DicoParamètres["NomUserDéfaut"] != "Inconnu":
        suggestion = Paramètres.DicoParamètres["NomUserDéfaut"]
    else:
        suggestion = choices(ListeNoms)[0]
    Fonctions.placeholder(InputNom, suggestion, True)
    # The lambda lets the binding call placeholder() with arguments: a click
    # clears the field if it still holds the suggestion
    InputNom.bind("<Button-1>", lambda b: Fonctions.placeholder(InputNom, "", False))
    Button(CadreParamètres, text="Se connecter", command=SeConnecter).pack(pady=20)
def SeConnecter():
    """Read the client form, connect to the server, optionally set up the
    encrypted save file, then show the conversation screen.

    Returns False (and aborts) on invalid input, failed connection, or user
    cancellation of any dialog.
    """
    global InputIp, IP, InputPort, Port, Rôle, FichierSauvegarde, MotDePasse, SauvegardeUtilisée
    Rôle = "Client"
    IP = InputIp.get()
    try:
        Port = int(InputPort.get())
    except ValueError:
        tkinter.messagebox.showerror(title="Problème de port", message="Le port doit être un nombre entier entre 1 et 65535")
        return False
    if Connexion() == True:
        if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
            SauvegardeUtilisée = True
            MotDePasse = tkinter.simpledialog.askstring("Mot de passe", "Veuillez saisir le mot de passe de la sauvegarde", show="•")
            if MotDePasse == None or MotDePasse == "":
                # The user cancelled: drop the freshly opened connection
                ConnexionSocket.close()
                return False
            ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Veuillez confirmer le mot de passe", show="•")
            while ConfirmationMotDePasse != MotDePasse:
                if ConfirmationMotDePasse == None or ConfirmationMotDePasse == "":
                    # BUGFIX: cancelling (or leaving empty) a confirmation
                    # dialog used to re-prompt forever; treat it as a
                    # cancellation instead, like DémarrerServeur does.
                    ConnexionSocket.close()
                    return False
                ConfirmationMotDePasse = tkinter.simpledialog.askstring("Confirmation", "Confirmation erronée. Veuillez confirmer le mot de passe", show="•")
            FichierSauvegarde = Sauvegarde.InitialisationSauvegarde(MotDePasse)
        AffichageConversations()
def AffichageConversations():
    """Build the conversation screen (message list, input field, send button)
    and start the background reception thread."""
    global CadreParamètres, SaisieMessage, NomUser, FilsMessages, BouttonEnvoyer, ConnexionEnCours, ThreadRéception
    Logo.pack_forget()
    CadreParamètres.pack_forget()
    BarreMenu.delete(1)
    BarreMenu.insert_command(1, label="Menu", command= lambda : RetournerMenu(DemandeConfirmation = True, ConversationEnCours = True))
    # Re-create the "Menu" entry so its callback carries an up-to-date
    # "ConversationEnCours" argument
    BarreMenu.insert_command(2, label = "Couper Son", command = CouperSon)
    BarreMenu.insert_command(4, label = "Infos du serveur", command = InfosServeur)
    FilsMessages = Listbox(fen, width="70", height="20")
    FilsMessages.pack(pady=15)
    SaisieMessage = Entry(fen, width="60")
    SaisieMessage.pack()
    BouttonEnvoyer = Button(fen, text="Envoyer", command=Envoyer)
    BouttonEnvoyer.pack(pady=15)
    SaisieMessage.bind("<Button-1>", lambda a: Fonctions.placeholder(SaisieMessage, "", False))
    # The lambda lets the binding call placeholder() with arguments
    fen.bind_all('<Return>', lambda c: Envoyer())
    # Bind the Return key to Envoyer through a lambda so no event argument is
    # forwarded to it
    ConnexionEnCours = True # the reception thread keeps running while this flag is True
    ThreadRéception = threading.Thread(target=Réception)
    ThreadRéception.daemon = True # the program may exit when only this thread remains
    ThreadRéception.start()
    Fonctions.placeholder(SaisieMessage, "Saisissez votre message ici", True)
def Envoyer(ModeManuel = False, MessageManuel = None):
    """Encrypt a message with the server's RSA key and send it.

    Two modes:
      - automatic (default): read the text from the input field;
      - manual (ModeManuel=True): send MessageManuel instead.

    Commands (messages starting with "/") are permission-checked locally
    before being sent. Regular messages are rate-limited through the
    EnvoiPossible flag, re-enabled 500 ms later.
    """
    global SaisieMessage, NomUser, FilsMessages, ConnexionSocket, NombreErreurs, CléPubliqueServeur, ModuleServeur, SonActivé, EnvoiPossible
    if ModeManuel == True: message = MessageManuel
    else: message = SaisieMessage.get()
    if len(message) > 1000: tkinter.messagebox.showerror(title="Attention au spam !", message="Afin d'éviter de surcharger le serveur, les messages de plus de 1000 caractères sont interdits")
    elif message == "": pass
    elif message[0] == "/":
        # The message is a command
        PremierArgument = Fonctions.ParserCommande(message)
        RéponseUser = None
        stop = False
        Permission = True
        if PremierArgument == "/stop" and ModeManuel == False and Rôle == "Hôte":
            RéponseUser = tkinter.messagebox.askokcancel("Kripto","Voulez vraiment arrêter le serveur ?")
            stop = True
        elif PremierArgument == "/stop" and ModeManuel == False and Rôle != "Hôte":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas arrêter le serveur, vous n'êtes pas l'hôte de la disscusion")
            Permission = False
        elif PremierArgument == "/lock" and Rôle == "Client" or message == "/unlock" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas verrouiler/déverrouiller le serveur, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/ban" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas bannir un client, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/kick" and Rôle == "Client":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas kicker un client, vous n'êtes pas admin de la disscusion")
            Permission = False
        elif PremierArgument == "/op" and Rôle != "Hôte":
            tkinter.messagebox.showerror(title = "Erreur de permission", message = "Vous ne pouvez pas utiliser cette commande, vous n'êtes pas l'hôte de la disscusion")
            Permission = False
        if RéponseUser == True and Rôle == "Hôte" or ModeManuel == True or PremierArgument != "/stop" and Permission == True:
            # Either the host confirmed /stop, or it is a manual (internal)
            # send, or an allowed non-/stop command
            message = Fonctions.formaterPaquet("Commande", message)
            message = ChiffrementRSA.chiffrement(message, CléPubliqueServeur, ModuleServeur)
            messageFinal = f"{len(message)}-{message}"
            # The header prefix is the total message length
            messageFinal = messageFinal.encode('utf-8')
            try: ConnexionSocket.send(bytes(messageFinal))
            except (ConnectionResetError, ConnectionAbortedError):
                # The server does not answer
                if NombreErreurs < 3:
                    tkinter.messagebox.showerror(title="Erreur de serveur", message="Impossible de joindre le serveur. Veuillez réessayer.")
                    NombreErreurs += 1
                else:
                    # After more than three errors, stop and invite the user
                    # to reconnect
                    messsageErreur = "Le serveur est injoignable pour le moment. Veuillez vous reconnecter ou bien référez vous à l'Aide"
                    tkinter.messagebox.showerror(title="Aïe...", message=messsageErreur)
                    RetournerMenu(DemandeConfirmation = False, ConversationEnCours = True)
            if stop == True: RetournerMenu(DemandeConfirmation = None, ConversationEnCours = True, DemandeArrêt = False)
            SaisieMessage.delete(0, 'end')
    elif len(message) != 0 and EnvoiPossible:
        EnvoiPossible = False
        messageInterface = f"[{time.strftime('%H:%M:%S')}] {NomUser} → {message}"
        # A copy formatted for the interface, used only if the send succeeds.
        message = Fonctions.formaterPaquet("Message", message)
        message = ChiffrementRSA.chiffrement(message, CléPubliqueServeur, ModuleServeur)
        messageFinal = f"{len(message)}-{message}"
        # Prepend a header with the total message length
        messageFinal = messageFinal.encode('utf-8')
        try: ConnexionSocket.send(bytes(messageFinal))
        except (ConnectionResetError, ConnectionAbortedError):
            # The server does not answer
            if NombreErreurs < 3:
                tkinter.messagebox.showerror(title="Aïe...", message="Impossible de joindre le serveur. Veuillez réessayer.")
                NombreErreurs += 1
            else:
                # After more than three errors, stop and invite the user to
                # reconnect
                messsageErreur = "Le serveur est injoignable pour le moment. Veuillez vous reconnecter ou bien référez vous à l'Aide"
                # The message is stored in a variable to keep the next line short
                tkinter.messagebox.showerror(title="Aïe...", message=messsageErreur)
                RetournerMenu(DemandeConfirmation = False, ConversationEnCours = True)
        else:
            # No exception occurred: show and optionally save the message
            if len(messageInterface) > 70:
                # The message to display is longer than 70 characters
                LignesMessages = Fonctions.couperPhrases(messageInterface)
                # Get a list of lines of at most 70 characters
                for ligne in LignesMessages:
                    FilsMessages.insert(END, ligne)
                    if Paramètres.DicoParamètres["Sauvegarde"] == "Activée" and SauvegardeUtilisée:
                        Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, ligne)
            else:
                FilsMessages.insert(END, messageInterface)
                if Paramètres.DicoParamètres["Sauvegarde"] == "Activée" and SauvegardeUtilisée:
                    Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, messageInterface)
            FilsMessages.yview(END)
            # Scroll to the bottom, toward the most recent message
            if SonActivé == True:
                if Paramètres.DicoParamètres["SonEnvoi"] != "Inconnu":
                    winsound.PlaySound("Sons/" + Paramètres.DicoParamètres["SonEnvoi"], winsound.SND_ASYNC)
                else:
                    winsound.PlaySound("Sons/Pop.wav", winsound.SND_ASYNC)
            SaisieMessage.delete(0, 'end')
        def RéactivationEnvoi():
            # Re-enable sending once the anti-spam delay has elapsed.
            global EnvoiPossible
            EnvoiPossible = True
        fen.after(500, RéactivationEnvoi)
        # After 500 ms (asynchronously), call the function that re-enables
        # sending. NOTE(review): this nesting inside Envoyer is inferred from
        # the surrounding flow (fen does not exist at module load time when
        # this code appears) — confirm against the original layout.
def Réception():
"""Cette fonction est un thread (Suite d'instructions qui s'exécutent arrière plan de l'application). Il permet de recevoir
des messages du serveur."""
global FilsMessages, ConnexionSocket, CléPrivée, Module, SonActivé, ConnexionEnCours, NombreConnectés, Rôle
while ConnexionEnCours == True:
#Quand Connexion est égal à False, le Thread s'arrête
NotifSilencieuse = False
#Est égal à true si le client recoit un messsage qui ne doit pas s'afficher (connexion/déconnexion par exemple)
try: MessageReçu = ConnexionSocket.recv(32768)
#Cette partie du code est dans un bloc "try, except" car "ConnexionSocket.setblocking(0)" a été défini sur False
#Au lieu d'attendre un message, si rien n'est envoyé cela va générer une exception, ce qui permet un fonctionnement asynchrone.
except BlockingIOError:
#Si aucun message n'a été envoyé, on ne fait rien et on attend pour préserver les ressources la machine
time.sleep(0.1)
except (ConnectionAbortedError, ConnectionResetError):
#Le serveur a crashé
tkinter.messagebox.showerror(title="Problème de serveur", message="Le serveur a crashé...")
RetournerMenu(ConversationEnCours = True)
#32768 est la limite d'octets recevables
else:
#Un message a été reçu
MessageReçu = MessageReçu.decode("utf-8")
if MessageReçu != "":
MessageReçu = MessageReçu.split("-")
#Le message comporte un petit entête
#Exemple = 564-6646464/65656/4564564654, 564 est içi la longueur totale du message.
LongeurMessage = int(MessageReçu[0])
while len(MessageReçu[1]) < LongeurMessage:
#Tant que le message recu est plus petit que la longueur totale du message
SuiteDuMessage = ConnexionSocket.recv(32768)
SuiteDuMessage = SuiteDuMessage.decode("utf-8")
MessageReçu[1] += SuiteDuMessage
#On ajoute la suite du message reçu
MessageReçu = ChiffrementRSA.déchiffrement(MessageReçu[1], CléPrivée, Module)
#On ne déchiffre que l'index 1 du message, qui est le messge en lui même
#0 étant la longueur de ce message
if MessageReçu == "ban":
tkinter.messagebox.showinfo(title = "Vous avez été banni", message = "Vous avez été banni du serveur, vous ne pouvez plus vous reconnecter.")
ConnexionEnCours = False
RetournerMenu(ConversationEnCours = True)
NotifSilencieuse = True
elif MessageReçu == "kick":
tkinter.messagebox.showinfo(title = "Vous avez été kické", message = "Vous avez été kické du serveur.")
ConnexionEnCours = False
RetournerMenu(ConversationEnCours = True)
NotifSilencieuse = True
if MessageReçu == "connexion":
NombreConnectés += 1
NotifSilencieuse = True
elif MessageReçu == "déconnexion":
NombreConnectés -= 1
NotifSilencieuse = True
elif MessageReçu == "promotion":
Rôle = "Admin"
NotifSilencieuse = True
elif MessageReçu == "rétrogradé":
Rôle = "Client"
NotifSilencieuse = True
elif len(MessageReçu) > 70:
#Si le message à afficher fait plus de 70 caratères
LignesMessages = Fonctions.couperPhrases(MessageReçu)
#On recupére plusieurs lignes de moins de 70 caractères dans une liste
for ligne in LignesMessages:
FilsMessages.insert(END, ligne)
if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
NouvelleLigne(FichierSauvegarde, MotDePasse, ligne)
else:
FilsMessages.insert(END, MessageReçu)
if Paramètres.DicoParamètres["Sauvegarde"] == "Activée":
Sauvegarde.NouvelleLigne(FichierSauvegarde, MotDePasse, MessageReçu)
FilsMessages.yview(END)
#On force le défilement tout en bas de cette dernière
if FenêtreALeFocus == False and NotifSilencieuse == False and Paramètres.DicoParamètres["Notification"] == "Activée":
Fonctions.AfficherNotification("Kripto", MessageReçu)
if SonActivé == True and NotifSilencieuse == False:
if Paramètres.DicoParamètres["SonRéception"] != "Inconnu":
winsound.PlaySound("Sons/" + Paramètres.DicoParamètres["SonRéception"], winsound.SND_ASYNC)
else:
winsound.PlaySound("Sons/Dong.wav", winsound.SND_ASYNC)
def RetournerMenu(DemandeConfirmation = None, ConversationEnCours = None, DepuisMenu = None, DemandeArrêt = True):
global FilsMessages, SaisieMessage, BouttonEnvoyer, SousMenuCliqué, ConnexionEnCours
Confirmation = None
if DemandeConfirmation == True:
Confirmation = messagebox.askquestion (f"Vous partez déja {NomUser} ?","Vous voulez vraiment retourner au menu ?", icon = "warning")
if Confirmation == "yes" or DemandeConfirmation == None:
if ConversationEnCours:
#Si l'utilisateur était dans la fenêtre de conversation
SousMenuCliqué = False
if Rôle == "Hôte" and DemandeArrêt == True:
Envoyer(True, "/stop") #L'envoi du /stop permet d'éviter au serveur de crasher / tourner dans le vide
time.sleep(0.3)
BarreMenu.delete(1)
BarreMenu.insert_command(1, label="Menu", command= lambda : RetournerMenu(DepuisMenu = True))
#On remplace la commande "Menu" pour car la commande associée doit avoir l'argument "ConversationEnCours" à jour
FilsMessages.pack_forget()
SaisieMessage.pack_forget()
BouttonEnvoyer.pack_forget()
fen.unbind_all(ALL)
fen.bind("<FocusIn>", lambda x: PasserEnTrue())
fen.bind("<FocusOut>", lambda x: PasserEnFalse())
BarreMenu.delete(2)
BarreMenu.delete(3)
#On efface les commandes "Couper Son" et "Infos Serveur" du menu
ConnexionEnCours = False #Le thread de réception est arrêté
ConnexionSocket.close()
if DepuisMenu:
#Si l'utilisateur était dans la fenêtre de menu
if SousMenuCliqué:
#Si l'utilisateur était dans le sous menu (Démarrage du serveur ou connexion)
Logo.pack_forget()
CadreParamètres.pack_forget()
if SousMenuCliqué or ConversationEnCours:
#Si l"utilisateur n'est pas dans le menu principal
if SousMenuCliqué:
SousMenuCliqué = False
AfficherMenu()
def InfosServeur():
""" Cette fonction affiches les informations du serveur dans une fenêtre en top level"""
global IP, Port, NombreConnectés
fenInfos = Toplevel()
fenInfos.geometry("300x280")
fenInfos.configure(bg="grey")
fenInfos.resizable(width=False, height=False)
fenInfos.iconbitmap(bitmap="Médias/information.ico")
fenInfos.title("Infos du serveur")
TitreAdresseServeur = Label(fenInfos, text="Adresse du serveur", bg="Grey", font=PoliceTitre)
TitreAdresseServeur.pack(pady=10)
AdresseServeur = Label(fenInfos, text=IP, bg="Grey", font=PoliceSousTitre)
AdresseServeur.pack()
TitrePortServeur = Label(fenInfos, text="Port du serveur", bg="Grey", font=PoliceTitre)
TitrePortServeur.pack(pady=10)
PortServeur = Label(fenInfos, text=Port, bg="Grey", font=PoliceSousTitre)
PortServeur.pack()
TitreUtilisateursCo = Label(fenInfos, text="Utiliseurs connectées", bg="Grey", font=PoliceTitre)
TitreUtilisateursCo.pack(pady=10)
UtilisateurCo = Label(fenInfos, text = str(NombreConnectés), bg="Grey", font=PoliceSousTitre)
UtilisateurCo.pack()
BouttonFermer = Button(fenInfos, text="Fermer", command = lambda: fenInfos.destroy())
BouttonFermer.pack(pady=20, side=BOTTOM)
fenInfos.focus_force()
#On affiche la fenêtre au premier plan
fenInfos.mainloop()
def Aide():
""" Cette fonction affiche l'Aide dans une fenêtre en top level"""
def QuitterAide():
"""Fonction qui détruit la fenêtre d'Aide"""
fenAide.destroy()
fenAide = Toplevel()
fenAide.geometry("300x280")
fenAide.configure(bg="grey")
fenAide.resizable(width=False, height=False)
fenAide.iconbitmap(bitmap="Médias/information.ico")
fenAide.title("Aide")
#Définition de l'apparence de la fenêtre
TitreAideIP = Label(fenAide, text="Si votre IP n'est pas valide", bg="Grey", font=PoliceTitre)
TitreAideIP.pack(pady=10)
AideIP0 = Label(fenAide, text="Entrez vous même l'adresse IPv4.\nPour la trouver :", bg="Grey", font=PoliceSousTitre)
AideIP0.pack()
AideIP1 = Label(fenAide, text="le-routeur-wifi.com/adresse-ip-mac/", bg="Grey", font=PoliceSousTitre, fg="blue")
AideIP1.pack()
AideIP1.bind("<Button-1>", lambda e: Fonctions.callback("https://le-routeur-wifi.com/adresse-ip-mac/"))
TitreAidePort0 = Label(fenAide, text="Si votre port n'est pas valide", bg="Grey", font=PoliceTitre)
TitreAidePort0.pack(pady=10)
AidePort0 = Label(fenAide, text="Veillez à choisir un nombre entier\nentre 0 et 65535", bg="Grey", font=PoliceSousTitre)
AidePort0.pack()
BouttonFermer = Button(fenAide, text="Fermer", command=QuitterAide)
BouttonFermer.pack(pady=20, side=BOTTOM)
fenAide.focus_force()
#On affiche la fenêtre au premier plan
fenAide.mainloop()
def ActiverSon():
global SonActivé
SonActivé = True
BarreMenu.delete(2)
BarreMenu.insert_command(2, label="Couper le son", command=CouperSon)
#On supprime la commande à l'index 2 du menu pour y ajouter la commande CouperSon à la même position
def CouperSon():
global SonActivé
SonActivé = False
BarreMenu.delete(2)
BarreMenu.insert_command(2, label="Activer le son", command=ActiverSon)
#On supprime la commande à l'index 2 du menu pour y ajouter la commande ActiverSon à la même position
def Contact():
""" Cette fonction affiches les informations du serveur dans une fenêtre en top level"""
def EnvoiAPI():
TitreIssue = InputObjet.get()
Message = InputMessage.get("1.0", tkinter.END)
Plateforme = platform.system() + " " + platform.release() + " " + platform.version()
if Kripiti.CréerUneIssue(TitreIssue, Message, Plateforme) == True:
tkinter.messagebox.showinfo(
title = "Succès !",
message = "Votre bug a été reporté à nos équipes avec succès. Merci de votre contribution !"
)
fenContact.destroy()
else:
tkinter.messagebox.showerror(
title = "Oups...",
message = "Impossible de reporter le bug pour le moment. Merci de retenter."
)
fenContact = Toplevel()
fenContact.geometry("300x280")
fenContact.configure(bg="grey")
fenContact.resizable(width=False, height=False)
fenContact.iconbitmap(bitmap="Médias/information.ico")
fenContact.title("Contact")
#Label d'objet
Label(fenContact, text="Quel est le problème ?", bg="Grey", font=PoliceTitre).pack(pady=10)
InputObjet = Entry(fenContact, width = 50, bg="White", font=PoliceSousTitre)
InputObjet.pack(padx=20)
#Label de message
Label(fenContact, text="Un peu plus de détails ?", bg="Grey", font=PoliceTitre).pack(pady=10)
InputMessage = ScrolledText(fenContact, width = 50, height = 5, bg="White", font=PoliceSousTitre)
InputMessage.pack(padx=20)
Button(fenContact, text="Envoyer votre message", command=EnvoiAPI).pack(pady=20, side=BOTTOM)
fenContact.focus_force()
#On affiche la fenêtre au premier plan
fenContact.mainloop()
def fermeture():
""" Fonction appellée quand l'utilisateur veut fermer la fenêtre """
RéponseUser = tkinter.messagebox.askokcancel("Kripto","Vous partez déja ?")
if RéponseUser == True:
sys.exit()
#On utilise sys.exit() plutôt que exit() car cela éviter au threads de tourner en arrière plan
def PasserEnTrue():
global FenêtreALeFocus
FenêtreALeFocus = True
def PasserEnFalse():
global FenêtreALeFocus
FenêtreALeFocus = False
#Code exécuté au démarage de l'application
Paramètres.LectureParamètres()
ListeNoms = ["Autruche", "Bob", "AmiralBenson", "TomNook", "Karamazov", "PatéEnCroute", "Risitas", "Clown"]
#La liste des noms qui seront suggérés à l'utilisateur.
FichierSauvegarde = None
MotDePasse = None
#Initilisation du mot de passe de la sauvegarde et le fichier de sauvegarde
Module, CléPublique, CléPrivée = ChiffrementRSA.génération(16)
#On génére une clé publique et une clé publique et on garde en mémoire le module de chiffrement
NombreErreurs = 0
NombreConnectés = 1 #On se compte
EnvoiPossible = True
SonActivé = True
SousMenuCliqué = False
SauvegardeUtilisée = None #On ne sait pas à ce stade si la sauvegarde sera utilsée
FenêtreALeFocus = True
#Permet d'envoyer des notifcations uniquement quand la fenêtre est en arrière plan
fen = Tk()
fen.geometry("550x460")
fen.title("Kripto - Un chat chiffré")
fen.configure(bg="grey")
fen.resizable(width=False, height=False)
fen.iconbitmap(bitmap="Médias/icone.ico")
fen.bind("<FocusIn>", lambda x: PasserEnTrue())
fen.bind("<FocusOut>", lambda x: PasserEnFalse())
fen.protocol("WM_DELETE_WINDOW", fermeture)
BarreMenu = Menu(fen)
BarreMenu.add_command(label="Menu", command= lambda: RetournerMenu(DepuisMenu = True))
BarreMenu.add_command(label="Aide", command=Aide)
BarreMenu.add_command(label="Sauvegardes", command=LecteurSauvegarde.LecteurSauvegarde)
BarreMenu.add_command(label="Paramètres", command=Paramètres.InterfaceParamètres)
BarreMenu.add_command(label="Contact", command=Contact)
fen.configure(menu=BarreMenu)
PoliceTitreBienvenue = tkFont.Font(family="Verdanna",size=16,weight="bold")
PoliceBoutton = tkFont.Font(family="Arial",size=12,weight="bold")
PoliceTitre = tkFont.Font(size=14,weight="bold")
PoliceSousTitre = tkFont.Font(size=12)
ImageLogo = PhotoImage(file="Médias/Logo.png")
AfficherMenu()
fen.mainloop() | 0.167593 | 0.260472 |
import matplotlib as mat
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import scipy.stats as stats
import os
sns.set(rc={"figure.figsize":(8,4)})
sns.set_context('paper',font_scale=1.5,rc={'lines.linewidth':1.5})
sns.set_style('ticks')
mat.rc('text',usetex=True)
mat.rcParams['text.latex.preamble']=[r'\usepackage[utf8]{inputenc}',r'\usepackage[T1]{fontenc}',r'\usepackage[spanish]{babel}',r'\usepackage[scaled]{helvet}',r'\renewcommand\familydefault{\sfdefault}',r'\usepackage{amsmath,amsfonts,amssymb}',r'\usepackage{siunitx}']
home=os.environ['HOME']
dir='proyectos/scicrt/simulation/resultados-sim/pulse-shape'
nda0='{0}/{1}/19nov_2phe-t90.csv'.format(home,dir)
nda1='{0}/{1}/19aug_7phe-t90.csv'.format(home,dir)
nsim='{0}/{1}/ptimes_t9011.csv'.format(home,dir)
data0=np.loadtxt(nda0,dtype=np.float)
data1=np.loadtxt(nda1,dtype=np.float)
t90_exp=np.hstack((data0,data1))
#time_test=t90_exp>=100
#t90_exp=t90_exp[time_test]
t90_sim=np.loadtxt(nsim,dtype=np.float)
print(np.amax(t90_exp),np.amin(t90_exp))
print(np.amax(t90_sim),np.amin(t90_sim))
print(np.size(t90_exp,0),(1.0-np.size(t90_exp,0)/10220.0)*275.3)
tbins=np.arange(-200,300.0,0.1)
bins=np.arange(-100,230)
kde=stats.gaussian_kde(t90_exp,bw_method='silverman')
thist_exp=kde.evaluate(tbins)
kde=stats.gaussian_kde(t90_sim,bw_method='silverman')
thist_sim=kde.evaluate(tbins)
logic=np.logical_and(tbins<100.0,tbins>-200)
print(0.1*np.sum(thist_exp[logic]))
print(0.1*np.sum(thist_sim[logic])*256.9)
tm_exp=tbins[np.argmax(thist_exp)]
tm_sim=tbins[np.argmax(thist_sim)]
c=sns.color_palette(sns.cubehelix_palette(2,start=2,rot=0,dark=0,light=0.5,reverse=True))
sns.set_palette(c)
fig,ax=plt.subplots(nrows=1,ncols=1,sharex=False,sharey=False)
ax.plot(tbins,thist_exp,color=c[0])
ax.hist(t90_exp,bins=bins,density=True,histtype='stepfilled',color=c[0])
ax.plot(tbins,thist_sim,color=c[1])
ax.hist(t90_sim,bins=bins,density=True,histtype='stepfilled',color=c[1])
plt.xlabel(r'$t_{90}-t_{10}$ $[\si{\nano\second}]$',x=0.9,horizontalalignment='right')
plt.ylabel(r'Probability density')
plt.xlim(0,160)
plt.tight_layout(pad=1.0)
plt.savefig('t90_dist.pdf')
plt.show() | scibar-t90.py |
import matplotlib as mat
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import scipy.stats as stats
import os
sns.set(rc={"figure.figsize":(8,4)})
sns.set_context('paper',font_scale=1.5,rc={'lines.linewidth':1.5})
sns.set_style('ticks')
mat.rc('text',usetex=True)
mat.rcParams['text.latex.preamble']=[r'\usepackage[utf8]{inputenc}',r'\usepackage[T1]{fontenc}',r'\usepackage[spanish]{babel}',r'\usepackage[scaled]{helvet}',r'\renewcommand\familydefault{\sfdefault}',r'\usepackage{amsmath,amsfonts,amssymb}',r'\usepackage{siunitx}']
home=os.environ['HOME']
dir='proyectos/scicrt/simulation/resultados-sim/pulse-shape'
nda0='{0}/{1}/19nov_2phe-t90.csv'.format(home,dir)
nda1='{0}/{1}/19aug_7phe-t90.csv'.format(home,dir)
nsim='{0}/{1}/ptimes_t9011.csv'.format(home,dir)
data0=np.loadtxt(nda0,dtype=np.float)
data1=np.loadtxt(nda1,dtype=np.float)
t90_exp=np.hstack((data0,data1))
#time_test=t90_exp>=100
#t90_exp=t90_exp[time_test]
t90_sim=np.loadtxt(nsim,dtype=np.float)
print(np.amax(t90_exp),np.amin(t90_exp))
print(np.amax(t90_sim),np.amin(t90_sim))
print(np.size(t90_exp,0),(1.0-np.size(t90_exp,0)/10220.0)*275.3)
tbins=np.arange(-200,300.0,0.1)
bins=np.arange(-100,230)
kde=stats.gaussian_kde(t90_exp,bw_method='silverman')
thist_exp=kde.evaluate(tbins)
kde=stats.gaussian_kde(t90_sim,bw_method='silverman')
thist_sim=kde.evaluate(tbins)
logic=np.logical_and(tbins<100.0,tbins>-200)
print(0.1*np.sum(thist_exp[logic]))
print(0.1*np.sum(thist_sim[logic])*256.9)
tm_exp=tbins[np.argmax(thist_exp)]
tm_sim=tbins[np.argmax(thist_sim)]
c=sns.color_palette(sns.cubehelix_palette(2,start=2,rot=0,dark=0,light=0.5,reverse=True))
sns.set_palette(c)
fig,ax=plt.subplots(nrows=1,ncols=1,sharex=False,sharey=False)
ax.plot(tbins,thist_exp,color=c[0])
ax.hist(t90_exp,bins=bins,density=True,histtype='stepfilled',color=c[0])
ax.plot(tbins,thist_sim,color=c[1])
ax.hist(t90_sim,bins=bins,density=True,histtype='stepfilled',color=c[1])
plt.xlabel(r'$t_{90}-t_{10}$ $[\si{\nano\second}]$',x=0.9,horizontalalignment='right')
plt.ylabel(r'Probability density')
plt.xlim(0,160)
plt.tight_layout(pad=1.0)
plt.savefig('t90_dist.pdf')
plt.show() | 0.329823 | 0.380701 |
import argparse
import numpy as np
import pandas as pd
import pathlib
import tqdm
CC_THRESH = 0.75 # Minimum cross-correlation value to retain.
DTT_MAX = 5 # Maximum differential travel-time to retain.
NCC_MIN = 4 # Minimum number of cross-correlation observations
# per event pair.
def parse_argc():
"""
Parse and return command line arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"catalog",
type=str,
help="Input catalog."
)
parser.add_argument(
"input_root",
type=str,
help="Input data directory."
)
parser.add_argument(
"output_root",
type=str,
help="Output data directory."
)
parser.add_argument(
"-f",
"--format",
type=str,
default="HDF5",
help="Input database format."
)
return (parser.parse_args())
def read_catalog(argc):
if argc.format.upper() == "HDF5":
return (_read_catalog_hdf5(argc.catalog))
elif argc.format.upper() == "ANTELOPE":
return (_read_catalog_antelope(argc.catalog))
else:
raise (NotImplementedError)
def read_cc(argc, events):
dataf = pd.DataFrame()
desc = "Loading differential travel times"
for path in tqdm.tqdm(sorted(pathlib.Path(argc.input_root).iterdir()), desc=desc):
try:
df = pd.read_hdf(path, key="differentials")
except Exception as err:
continue
df = df[
(df["ccmax"].abs() > CC_THRESH)
]
df = df.groupby(["event_id_A", "event_id_B", "network", "station", "phase"])
df = df.mean()
df = df.reset_index()
dataf = dataf.append(df, ignore_index=True)
try:
dataf["origin_time_A"] = events.loc[dataf["event_id_A"].values, "time"].values
dataf["origin_time_B"] = events.loc[dataf["event_id_B"].values, "time"].values
except KeyError:
print(
"Event IDs are missing from the input database; make sure it is"
"the same database used to extract waveforms."
)
raise
dataf["dtt"] = dataf["dt"] - (dataf["origin_time_B"] - dataf["origin_time_A"])
dataf = dataf[
(dataf["ccmax"].abs() > CC_THRESH)
]
dataf = dataf.groupby(["event_id_A", "event_id_B", "network", "station", "phase"])
dataf = dataf.mean()
dataf = dataf[dataf["dtt"].abs() < DTT_MAX]
dataf.reset_index(inplace=True)
dataf.set_index(["event_id_A", "event_id_B"], inplace=True)
dataf.sort_index(inplace=True)
return (dataf)
def subset_observations(dataf, events, ncc_min=NCC_MIN):
group = dataf.groupby(["event_id_A", "event_id_B"])
group = group.size()
group = group[group >= ncc_min]
dataf = dataf.loc[group.index]
event_ids = dataf.reset_index()[["event_id_A", "event_id_B"]].values.flatten()
event_ids = np.sort(np.unique(event_ids))
events = events.loc[event_ids]
return (dataf, events)
def write_dtcc(dataf, path):
path = pathlib.Path(path).joinpath("dt.cc")
path.parent.mkdir(parents=True, exist_ok=True)
dataf["station"] = dataf["station"].map("{:>5s}".format)
dataf["dtt"] = dataf["dtt"].map("{:>6.3f}".format)
dataf["ccmax"] = dataf["ccmax"].abs().map("{:>5.3f}".format)
desc = "Writing dt.cc file."
with open(path, "w") as outfile:
for event_id_A, event_id_B in tqdm.tqdm(dataf.index.unique(), desc=desc):
chunk = f"# {event_id_A:>6d} {event_id_B:>6d} 0.0\n"
df = dataf.loc[(event_id_A, event_id_B)]
chunk += "\n".join(
" " + df["station"]
+ " " + df["dtt"]
+ " " + df["ccmax"]
+ " " + df["phase"]
)
outfile.write(chunk + "\n")
def write_events(events, path):
path = pathlib.Path(path).joinpath("events.gc")
path.parent.mkdir(parents=True, exist_ok=True)
desc = "Writing events.gc file"
with open(path, mode="w") as outfile:
for event_id, event in tqdm.tqdm(events.iterrows(), total=len(events), desc=desc):
latitude, longitude, depth, time = event
time = pd.to_datetime(time*1e9)
line = f"{event_id:>6d} {time.year:>4d} {time.month:>2d} {time.day:>2d} {time.hour:>2d} "
line += f"{time.minute:>2d} {time.second:>2d}.{time.microsecond:06d} "
line += f"{latitude:>9.6f} {longitude:>10.6f} {depth:>6.3f} -1 -1 -1 -1"
outfile.write(line + "\n")
def _read_catalog_antelope(path):
TABLES = dict(
event=[
"evid", "evname", "prefor", "auth", "commid", "lddate"
],
origin=[
"lat", "lon", "depth", "time", "orid", "evid", "jdate", "nass", "ndef",
"ndp", "grn", "srn", "etype", "UNKNOWN", "depdp", "dtype", "mb", "mbid", "ms",
"msid", "ml", "mlid", "algorithm", "auth", "commid", "lddate"
]
)
db = dict()
for table in TABLES:
db[table] = pd.read_csv(
f"{path}.{table}",
header=None,
delim_whitespace=True,
names=TABLES[table]
)
events = db["event"].merge(
db["origin"][["lat", "lon", "depth", "time", "orid"]],
left_on="prefor",
right_on="orid"
)
events = events[["lat", "lon", "depth", "time", "evid"]]
events = events.rename(
columns=dict(
lat="latitude",
lon="longitude",
evid="event_id"
)
)
events = events.sort_values("event_id")
events = events.reset_index(drop=True)
return (events)
def _read_catalog_hdf5(path):
events = pd.read_hdf(path, key="events")
events = events.reset_index(drop=True)
return (events)
def main():
argc = parse_argc()
events = read_catalog(argc)
events = events.sort_values("event_id")
events = events.set_index("event_id")
dataf = read_cc(argc, events)
dataf, events = subset_observations(dataf, events)
write_dtcc(dataf, argc.output_root)
write_events(events, argc.output_root)
if __name__ == "__main__":
main() | src/420_reformat_cc_output.py | import argparse
import numpy as np
import pandas as pd
import pathlib
import tqdm
CC_THRESH = 0.75 # Minimum cross-correlation value to retain.
DTT_MAX = 5 # Maximum differential travel-time to retain.
NCC_MIN = 4 # Minimum number of cross-correlation observations
# per event pair.
def parse_argc():
"""
Parse and return command line arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"catalog",
type=str,
help="Input catalog."
)
parser.add_argument(
"input_root",
type=str,
help="Input data directory."
)
parser.add_argument(
"output_root",
type=str,
help="Output data directory."
)
parser.add_argument(
"-f",
"--format",
type=str,
default="HDF5",
help="Input database format."
)
return (parser.parse_args())
def read_catalog(argc):
if argc.format.upper() == "HDF5":
return (_read_catalog_hdf5(argc.catalog))
elif argc.format.upper() == "ANTELOPE":
return (_read_catalog_antelope(argc.catalog))
else:
raise (NotImplementedError)
def read_cc(argc, events):
dataf = pd.DataFrame()
desc = "Loading differential travel times"
for path in tqdm.tqdm(sorted(pathlib.Path(argc.input_root).iterdir()), desc=desc):
try:
df = pd.read_hdf(path, key="differentials")
except Exception as err:
continue
df = df[
(df["ccmax"].abs() > CC_THRESH)
]
df = df.groupby(["event_id_A", "event_id_B", "network", "station", "phase"])
df = df.mean()
df = df.reset_index()
dataf = dataf.append(df, ignore_index=True)
try:
dataf["origin_time_A"] = events.loc[dataf["event_id_A"].values, "time"].values
dataf["origin_time_B"] = events.loc[dataf["event_id_B"].values, "time"].values
except KeyError:
print(
"Event IDs are missing from the input database; make sure it is"
"the same database used to extract waveforms."
)
raise
dataf["dtt"] = dataf["dt"] - (dataf["origin_time_B"] - dataf["origin_time_A"])
dataf = dataf[
(dataf["ccmax"].abs() > CC_THRESH)
]
dataf = dataf.groupby(["event_id_A", "event_id_B", "network", "station", "phase"])
dataf = dataf.mean()
dataf = dataf[dataf["dtt"].abs() < DTT_MAX]
dataf.reset_index(inplace=True)
dataf.set_index(["event_id_A", "event_id_B"], inplace=True)
dataf.sort_index(inplace=True)
return (dataf)
def subset_observations(dataf, events, ncc_min=NCC_MIN):
group = dataf.groupby(["event_id_A", "event_id_B"])
group = group.size()
group = group[group >= ncc_min]
dataf = dataf.loc[group.index]
event_ids = dataf.reset_index()[["event_id_A", "event_id_B"]].values.flatten()
event_ids = np.sort(np.unique(event_ids))
events = events.loc[event_ids]
return (dataf, events)
def write_dtcc(dataf, path):
path = pathlib.Path(path).joinpath("dt.cc")
path.parent.mkdir(parents=True, exist_ok=True)
dataf["station"] = dataf["station"].map("{:>5s}".format)
dataf["dtt"] = dataf["dtt"].map("{:>6.3f}".format)
dataf["ccmax"] = dataf["ccmax"].abs().map("{:>5.3f}".format)
desc = "Writing dt.cc file."
with open(path, "w") as outfile:
for event_id_A, event_id_B in tqdm.tqdm(dataf.index.unique(), desc=desc):
chunk = f"# {event_id_A:>6d} {event_id_B:>6d} 0.0\n"
df = dataf.loc[(event_id_A, event_id_B)]
chunk += "\n".join(
" " + df["station"]
+ " " + df["dtt"]
+ " " + df["ccmax"]
+ " " + df["phase"]
)
outfile.write(chunk + "\n")
def write_events(events, path):
path = pathlib.Path(path).joinpath("events.gc")
path.parent.mkdir(parents=True, exist_ok=True)
desc = "Writing events.gc file"
with open(path, mode="w") as outfile:
for event_id, event in tqdm.tqdm(events.iterrows(), total=len(events), desc=desc):
latitude, longitude, depth, time = event
time = pd.to_datetime(time*1e9)
line = f"{event_id:>6d} {time.year:>4d} {time.month:>2d} {time.day:>2d} {time.hour:>2d} "
line += f"{time.minute:>2d} {time.second:>2d}.{time.microsecond:06d} "
line += f"{latitude:>9.6f} {longitude:>10.6f} {depth:>6.3f} -1 -1 -1 -1"
outfile.write(line + "\n")
def _read_catalog_antelope(path):
TABLES = dict(
event=[
"evid", "evname", "prefor", "auth", "commid", "lddate"
],
origin=[
"lat", "lon", "depth", "time", "orid", "evid", "jdate", "nass", "ndef",
"ndp", "grn", "srn", "etype", "UNKNOWN", "depdp", "dtype", "mb", "mbid", "ms",
"msid", "ml", "mlid", "algorithm", "auth", "commid", "lddate"
]
)
db = dict()
for table in TABLES:
db[table] = pd.read_csv(
f"{path}.{table}",
header=None,
delim_whitespace=True,
names=TABLES[table]
)
events = db["event"].merge(
db["origin"][["lat", "lon", "depth", "time", "orid"]],
left_on="prefor",
right_on="orid"
)
events = events[["lat", "lon", "depth", "time", "evid"]]
events = events.rename(
columns=dict(
lat="latitude",
lon="longitude",
evid="event_id"
)
)
events = events.sort_values("event_id")
events = events.reset_index(drop=True)
return (events)
def _read_catalog_hdf5(path):
events = pd.read_hdf(path, key="events")
events = events.reset_index(drop=True)
return (events)
def main():
argc = parse_argc()
events = read_catalog(argc)
events = events.sort_values("event_id")
events = events.set_index("event_id")
dataf = read_cc(argc, events)
dataf, events = subset_observations(dataf, events)
write_dtcc(dataf, argc.output_root)
write_events(events, argc.output_root)
if __name__ == "__main__":
main() | 0.37502 | 0.290352 |
import time
from unittest.mock import Mock, patch
from flask.testing import FlaskClient
from lms.lmsweb.config import CONFIRMATION_TIME
from lms.lmsdb.models import Course, User
from lms.models.users import generate_user_token
from tests import conftest
class TestRegistration:
@staticmethod
def test_invalid_username(
client: FlaskClient, student_user: User, captured_templates,
):
conftest.signup_client_user(
client, '<EMAIL>', student_user.username,
'some_name', 'some_password', '<PASSWORD>',
)
template, _ = captured_templates[-1]
assert template.name == 'signup.html'
conftest.login_client_user(
client, student_user.username, '<PASSWORD>',
)
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
@staticmethod
def test_invalid_email(
client: FlaskClient, student_user: User, captured_templates,
):
conftest.signup_client_user(
client, student_user.mail_address, 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
client.post('/signup', data={
'email': student_user.mail_address,
'username': 'some_user',
'fullname': 'some_name',
'password': '<PASSWORD>',
'confirm': '<PASSWORD>',
}, follow_redirects=True)
template, _ = captured_templates[-1]
assert template.name == 'signup.html'
conftest.login_client_user(client, 'some_user', 'some_password')
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
@staticmethod
def test_bad_token_or_id(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
bad_token = '<PASSWORD>@$@' # noqa: S105
fail_confirm_response = client.get(
f'/confirm-email/{user.id}/{bad_token}', follow_redirects=True,
)
assert fail_confirm_response.status_code == 404
# No such 999 user id
another_fail_response = client.get(
f'/confirm-email/999/{bad_token}', follow_redirects=True,
)
assert another_fail_response.status_code == 404
@staticmethod
def test_use_token_twice(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
token = generate_user_token(user)
success_token_response = client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
assert success_token_response.status_code == 200
fail_token_response = client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
assert fail_token_response.status_code == 403
@staticmethod
def test_expired_token(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', '<PASSWORD>password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
token = generate_user_token(user)
fake_time = time.time() + CONFIRMATION_TIME + 1
with patch('time.time', Mock(return_value=fake_time)):
client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
conftest.login_client_user(client, 'some_user', 'some_password')
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
token = generate_user_token(user)
client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
conftest.login_client_user(client, 'some_user', 'some_password')
success_login_response = client.get('/exercises')
assert success_login_response.status_code == 200
    @staticmethod
    def test_successful_registration(client: FlaskClient, captured_templates):
        """Signup lands on login; access is granted only after email confirmation."""
        conftest.signup_client_user(
            client, '<EMAIL>', 'some_user',
            'some_name', 'some_password', '<PASSWORD>',
        )
        template, _ = captured_templates[-1]
        assert template.name == 'login.html'
        # Not confirmed yet -> login does not grant access (redirect).
        conftest.login_client_user(client, 'some_user', 'some_password')
        fail_login_response = client.get('/exercises')
        assert fail_login_response.status_code == 302
        user = User.get_or_none(User.username == 'some_user')
        token = generate_user_token(user)
        client.get(f'/confirm-email/{user.id}/{token}', follow_redirects=True)
        conftest.login_client_user(client, 'some_user', 'some_password')
        success_login_response = client.get('/exercises')
        assert success_login_response.status_code == 200
    @staticmethod
    def test_registartion_closed(client: FlaskClient, captured_templates):
        """With registration disabled, signup creates no user and the link is hidden."""
        conftest.disable_registration()
        conftest.signup_client_user(
            client, '<EMAIL>', 'some_user',
            'some_name', 'some_password', '<PASSWORD>',
        )
        user = User.get_or_none(User.username == 'some_user')
        assert user is None
        # /signup falls back to the login page and renders no signup link.
        response = client.get('/signup')
        template, _ = captured_templates[-1]
        assert template.name == 'login.html'
        assert '/signup' not in response.get_data(as_text=True)
    @staticmethod
    def test_register_public_course(
        student_user: User, course: Course, captured_templates,
    ):
        """Joining: 403 for private courses, 404 for unknown ids, OK once public."""
        client = conftest.get_logged_user(username=student_user.username)
        not_public_course_response = client.get(f'/course/join/{course.id}')
        assert not_public_course_response.status_code == 403
        unknown_course_response = client.get('/course/join/123456')
        assert unknown_course_response.status_code == 404
        # Flip the course to public and re-read it from the DB.
        course.is_public = True
        course.save()
        course = Course.get_by_id(course.id)
        client.get(f'/course/join/{course.id}')
        template, _ = captured_templates[-1]
        assert template.name == 'exercises.html'
        # Joining a second time must be rejected (asserted 409 below).
        already_registered_response = client.get(f'/course/join/{course.id}')
assert already_registered_response.status_code == 409 | tests/test_registration.py | import time
from unittest.mock import Mock, patch
from flask.testing import FlaskClient
from lms.lmsweb.config import CONFIRMATION_TIME
from lms.lmsdb.models import Course, User
from lms.models.users import generate_user_token
from tests import conftest
class TestRegistration:
@staticmethod
def test_invalid_username(
client: FlaskClient, student_user: User, captured_templates,
):
conftest.signup_client_user(
client, '<EMAIL>', student_user.username,
'some_name', 'some_password', '<PASSWORD>',
)
template, _ = captured_templates[-1]
assert template.name == 'signup.html'
conftest.login_client_user(
client, student_user.username, '<PASSWORD>',
)
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
@staticmethod
def test_invalid_email(
client: FlaskClient, student_user: User, captured_templates,
):
conftest.signup_client_user(
client, student_user.mail_address, 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
client.post('/signup', data={
'email': student_user.mail_address,
'username': 'some_user',
'fullname': 'some_name',
'password': '<PASSWORD>',
'confirm': '<PASSWORD>',
}, follow_redirects=True)
template, _ = captured_templates[-1]
assert template.name == 'signup.html'
conftest.login_client_user(client, 'some_user', 'some_password')
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
@staticmethod
def test_bad_token_or_id(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
bad_token = '<PASSWORD>@$@' # noqa: S105
fail_confirm_response = client.get(
f'/confirm-email/{user.id}/{bad_token}', follow_redirects=True,
)
assert fail_confirm_response.status_code == 404
# No such 999 user id
another_fail_response = client.get(
f'/confirm-email/999/{bad_token}', follow_redirects=True,
)
assert another_fail_response.status_code == 404
@staticmethod
def test_use_token_twice(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
token = generate_user_token(user)
success_token_response = client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
assert success_token_response.status_code == 200
fail_token_response = client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
assert fail_token_response.status_code == 403
@staticmethod
def test_expired_token(client: FlaskClient):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', '<PASSWORD>password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
token = generate_user_token(user)
fake_time = time.time() + CONFIRMATION_TIME + 1
with patch('time.time', Mock(return_value=fake_time)):
client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
conftest.login_client_user(client, 'some_user', 'some_password')
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
token = generate_user_token(user)
client.get(
f'/confirm-email/{user.id}/{token}', follow_redirects=True,
)
conftest.login_client_user(client, 'some_user', 'some_password')
success_login_response = client.get('/exercises')
assert success_login_response.status_code == 200
@staticmethod
def test_successful_registration(client: FlaskClient, captured_templates):
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
template, _ = captured_templates[-1]
assert template.name == 'login.html'
conftest.login_client_user(client, 'some_user', 'some_password')
fail_login_response = client.get('/exercises')
assert fail_login_response.status_code == 302
user = User.get_or_none(User.username == 'some_user')
token = generate_user_token(user)
client.get(f'/confirm-email/{user.id}/{token}', follow_redirects=True)
conftest.login_client_user(client, 'some_user', 'some_password')
success_login_response = client.get('/exercises')
assert success_login_response.status_code == 200
@staticmethod
def test_registartion_closed(client: FlaskClient, captured_templates):
conftest.disable_registration()
conftest.signup_client_user(
client, '<EMAIL>', 'some_user',
'some_name', 'some_password', '<PASSWORD>',
)
user = User.get_or_none(User.username == 'some_user')
assert user is None
response = client.get('/signup')
template, _ = captured_templates[-1]
assert template.name == 'login.html'
assert '/signup' not in response.get_data(as_text=True)
@staticmethod
def test_register_public_course(
student_user: User, course: Course, captured_templates,
):
client = conftest.get_logged_user(username=student_user.username)
not_public_course_response = client.get(f'/course/join/{course.id}')
assert not_public_course_response.status_code == 403
unknown_course_response = client.get('/course/join/123456')
assert unknown_course_response.status_code == 404
course.is_public = True
course.save()
course = Course.get_by_id(course.id)
client.get(f'/course/join/{course.id}')
template, _ = captured_templates[-1]
assert template.name == 'exercises.html'
already_registered_response = client.get(f'/course/join/{course.id}')
assert already_registered_response.status_code == 409 | 0.461745 | 0.230292 |
import sys,os,time
def processTimeTable(filename):
    """Parse one timetable file and append CSV rows to the global ``f3`` handle.

    The file is a sequence of KEY=VALUE headers (LINIA / STATIA / DIRECTIA),
    day-type markers (LUCRU / SAMBATA / DUMINICA) and departure times, one
    per line.  The station number is taken from the third "_"-separated
    token of the file name (detaliu_<line>_<nr>.<ext>).
    """
    # BUG FIX: the original called `f.close` and `f3.close` without
    # parentheses -- both were no-ops (the file handle leaked).  Use a
    # context manager for `f`, and do not close `f3` here at all: it is
    # opened and owned by the top-level script.
    with open(filename, 'r') as f:
        lines = f.readlines()
    # Station number: basename -> stem -> third underscore-separated token.
    part = filename.split("/")[-1].split(".")[0].split("_")[2]
    linia = ""
    statia = ""
    directia = ""
    zi = ""
    for line in lines:
        if "LINIA=" in line:
            linia = line.split("=")[1].strip()
            continue
        if "STATIA=" in line:
            statia = line.split("=")[1].strip()
            continue
        if "DIRECTIA=" in line:
            directia = line.split("=")[1].strip()
            continue
        if "LUCRU" in line:
            zi = "LUCRU"
            continue
        if "SAMBATA" in line:
            zi = "SAMBATA"
            continue
        if "DUMINICA" in line:
            zi = "DUMINICA"
            continue
        # Anything else is a departure time: emit one CSV row with the
        # most recently seen header values.
        ora = line.strip()
        msg = ",".join([linia, part, statia, directia, zi, ora])
        f3.write(msg + "\n")
def processLines(donelines, processedlines):
    """Process every downloaded line (URL) that is not yet marked processed.

    For each entry the last path component names a directory under
    orare/linii-ore/ whose detaliu_* files are parsed into the CSV; the
    line is then appended to processedlines.txt so reruns skip it.
    """
    for doneline in donelines:
        # Original computed `line = doneline.strip()` twice; once is enough.
        line = doneline.strip()
        if not line:
            continue
        if doneline in processedlines:
            continue
        lastok = line.split("/")[-1].strip()
        if lastok in processedlines:
            continue
        dirname = "orare/linii-ore/" + lastok + "/"
        for timetable in os.listdir(dirname):
            if timetable.startswith("detaliu_"):
                processTimeTable(dirname + timetable)
        # Record progress immediately so an interrupted run resumes cleanly.
        with open("processedlines.txt", "a") as f:
            f.write(lastok + "\n")
# --- script entry: build timetable.csv from the downloaded line pages ---
f = open("linii.txt","r")
linii = f.readlines()
f.close()
f = open("donelines.txt","r")
donelines = f.readlines()
f.close()
# Make sure the progress file exists before trying to read it.
if not os.path.exists("processedlines.txt"):
    tmp= open("processedlines.txt","a")
    tmp.close()
f2 = open("processedlines.txt","r")
processedlines = f2.readlines()
f2.close()
numar = 0
for linie in linii:
    if linie not in donelines:
        continue
    if linie in processedlines:
        continue
    # NOTE(review): for every pending line this re-opens the CSV, appends
    # the header again, and calls processLines() -- which itself walks ALL
    # donelines.  This looks redundant (header repeated, full rescan per
    # line); confirm intent before restructuring.
    f3=open("timetable.csv","a")
    msg = "linia,nr_statie,statia,directia,ziua,ora"
    f3.write( msg + "\n")
processLines(donelines, processedlines) | web/convertdata.py | import sys,os,time
def processTimeTable(filename):
f = open(filename,'r')
lines = f.readlines()
f.close
parts = filename.split("/")
part = parts [ len(parts) -1]
parts = part.split(".")
part = parts[0]
parts = part.split("_")
part = parts[2]
linia=""
statia=""
directia=""
zi=""
ora=""
for line in lines:
if "LINIA=" in line:
linia=line.split("=")[1].strip()
continue
if "STATIA=" in line:
statia=line.split("=")[1].strip()
continue
if "DIRECTIA=" in line:
directia=line.split("=")[1].strip()
continue
if "LUCRU" in line:
zi="LUCRU"
continue
if "SAMBATA" in line:
zi="SAMBATA"
continue
if "DUMINICA" in line:
zi="DUMINICA"
continue
ora = line.strip()
msg=linia+","+part+","+statia+","+directia+","+zi + "," + ora.strip()
f3.write( msg + "\n")
f3.close
def processLines(donelines, processedlines):
for doneline in donelines:
line = doneline.strip()
if line == "":
continue
if doneline in processedlines:
continue
line = doneline.strip()
toks = line.split("/")
lastok = toks[len(toks)-1].strip()
if lastok in processedlines:
continue
dirname = "orare/linii-ore/"+lastok + "/"
for timetable in os.listdir(dirname):
if timetable.startswith("detaliu_"):
fullpath=dirname+timetable
processTimeTable(fullpath)
f = open("processedlines.txt","a")
f.write(lastok + "\n")
f.close()
f = open("linii.txt","r")
linii = f.readlines()
f.close()
f = open("donelines.txt","r")
donelines = f.readlines()
f.close()
if not os.path.exists("processedlines.txt"):
tmp= open("processedlines.txt","a")
tmp.close()
f2 = open("processedlines.txt","r")
processedlines = f2.readlines()
f2.close()
numar = 0
for linie in linii:
if linie not in donelines:
continue
if linie in processedlines:
continue
f3=open("timetable.csv","a")
msg = "linia,nr_statie,statia,directia,ziua,ora"
f3.write( msg + "\n")
processLines(donelines, processedlines) | 0.056438 | 0.114319 |
from collections import defaultdict
class Computer:
    """Interpreter for AoC-2017-day-8 style conditional inc/dec instructions.

    Instruction format: ``<reg> inc|dec <amount> if <reg> <cmp> <value>``.
    Tracks the current registers plus the highest value any register has
    ever held (``max_ever_register``, part 2 of the puzzle).
    """

    def __init__(self):
        # int() == 0, so unseen registers read as zero on first access.
        # (Original used a hand-rolled `retzero` closure for the same thing.)
        self.registers = defaultdict(int)
        # None until the first instruction actually writes a register.
        self.max_ever_register = None

    def condition_true(self, look_reg, condition, value):
        """Return whether ``registers[look_reg] <condition> value`` holds.

        Raises ValueError for an unknown operator (the original used a bare
        ``assert False``, which is stripped under ``python -O``).
        """
        look_value = self.registers[look_reg]
        if condition == "==":
            return look_value == value
        elif condition == ">=":
            return look_value >= value
        elif condition == ">":
            return look_value > value
        elif condition == "<":
            return look_value < value
        elif condition == "<=":
            return look_value <= value
        elif condition == "!=":
            return look_value != value
        raise ValueError(f"unknown condition: {condition!r}")

    def execute(self, ins):
        """Execute one instruction line; no-op when its condition is false."""
        # split() (no arg) also swallows the trailing newline from file input,
        # which the original split(" ") left attached to the last token.
        action_reg, action, amount, _, look_reg, condition, value = ins.split()
        amount = int(amount)
        value = int(value)
        if not self.condition_true(look_reg, condition, value):
            return
        if action == "inc":
            delta = amount
        elif action == "dec":
            delta = -amount
        else:
            raise ValueError(f"unknown action: {action!r}")
        self.registers[action_reg] += delta
        # Track the all-time maximum after every register write.
        current_max = self.max_register()
        if self.max_ever_register is None:
            self.max_ever_register = current_max
        else:
            self.max_ever_register = max(self.max_ever_register, current_max)

    def max_register(self):
        """Return the largest value currently held in any register."""
        return max(self.registers.values())
def run_program(name):
    """Run the instruction file *name*; return the final largest register value."""
    # BUG FIX: the original open() leaked the file handle.
    with open(name) as f:
        program = f.readlines()
    computer = Computer()
    for line in program:
        computer.execute(line)
    return computer.max_register()


assert 1 == run_program("8.sample")
print(run_program("8.txt"))  # 5102
def run_program_max_ever(name):
    """Run the instruction file *name*; return the highest value ever held."""
    # BUG FIX: the original open() leaked the file handle.
    with open(name) as f:
        program = f.readlines()
    computer = Computer()
    for line in program:
        computer.execute(line)
    return computer.max_ever_register


assert 10 == run_program_max_ever("8.sample")
print(run_program_max_ever("8.txt"))  # 6056
class Computer:
def __init__(self):
def retzero(): return 0
self.registers = defaultdict(retzero)
self.max_ever_register = None
def condition_true(self, look_reg, condition, value):
look_value = self.registers[look_reg]
if condition == "==":
return look_value == value
elif condition == ">=":
return look_value >= value
elif condition == ">":
return look_value > value
elif condition == "<":
return look_value < value
elif condition == "<=":
return look_value <= value
elif condition == "!=":
return look_value != value
assert False
def execute(self, ins):
action_reg, action, amount, _, look_reg, condition, value = ins.split(" ")
amount = int(amount)
value = int(value)
if not self.condition_true(look_reg, condition, value):
return
delta = 0
if action == "inc":
delta = amount
elif action == "dec":
delta = -1 * amount
else:
assert False
self.registers[action_reg] += delta
if self.max_ever_register == None:
self.max_ever_register = self.max_register()
else:
self.max_ever_register = max(self.max_ever_register, self.max_register())
def max_register(self):
return max(self.registers.values())
def run_program(name):
program = open(name).readlines()
computer = Computer()
for line in program:
computer.execute(line)
return computer.max_register()
assert 1 == run_program("8.sample")
print(run_program("8.txt")) # 5102
def run_program_max_ever(name):
program = open(name).readlines()
computer = Computer()
for line in program:
computer.execute(line)
return computer.max_ever_register
assert 10 == run_program_max_ever("8.sample")
print(run_program_max_ever("8.txt")) # 6056 | 0.392337 | 0.452959 |
# here put the import lib
import time
import numpy as np
import tensorflow as tf
import random
import paddlehub as hub
from sklearn.metrics import accuracy_score
import math
from keras.layers import Dense, Subtract, Lambda
import keras.backend as K
from keras.regularizers import l2
import nni
import data_input
from config import Config
from .base_model import BaseModel
random.seed(9102)
def cosine_similarity(a, b):
    """Row-wise cosine similarity of two tensors, reduced over axis 1."""
    dot = tf.reduce_sum(tf.multiply(a, b), axis=1)
    norm_a = tf.sqrt(tf.reduce_sum(tf.multiply(a, a), axis=1))
    norm_b = tf.sqrt(tf.reduce_sum(tf.multiply(b, b), axis=1))
    return tf.divide(dot, tf.multiply(norm_a, norm_b))
def variable_summaries(var, name):
    """Attach mean/stddev/max/min/histogram summaries for *var* to TensorBoard.

    NOTE(review): `stddev` here is sqrt(sum((var - mean)^2)) -- it is not
    divided by the element count, so it is not a true standard deviation
    (the stock TF tutorial uses reduce_mean); confirm before relying on it.
    The scalar tag 'sttdev/' also carries a typo, kept for log continuity.
    """
    with tf.name_scope('summaries'):
        mean = tf.reduce_mean(var)
        tf.summary.scalar('mean/' + name, mean)
        with tf.name_scope('stddev'):
            stddev = tf.sqrt(tf.reduce_sum(tf.square(var - mean)))
        tf.summary.scalar('sttdev/' + name, stddev)
        tf.summary.scalar('max/' + name, tf.reduce_max(var))
        tf.summary.scalar('min/' + name, tf.reduce_min(var))
        tf.summary.histogram(name, var)
class BertClassifier(BaseModel):
    """Single-sentence BERT classifier: CLS embedding -> sigmoid binary head.

    Built on the TF1 placeholder/session API provided by BaseModel.
    """

    def __init__(self, cfg, is_training=1):
        super(BertClassifier, self).__init__(cfg, is_training)
        pass

    def add_placeholder(self):
        """Declare graph inputs (token / mask / segment ids, lengths, labels)."""
        # At predict time only the query is fed; it is embedded into a vector.
        self.q_ids = tf.placeholder(
            tf.int32, shape=[None, None], name='query_batch')
        self.q_mask_ids = tf.placeholder(
            tf.int32, shape=[None, None], name='q_mask_ids')
        self.q_seg_ids = tf.placeholder(
            tf.int32, shape=[None, None], name='q_seg_ids')
        self.q_seq_length = tf.placeholder(
            tf.int32, shape=[None], name='query_sequence_length')
        # Train/eval switch (controls dropout inside the BERT layer).
        self.is_train_place = tf.placeholder(
            dtype=tf.bool, name='is_train_place')
        # Binary similarity label per example.
        self.sim_labels = tf.placeholder(
            tf.float32, shape=[None], name="sim_labels")

    def forward(self):
        """Build the forward pass: BERT CLS output -> 1-unit sigmoid + log loss."""
        # Take the CLS (pooled) output of the shared, pretrained BERT layer.
        q_emb, _, self.q_e = self.share_bert_layer(
            self.is_train_place, self.q_ids, self.q_mask_ids, self.q_seg_ids, use_bert_pre=1)
        predict_prob = Dense(units=1, activation='sigmoid')(q_emb)
        self.predict_prob = tf.reshape(predict_prob, [-1])
        # Hard label with a fixed 0.5 decision threshold.
        self.predict_idx = tf.cast(tf.greater_equal(predict_prob, 0.5), tf.int32)
        with tf.name_scope('Loss'):
            # Train loss: binary cross-entropy (log loss) vs. sim_labels.
            loss = tf.losses.log_loss(self.sim_labels, self.predict_prob)
            self.loss = tf.reduce_mean(loss)
            tf.summary.scalar('loss', self.loss)

    def build(self):
        """Assemble the graph: placeholders, forward pass, optimizer, session."""
        self.add_placeholder()
        self.forward()
        self.add_train_op(self.cfg['optimizer'],
                          self.cfg['learning_rate'], self.loss)
        self._init_session()
        self._add_summary()
        pass

    def feed_batch(self, out_ids1, m_ids1, seg_ids1, seq_len1, label=None, is_test=0):
        """Build a feed_dict for one batch; labels are attached only when given.

        NOTE(review): `if label:` also skips an empty label sequence, not
        just None -- confirm `label is not None` was not intended.
        """
        is_train = 0 if is_test else 1
        fd = {
            self.q_ids: out_ids1, self.q_mask_ids: m_ids1,
            self.q_seg_ids: seg_ids1,
            self.q_seq_length: seq_len1,
            self.is_train_place: is_train}
        if label:
            fd[self.sim_labels] = label
        return fd

    def run_epoch(self, epoch, d_train, d_val):
        """Train one epoch over d_train, then report dev accuracy on d_val."""
        steps = int(math.ceil(float(len(d_train)) / self.cfg['batch_size']))
        progbar = tf.keras.utils.Progbar(steps)
        # Train batch by batch within the epoch.
        batch_iter = data_input.get_batch(
            d_train, batch_size=self.cfg['batch_size'])
        for i, (out_ids1, m_ids1, seg_ids1, seq_len1, label) in enumerate(batch_iter):
            fd = self.feed_batch(out_ids1, m_ids1, seg_ids1, seq_len1, label)
            # a = self.sess.run([self.is_train_place, self.q_e], feed_dict=fd)
            _, cur_loss = self.sess.run(
                [self.train_op, self.loss], feed_dict=fd)
            progbar.update(i + 1, [("loss", cur_loss)])
        # After the epoch: evaluate on the dev set and report to NNI.
        dev_acc = self.eval(d_val)
        nni.report_intermediate_result(dev_acc)
        print("dev set acc:", dev_acc)
        return dev_acc

    def eval(self, test_data):
        """Return classification accuracy of the current model on test_data."""
        pbar = data_input.get_batch(
            test_data, batch_size=self.cfg['batch_size'], is_test=1)
        val_label, val_pred = [], []
        for (out_ids1, m_ids1, seg_ids1, seq_len1, label) in pbar:
            val_label.extend(label)
            fd = self.feed_batch(out_ids1, m_ids1, seg_ids1, seq_len1, is_test=1)
            pred_labels, pred_prob = self.sess.run(
                [self.predict_idx, self.predict_prob], feed_dict=fd)
            val_pred.extend(pred_labels)
        test_acc = accuracy_score(val_label, val_pred)
        return test_acc

    def predict(self, test_data):
        """Return (predicted labels, predicted probabilities) for test_data."""
        pbar = data_input.get_batch(
            test_data, batch_size=self.cfg['batch_size'], is_test=1)
        val_pred, val_prob = [], []
        for (t1_ids, t1_len, t2_ids, t2_len) in pbar:
            fd = self.feed_batch(t1_ids, t1_len, t2_ids, t2_len, is_test=1)
            pred_labels, pred_prob = self.sess.run(
                [self.predict_idx, self.predict_prob], feed_dict=fd)
            val_pred.extend(pred_labels)
            val_prob.extend(pred_prob)
        return val_pred, val_prob
if __name__ == "__main__":
    start = time.time()
    # Load configuration.
    conf = Config()
    # Load the LCQMC dataset splits.
    dataset = hub.dataset.LCQMC()
    data_train, data_val, data_test = data_input.get_lcqmc()
    # data_train = data_train[:10000]
    print("train size:{},val size:{}, test size:{}".format(
        len(data_train), len(data_val), len(data_test)))
    # BUG FIX: this module defines BertClassifier; `SiamenseRNN` is not
    # defined here and raised NameError at runtime.
    model = BertClassifier(conf)
    model.fit(data_train, data_val, data_test)
pass | dssm/model/bert_classifier.py | # here put the import lib
import time
import numpy as np
import tensorflow as tf
import random
import paddlehub as hub
from sklearn.metrics import accuracy_score
import math
from keras.layers import Dense, Subtract, Lambda
import keras.backend as K
from keras.regularizers import l2
import nni
import data_input
from config import Config
from .base_model import BaseModel
random.seed(9102)
def cosine_similarity(a, b):
c = tf.sqrt(tf.reduce_sum(tf.multiply(a, a), axis=1))
d = tf.sqrt(tf.reduce_sum(tf.multiply(b, b), axis=1))
e = tf.reduce_sum(tf.multiply(a, b), axis=1)
f = tf.multiply(c, d)
r = tf.divide(e, f)
return r
def variable_summaries(var, name):
"""Attach a lot of summaries to a Tensor."""
with tf.name_scope('summaries'):
mean = tf.reduce_mean(var)
tf.summary.scalar('mean/' + name, mean)
with tf.name_scope('stddev'):
stddev = tf.sqrt(tf.reduce_sum(tf.square(var - mean)))
tf.summary.scalar('sttdev/' + name, stddev)
tf.summary.scalar('max/' + name, tf.reduce_max(var))
tf.summary.scalar('min/' + name, tf.reduce_min(var))
tf.summary.histogram(name, var)
class BertClassifier(BaseModel):
def __init__(self, cfg, is_training=1):
super(BertClassifier, self).__init__(cfg, is_training)
pass
def add_placeholder(self):
# 预测时只用输入query即可,将其embedding为向量。
self.q_ids = tf.placeholder(
tf.int32, shape=[None, None], name='query_batch')
self.q_mask_ids = tf.placeholder(
tf.int32, shape=[None, None], name='q_mask_ids')
self.q_seg_ids = tf.placeholder(
tf.int32, shape=[None, None], name='q_seg_ids')
self.q_seq_length = tf.placeholder(
tf.int32, shape=[None], name='query_sequence_length')
self.is_train_place = tf.placeholder(
dtype=tf.bool, name='is_train_place')
# label
self.sim_labels = tf.placeholder(
tf.float32, shape=[None], name="sim_labels")
def forward(self):
# 获取cls的输出
q_emb, _, self.q_e = self.share_bert_layer(
self.is_train_place, self.q_ids, self.q_mask_ids, self.q_seg_ids, use_bert_pre=1)
predict_prob = Dense(units=1, activation='sigmoid')(q_emb)
self.predict_prob = tf.reshape(predict_prob, [-1])
self.predict_idx = tf.cast(tf.greater_equal(predict_prob, 0.5), tf.int32)
with tf.name_scope('Loss'):
# Train Loss
loss = tf.losses.log_loss(self.sim_labels, self.predict_prob)
self.loss = tf.reduce_mean(loss)
tf.summary.scalar('loss', self.loss)
def build(self):
self.add_placeholder()
self.forward()
self.add_train_op(self.cfg['optimizer'],
self.cfg['learning_rate'], self.loss)
self._init_session()
self._add_summary()
pass
def feed_batch(self, out_ids1, m_ids1, seg_ids1, seq_len1, label=None, is_test=0):
is_train = 0 if is_test else 1
fd = {
self.q_ids: out_ids1, self.q_mask_ids: m_ids1,
self.q_seg_ids: seg_ids1,
self.q_seq_length: seq_len1,
self.is_train_place: is_train}
if label:
fd[self.sim_labels] = label
return fd
def run_epoch(self, epoch, d_train, d_val):
steps = int(math.ceil(float(len(d_train)) / self.cfg['batch_size']))
progbar = tf.keras.utils.Progbar(steps)
# 每个 epoch 分batch训练
batch_iter = data_input.get_batch(
d_train, batch_size=self.cfg['batch_size'])
for i, (out_ids1, m_ids1, seg_ids1, seq_len1, label) in enumerate(batch_iter):
fd = self.feed_batch(out_ids1, m_ids1, seg_ids1, seq_len1, label)
# a = self.sess.run([self.is_train_place, self.q_e], feed_dict=fd)
_, cur_loss = self.sess.run(
[self.train_op, self.loss], feed_dict=fd)
progbar.update(i + 1, [("loss", cur_loss)])
# 训练完一个epoch之后,使用验证集评估,然后预测, 然后评估准确率
dev_acc = self.eval(d_val)
nni.report_intermediate_result(dev_acc)
print("dev set acc:", dev_acc)
return dev_acc
def eval(self, test_data):
pbar = data_input.get_batch(
test_data, batch_size=self.cfg['batch_size'], is_test=1)
val_label, val_pred = [], []
for (out_ids1, m_ids1, seg_ids1, seq_len1, label) in pbar:
val_label.extend(label)
fd = self.feed_batch(out_ids1, m_ids1, seg_ids1, seq_len1, is_test=1)
pred_labels, pred_prob = self.sess.run(
[self.predict_idx, self.predict_prob], feed_dict=fd)
val_pred.extend(pred_labels)
test_acc = accuracy_score(val_label, val_pred)
return test_acc
def predict(self, test_data):
pbar = data_input.get_batch(
test_data, batch_size=self.cfg['batch_size'], is_test=1)
val_pred, val_prob = [], []
for (t1_ids, t1_len, t2_ids, t2_len) in pbar:
fd = self.feed_batch(t1_ids, t1_len, t2_ids, t2_len, is_test=1)
pred_labels, pred_prob = self.sess.run(
[self.predict_idx, self.predict_prob], feed_dict=fd)
val_pred.extend(pred_labels)
val_prob.extend(pred_prob)
return val_pred, val_prob
if __name__ == "__main__":
start = time.time()
# 读取配置
conf = Config()
# 读取数据
dataset = hub.dataset.LCQMC()
data_train, data_val, data_test = data_input.get_lcqmc()
# data_train = data_train[:10000]
print("train size:{},val size:{}, test size:{}".format(
len(data_train), len(data_val), len(data_test)))
model = SiamenseRNN(conf)
model.fit(data_train, data_val, data_test)
pass | 0.724481 | 0.206714 |
import numpy as np
import re
def dict_cut(text, entities):
    """Tokenize *text* so every occurrence of an entity is kept whole.

    Example: text='xy and z', entities=['xy'] ->
    ['xy', ' ', 'a', 'n', 'd', ' ', 'z'].

    Entities must be ordered longest-first (callers sort them), otherwise a
    shorter entity that prefixes a longer one wins the regex alternation.

    :param text: input text
    :param entities: entity strings, longest-first (list or numpy array)
    :return: list of entity / single-character tokens
    """
    # BUG FIX: the original escaping loop ran
    #   re.sub(pattern='\\' + i, repl='\(', string=pattern_text)
    # for each metacharacter, i.e. it REPLACED every (, ), [, ], ?, . with a
    # literal '\(' -- corrupting both the entities and the pattern.
    # re.escape() escapes each entity correctly instead.
    pattern_text = '|'.join(re.escape(entity) for entity in entities)
    # Fall back to matching any single character between entity hits.
    pattern = re.compile(pattern_text + '|.')
    return pattern.findall(text)
def dict_locate(text, dictionary={}):
    """Locate dictionary entities inside *text*.

    dictionary maps entity type -> list of entity strings.  Returns one
    record per entity occurrence:
    {'text': entity, 'type': type, 'location': [char index, ...]}.

    :param text: input text
    :param dictionary: mapping from entity type to entity strings
    :return: list of located entities with character positions
    """
    # Flatten the dictionary and try longer entities first, so an entity
    # that contains another one wins the match (stable sort keeps the
    # original relative order among equal lengths).
    flat_entities = [entity
                     for group in dictionary.values()
                     for entity in group]
    flat_entities.sort(key=len, reverse=True)
    located = []
    offset = 0
    for token in dict_cut(text, flat_entities):
        for entity_type, group in dictionary.items():
            if token in group:
                span = list(range(offset, offset + len(token)))
                offset += len(token)
                located.append({'text': token,
                                'location': span,
                                'type': entity_type})
                break
        else:
            # Plain character outside any entity: just advance the cursor.
            offset += 1
    return located
def dict_label(text, dictionary={}, regulation=[['U', [10]]]):
    """Produce a per-character label sequence for *text* from the dictionary.

    regulation maps each entity type to [begin, inside, end] label ids and
    'U' to the single [outside] id for plain characters.  A 2-char entity
    gets [begin, end]; longer entities get begin + inside*(len-2) + end.

    NOTE(review): unlike dict_locate, the inner loop has no `break`, so an
    entity listed under two types is labelled twice; a 1-char entity also
    gets two labels (begin slice + end slice).  Confirm the inputs exclude
    both cases before relying on len(result) == len(text).

    :param text: input text
    :param dictionary: mapping from entity type to entity strings
    :param regulation: [[type, [begin, inside, end]], ..., ['U', [outside]]]
    :return: list of label ids
    """
    regulation = {i[0]: i[1] for i in regulation}
    entities = list(dictionary.values())
    entities_all = []
    entities_len_all = []
    for i in entities:
        for j in i:
            entities_all.append(j)
            entities_len_all.append(len(j))
    # Longest entities first so dict_cut prefers them in the alternation.
    entities_index = sorted(range(len(entities_all)),
                            key=lambda x: entities_len_all[x],
                            reverse=True)
    entities_all = np.array(entities_all)[entities_index]
    text_entities = dict_cut(text, entities_all)
    text_annotation = []
    for text_entity in text_entities:
        for title in dictionary:
            if text_entity in dictionary[title]:
                if len(text_entity) == 2:
                    text_annotation += [regulation[title][0], regulation[title][2]]
                else:
                    text_annotation += (regulation[title][0:1] +
                                        regulation[title][1:2] * (len(text_entity) - 2) +
                                        regulation[title][2:])
        if text_entity not in entities_all:
            text_annotation += regulation['U']
    return text_annotation
def dict_locate_label(text, dictionary={}, regulation=[['U', [10]]]):
    """Locate dictionary entities in *text* and label it in a single pass.

    Combines dict_locate and dict_label: returns
    (list of located entities with character positions,
     per-character label id list).

    NOTE(review): as in dict_label, the inner loop has no `break` and a
    1-char entity would receive two labels; confirm inputs avoid both.

    :param text: input text
    :param dictionary: mapping from entity type to entity strings
    :param regulation: [[type, [begin, inside, end]], ..., ['U', [outside]]]
    :return: (location list, annotation list)
    """
    regulation = {i[0]: i[1] for i in regulation}
    entities = list(dictionary.values())
    entities_all = []
    entities_len_all = []
    for i in entities:
        for j in i:
            entities_all.append(j)
            entities_len_all.append(len(j))
    # Longest entities first so dict_cut prefers them in the alternation.
    entities_index = sorted(range(len(entities_all)),
                            key=lambda x: entities_len_all[x],
                            reverse=True)
    entities_all = np.array(entities_all)[entities_index]
    text_entities = dict_cut(text, entities_all)
    text_location = []
    text_annotation = []
    # Running character offset into `text`; advanced by entity length for
    # matches and by 1 for plain characters.
    num = 0
    for text_entity in text_entities:
        for title in dictionary:
            if text_entity in dictionary[title]:
                location = list(range(num, num + len(text_entity)))
                num += len(text_entity)
                text_location.append({'text': text_entity,
                                      'location': location,
                                      'type': title})
                if len(text_entity) == 2:
                    text_annotation += [regulation[title][0], regulation[title][2]]
                else:
                    text_annotation += (regulation[title][0:1] +
                                        regulation[title][1:2] * (len(text_entity) - 2) +
                                        regulation[title][2:])
        if text_entity not in entities_all:
            text_annotation += regulation['U']
            num += 1
    return text_location, text_annotation
if __name__ == '__main__':
    # Demo: run the annotation helpers on a small Chinese sample sentence
    # ("the company provides the department manager's ID card and ID proof").
    _text = '企业提供部门经理的身份证和身份证明'
    _dictionary = {
        '主体': ['企业'],
        '客体': ['部门经理'],
        '材料': ['身份证', '身份证明']
    }
    # Pre-sorted longest-first variant for the raw dict_cut call below.
    _entities = ['企业', '部门经理', '身份证明', '身份证']
    _regulation = [
        ['主体', [1, 2, 3]],
        ['客体', [4, 5, 6]],
        ['材料', [7, 8, 9]],
        ['U', [10]]]
    print('文本:', _text)
    print('词库:', _dictionary)
    print('实体抽取:\n', dict_cut(_text, _entities))
    print('\n实体定位:\n')
    for i in dict_locate(_text, _dictionary):
        print(i)
    print('\n实体标注:\n', dict_label(_text, _dictionary, _regulation))
print('\n实体定位+标注:\n', dict_locate_label(_text, _dictionary, _regulation)) | Text_Annotation/annotate/dict_annotate.py | import numpy as np
import re
def dict_cut(text, entities):
"""
根据词库抽取文本中的实体
text = '企业提供部门经理的身份证和身份证明'
entities = ['企业', '部门经理', '身份证', '身份证明']
return = ['企业', '提', '供', '部门经理', '的', '身份证', '和', '身份证明']
:param text:文本
:param entities:实体列表,前面包含后面
:return:实体抽取结果
"""
pattern_text = '|'.join(entities)
# 对原始文本的特殊字符做转义
for i in ['(', ')', '[', ']', '?', '.']:
pattern_text = re.sub(pattern='\\' + i, repl='\(', string=pattern_text)
pattern = re.compile(pattern_text + '|.')
text_entities = pattern.findall(text)
return text_entities
def dict_locate(text, dictionary={}):
"""
根据词库做实体定位
text = '企业提供部门经理的身份证和身份证明'
dictionary = {
'主体': ['企业'],
'客体': ['部门经理'],
'材料': ['身份证', '身份证明']
}
return = [
{'text': '企业', 'type': '主体', 'location': [2, 3]},
{'text': '部门经理', 'type': '客体', 'location': [20, 21, 22, 23]},
{'text': '身份证', 'type': '材料', 'location': [27, 28, 29]},
{'text': '身份证明', 'type': '材料', 'location': [39, 40, 41, 42]}
]
:param text:文本
:param dictionary:实体库
:return:实体定位结果
"""
entities = list(dictionary.values())
entities_all = []
entities_len_all = []
for i in entities:
for j in i:
entities_all.append(j)
entities_len_all.append(len(j))
entities_index = sorted(range(len(entities_all)),
key=lambda x: entities_len_all[x],
reverse=True)
entities_all = np.array(entities_all)[entities_index]
text_entities = dict_cut(text, entities_all)
text_location = []
num = 0
for text_entity in text_entities:
for title in dictionary:
if text_entity in dictionary[title]:
location = list(range(num, num + len(text_entity)))
num += len(text_entity)
text_location.append({'text': text_entity,
'location': location,
'type': title})
break
else:
num += 1
return text_location
def dict_label(text, dictionary={}, regulation=[['U', [10]]]):
"""
根据词库做实体标注
text = '企业提供部门经理的身份证和身份证明'
dictionary = {
'主体': ['企业'],
'客体': ['部门经理'],
'材料': ['身份证', '身份证明']
}
regulation = [
['主体', [1, 2, 3]],
['客体', [4, 5, 6]],
['材料', [7, 8, 9]],
['U', [10]]]
return = [1, 3, 10, 10, 4, 5, 5, 6, 10, 7, 8, 9, 10, 7, 8, 8, 9]
:param text:文本
:param regulation:标注规则
:return:实体标注结果
"""
regulation = {i[0]: i[1] for i in regulation}
entities = list(dictionary.values())
entities_all = []
entities_len_all = []
for i in entities:
for j in i:
entities_all.append(j)
entities_len_all.append(len(j))
entities_index = sorted(range(len(entities_all)),
key=lambda x: entities_len_all[x],
reverse=True)
entities_all = np.array(entities_all)[entities_index]
text_entities = dict_cut(text, entities_all)
text_annotation = []
for text_entity in text_entities:
for title in dictionary:
if text_entity in dictionary[title]:
if len(text_entity) == 2:
text_annotation += [regulation[title][0], regulation[title][2]]
else:
text_annotation += (regulation[title][0:1] +
regulation[title][1:2] * (len(text_entity) - 2) +
regulation[title][2:])
if text_entity not in entities_all:
text_annotation += regulation['U']
return text_annotation
def dict_locate_label(text, dictionary={}, regulation=[['U', [10]]]):
"""
根据词库做 实体定位+标注
text = '企业提供部门经理的身份证和身份证明'
dictionary = {
'主体': ['企业'],
'客体': ['部门经理'],
'材料': ['身份证', '身份证明']
}
regulation = [
['主体', [1, 2, 3]],
['客体', [4, 5, 6]],
['材料', [7, 8, 9]],
['U', [10]]]
return = (
[
{'location': [0, 1], 'text': '企业', 'type': '主体'},
{'location': [2, 3, 4, 5], 'text': '部门经理', 'type': '客体'},
{'location': [6, 7, 8], 'text': '身份证', 'type': '材料'},
{'location': [9, 10, 11, 12], 'text': '身份证明', 'type': '材料'}
],
[1, 3, 10, 10, 10, 10, 10, 10, 4, 5, 5, 6, 10, 10, 10, 7, 8, 9, 10, 10, 10, 7, 8, 8, 9])
:param text:文本
:param dictionary:实体库
:param regulation:标注规则
:return:[实体定位结果, 实体标注结果]
"""
regulation = {i[0]: i[1] for i in regulation}
entities = list(dictionary.values())
entities_all = []
entities_len_all = []
for i in entities:
for j in i:
entities_all.append(j)
entities_len_all.append(len(j))
entities_index = sorted(range(len(entities_all)),
key=lambda x: entities_len_all[x],
reverse=True)
entities_all = np.array(entities_all)[entities_index]
text_entities = dict_cut(text, entities_all)
text_location = []
text_annotation = []
num = 0
for text_entity in text_entities:
for title in dictionary:
if text_entity in dictionary[title]:
location = list(range(num, num + len(text_entity)))
num += len(text_entity)
text_location.append({'text': text_entity,
'location': location,
'type': title})
if len(text_entity) == 2:
text_annotation += [regulation[title][0], regulation[title][2]]
else:
text_annotation += (regulation[title][0:1] +
regulation[title][1:2] * (len(text_entity) - 2) +
regulation[title][2:])
if text_entity not in entities_all:
text_annotation += regulation['U']
num += 1
return text_location, text_annotation
if __name__ == '__main__':
_text = '企业提供部门经理的身份证和身份证明'
_dictionary = {
'主体': ['企业'],
'客体': ['部门经理'],
'材料': ['身份证', '身份证明']
}
_entities = ['企业', '部门经理', '身份证明', '身份证']
_regulation = [
['主体', [1, 2, 3]],
['客体', [4, 5, 6]],
['材料', [7, 8, 9]],
['U', [10]]]
print('文本:', _text)
print('词库:', _dictionary)
print('实体抽取:\n', dict_cut(_text, _entities))
print('\n实体定位:\n')
for i in dict_locate(_text, _dictionary):
print(i)
print('\n实体标注:\n', dict_label(_text, _dictionary, _regulation))
print('\n实体定位+标注:\n', dict_locate_label(_text, _dictionary, _regulation)) | 0.340047 | 0.427994 |
import os
from pywps import Process
from pywps import LiteralInput, LiteralOutput
from pywps import ComplexInput, ComplexOutput
from pywps import Format, FORMATS
from pywps.app.Common import Metadata
from copernicus import runner
from copernicus import util
import logging
LOGGER = logging.getLogger("PYWPS")
class RainFarm(Process):
def __init__(self):
inputs = [
LiteralInput('model', 'Model',
abstract='Choose a model like MPI-ESM-LR.',
data_type='string',
allowed_values=['ACCESS1-0', ],
default='ACCESS1-0'),
LiteralInput('experiment', 'Experiment',
abstract='Choose an experiment like historical.',
data_type='string',
allowed_values=['historical', ],
default='historical'),
LiteralInput('start_year', 'Start year', data_type='integer',
abstract='Start year of model data.',
default="1997"),
LiteralInput('end_year', 'End year', data_type='integer',
abstract='End year of model data.',
default="1997"),
LiteralInput('subset', 'Geographical subset',
abstract='Choose a geographical subset with a Bounding Box: 4,13,44,53',
data_type='string',
default='4,13,44,53'),
LiteralInput('regridding', 'Regridding',
abstract='Flag for regridding.',
data_type='boolean',
default='0'),
LiteralInput('slope', 'Slope',
abstract='Flag for slope.',
data_type='boolean',
default='0'),
LiteralInput('num_ens_members', 'Number of ensemble members',
abstract='Choose a number of ensemble members.',
data_type='integer',
default='2'),
LiteralInput('num_subdivs', 'Number of subdivisions',
abstract='Choose a number of subdivisions.',
data_type='integer',
default='8'),
]
outputs = [
ComplexOutput('output', 'Output plot',
abstract='Generated output plot of ESMValTool processing.',
as_reference=True,
supported_formats=[Format('image/png')]),
]
super(RainFarm, self).__init__(
self._handler,
identifier="rainfarm",
title="RainFARM stochastic downscaling",
version=runner.VERSION,
abstract="Tool to perform stochastic precipitation downscaling, generating an ensemble of fine-scale "
" precipitation fields from information simulated by climate models at regional scale.",
metadata=[
Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
Metadata('Documentation',
'https://copernicus-wps-demo.readthedocs.io/en/latest/processes.html#rainfarm',
role=util.WPS_ROLE_DOC),
Metadata('Media',
util.diagdata_url() + '/rainfarm/rainfarm_thumbnail.png',
role=util.WPS_ROLE_MEDIA),
Metadata('Diagnostic Description',
util.diagdata_url() + '/rainfarm/description.md',
role=util.MAGIC_ROLE_DOC),
Metadata('Diagnostic Metadata',
util.diagdata_url() + '/rainfarm/rainfarm.yml',
role=util.MAGIC_ROLE_METADATA),
],
inputs=inputs,
outputs=outputs,
status_supported=True,
store_supported=True)
def _handler(self, request, response):
response.update_status("starting ...", 0)
# run diag
response.update_status("running diag ...", 20)
# result plot
response.update_status("collect output plot ...", 90)
response.outputs['output'].output_format = Format('image/png')
response.outputs['output'].file = util.diagdata_file(os.path.join('rainfarm', 'RainFARM_example_64x64.png'))
response.update_status("done.", 100)
return response | copernicus/processes/wps_rainfarm.py | import os
from pywps import Process
from pywps import LiteralInput, LiteralOutput
from pywps import ComplexInput, ComplexOutput
from pywps import Format, FORMATS
from pywps.app.Common import Metadata
from copernicus import runner
from copernicus import util
import logging
LOGGER = logging.getLogger("PYWPS")
class RainFarm(Process):
def __init__(self):
inputs = [
LiteralInput('model', 'Model',
abstract='Choose a model like MPI-ESM-LR.',
data_type='string',
allowed_values=['ACCESS1-0', ],
default='ACCESS1-0'),
LiteralInput('experiment', 'Experiment',
abstract='Choose an experiment like historical.',
data_type='string',
allowed_values=['historical', ],
default='historical'),
LiteralInput('start_year', 'Start year', data_type='integer',
abstract='Start year of model data.',
default="1997"),
LiteralInput('end_year', 'End year', data_type='integer',
abstract='End year of model data.',
default="1997"),
LiteralInput('subset', 'Geographical subset',
abstract='Choose a geographical subset with a Bounding Box: 4,13,44,53',
data_type='string',
default='4,13,44,53'),
LiteralInput('regridding', 'Regridding',
abstract='Flag for regridding.',
data_type='boolean',
default='0'),
LiteralInput('slope', 'Slope',
abstract='Flag for slope.',
data_type='boolean',
default='0'),
LiteralInput('num_ens_members', 'Number of ensemble members',
abstract='Choose a number of ensemble members.',
data_type='integer',
default='2'),
LiteralInput('num_subdivs', 'Number of subdivisions',
abstract='Choose a number of subdivisions.',
data_type='integer',
default='8'),
]
outputs = [
ComplexOutput('output', 'Output plot',
abstract='Generated output plot of ESMValTool processing.',
as_reference=True,
supported_formats=[Format('image/png')]),
]
super(RainFarm, self).__init__(
self._handler,
identifier="rainfarm",
title="RainFARM stochastic downscaling",
version=runner.VERSION,
abstract="Tool to perform stochastic precipitation downscaling, generating an ensemble of fine-scale "
" precipitation fields from information simulated by climate models at regional scale.",
metadata=[
Metadata('ESMValTool', 'http://www.esmvaltool.org/'),
Metadata('Documentation',
'https://copernicus-wps-demo.readthedocs.io/en/latest/processes.html#rainfarm',
role=util.WPS_ROLE_DOC),
Metadata('Media',
util.diagdata_url() + '/rainfarm/rainfarm_thumbnail.png',
role=util.WPS_ROLE_MEDIA),
Metadata('Diagnostic Description',
util.diagdata_url() + '/rainfarm/description.md',
role=util.MAGIC_ROLE_DOC),
Metadata('Diagnostic Metadata',
util.diagdata_url() + '/rainfarm/rainfarm.yml',
role=util.MAGIC_ROLE_METADATA),
],
inputs=inputs,
outputs=outputs,
status_supported=True,
store_supported=True)
def _handler(self, request, response):
response.update_status("starting ...", 0)
# run diag
response.update_status("running diag ...", 20)
# result plot
response.update_status("collect output plot ...", 90)
response.outputs['output'].output_format = Format('image/png')
response.outputs['output'].file = util.diagdata_file(os.path.join('rainfarm', 'RainFARM_example_64x64.png'))
response.update_status("done.", 100)
return response | 0.698741 | 0.311204 |
import os
# Own Modules
from Dados.ScriptInfo.ScriptInfo import ScriptInfo
from Dados.V4Styles.V4Styles import V4Styles
from Dados.Events.Events import Events
from Dados.SimpleLine.SimpleLine import SimpleLine
# Search for Epydoc, MIT LICENSE, Python Packages, PEP 0440 and RestructuredText
# Reminder: The only uses this object has for v4styles are the methods readline, __str__ and __repr__
# Time to rewrite the whole module
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2020 <NAME>"
__license__ = "MIT"
__credits__ = []
__version__ = "0.2.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = (["Prototype", "Development", "Production"])[2]
class SubPackage:
def __init__(self) -> None:
self.scriptinfo = ScriptInfo()
self.v4styles = V4Styles()
self.events = Events()
self.__defaulttitles__ = [f"{_}" for _ in SimpleLine().__defaulttitles__]
self.__lowertitles__ = [f"{_}" for _ in SimpleLine().__lowertitles__]
self.__readers__ = (self.scriptinfo.readline, self.v4styles.readline, self.events.readline)
self.__readerpos__ = None
def __repr__(self) -> str:
""" Called when printing the formatted version of this object.
Used for saving. Called with f'{NAME!r}'
:return: String.
"""
return f"{self.scriptinfo!r}\n{self.v4styles!r}\n{self.events!r}\n"
# Must change this later to check for absolute paths instead of just strings
def savefile(self, arg: str, overwrite: bool = False) -> bool:
""" Save file into location 'arg'.
:param arg: String. File Path to save. Will not replace existing file. Unless overwrite == True.
:param overwrite: Forces method to overwrite existing file.
:return: True if Save was successful. False if file already exists.
"""
if isinstance(arg, str) is False:
raise TypeError(f"{arg} must be a file address (String).")
try:
with open(arg, "x") as f:
f.write(f"{self!r}")
return True
except FileExistsError:
if overwrite:
if os.path.exists(arg):
os.remove(arg)
with open(arg, "x") as f:
f.write(f"{self!r}")
return True
else:
raise ValueError(f"File exists, but file doesn't exist. This exception shouldn't ever happen...")
else:
return False
def __str__(self) -> str:
""" Unformatted string Version of the file.
Used for checking what lines to edit. Called with f'{NAME!s}'
:return: String.
"""
return f"{self.scriptinfo!s}\n{self.v4styles!s}\n{self.events!s}\n"
# Must change this later to check for absolute paths instead of just strings
def loadfile(self, arg: str) -> 'SubPackage':
""" Load an SSA text file into this object.
:param arg: String. Local Address of file.
:return: self.
"""
if isinstance(arg, str) is False:
raise TypeError(f"{arg} must be a file address (String).")
try:
with open(arg, "r") as f:
for _ in f:
__line = SimpleLine(_)
__linelower = f"{__line}".lower()
__checking__ = [__ in __linelower for __ in self.__lowertitles__]
if True in __checking__:
self.__readerpos__ = __checking__.index(True)
else:
if self.__readerpos__ is not None:
# Call the reading function for the current section
(self.__readers__[self.__readerpos__])(__line)
except FileNotFoundError:
raise ValueError(f"{arg} file could not be found.")
return self | Dados/SubPackage.py | import os
# Own Modules
from Dados.ScriptInfo.ScriptInfo import ScriptInfo
from Dados.V4Styles.V4Styles import V4Styles
from Dados.Events.Events import Events
from Dados.SimpleLine.SimpleLine import SimpleLine
# Search for Epydoc, MIT LICENSE, Python Packages, PEP 0440 and RestructuredText
# Reminder: The only uses this object has for v4styles are the methods readline, __str__ and __repr__
# Time to rewrite the whole module
__author__ = "<NAME>"
__copyright__ = "Copyright (C) 2020 <NAME>"
__license__ = "MIT"
__credits__ = []
__version__ = "0.2.1"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = (["Prototype", "Development", "Production"])[2]
class SubPackage:
def __init__(self) -> None:
self.scriptinfo = ScriptInfo()
self.v4styles = V4Styles()
self.events = Events()
self.__defaulttitles__ = [f"{_}" for _ in SimpleLine().__defaulttitles__]
self.__lowertitles__ = [f"{_}" for _ in SimpleLine().__lowertitles__]
self.__readers__ = (self.scriptinfo.readline, self.v4styles.readline, self.events.readline)
self.__readerpos__ = None
def __repr__(self) -> str:
""" Called when printing the formatted version of this object.
Used for saving. Called with f'{NAME!r}'
:return: String.
"""
return f"{self.scriptinfo!r}\n{self.v4styles!r}\n{self.events!r}\n"
# Must change this later to check for absolute paths instead of just strings
def savefile(self, arg: str, overwrite: bool = False) -> bool:
""" Save file into location 'arg'.
:param arg: String. File Path to save. Will not replace existing file. Unless overwrite == True.
:param overwrite: Forces method to overwrite existing file.
:return: True if Save was successful. False if file already exists.
"""
if isinstance(arg, str) is False:
raise TypeError(f"{arg} must be a file address (String).")
try:
with open(arg, "x") as f:
f.write(f"{self!r}")
return True
except FileExistsError:
if overwrite:
if os.path.exists(arg):
os.remove(arg)
with open(arg, "x") as f:
f.write(f"{self!r}")
return True
else:
raise ValueError(f"File exists, but file doesn't exist. This exception shouldn't ever happen...")
else:
return False
def __str__(self) -> str:
""" Unformatted string Version of the file.
Used for checking what lines to edit. Called with f'{NAME!s}'
:return: String.
"""
return f"{self.scriptinfo!s}\n{self.v4styles!s}\n{self.events!s}\n"
# Must change this later to check for absolute paths instead of just strings
def loadfile(self, arg: str) -> 'SubPackage':
""" Load an SSA text file into this object.
:param arg: String. Local Address of file.
:return: self.
"""
if isinstance(arg, str) is False:
raise TypeError(f"{arg} must be a file address (String).")
try:
with open(arg, "r") as f:
for _ in f:
__line = SimpleLine(_)
__linelower = f"{__line}".lower()
__checking__ = [__ in __linelower for __ in self.__lowertitles__]
if True in __checking__:
self.__readerpos__ = __checking__.index(True)
else:
if self.__readerpos__ is not None:
# Call the reading function for the current section
(self.__readers__[self.__readerpos__])(__line)
except FileNotFoundError:
raise ValueError(f"{arg} file could not be found.")
return self | 0.52902 | 0.067209 |
import onceml.types.channel as channel
from onceml.orchestration import KubeflowRunner, Pipeline
import onceml.utils.json_utils as json_utils
import onceml.global_config as global_config
import os
from cycle_component import myComponent1, myExecutor1
command = [
'python3', '-m', '{}.orchestration.kubeflow.container_entrypoint'.format(
global_config.project_name)
]
def test_container_entrypoint():
a = myComponent1(executor=myExecutor1,
a=1,
b=2,
resulta=channel.OutputChannel(str),
resultb=channel.OutputChannel(int))
p = Pipeline(task_name='task1',
model_name='modelA',
components={
'a': a,
})
for component in p.components:
arguments = [
'--pipeline_root', [p._task_name, p._model_name],
'--serialized_component',
json_utils.componentDumps(component)
]
d_channels = {
} # 获取依赖的Do类型的组件的channel输出路径
d_artifact = {
} # 获取依赖的组件的artifact输出路径
arguments = arguments + [
'--d_channels', d_channels, '--d_artifact', d_artifact
]
s = ''
for i in arguments:
s += ' '
if type(i) == str:
s += "'" + i + "'"
else:
s += "'" + json_utils.simpleDumps(i) + "'"
print('------------')
os.system(' '.join(command) + s)
def test_container_entrypoint_receiver():
c = myComponent1(executor=myExecutor1,
a=1,
b=2,
resulta=channel.OutputChannel(str),
resultb=channel.OutputChannel(int))
p = Pipeline(task_name='task1',
model_name='modelA',
components={
'c': c,
})
for component in p.components:
arguments = [
'--pipeline_root', [p._task_name, p._model_name],
'--serialized_component',
json_utils.componentDumps(component)
]
d_channels = {
'a': 'task1/modela/a/result.json',
} # 获取依赖的Do类型的组件的channel输出路径
d_artifact = {
'a': 'task1/modela/a/artifact',
'b': 'task1/modela/b/artifact',
} # 获取依赖的组件的artifact输出路径
arguments = arguments + [
'--d_channels', d_channels, '--d_artifact', d_artifact
]
s = ''
for i in arguments:
s += ' '
if type(i) == str:
s += "'" + i + "'"
else:
s += "'" + json_utils.simpleDumps(i) + "'"
print('------------')
os.system(' '.join(command) + s)
if __name__ == "__main__":
test_container_entrypoint()
#test_container_entrypoint_receiver() | tests/test_container_cycle.py | import onceml.types.channel as channel
from onceml.orchestration import KubeflowRunner, Pipeline
import onceml.utils.json_utils as json_utils
import onceml.global_config as global_config
import os
from cycle_component import myComponent1, myExecutor1
command = [
'python3', '-m', '{}.orchestration.kubeflow.container_entrypoint'.format(
global_config.project_name)
]
def test_container_entrypoint():
a = myComponent1(executor=myExecutor1,
a=1,
b=2,
resulta=channel.OutputChannel(str),
resultb=channel.OutputChannel(int))
p = Pipeline(task_name='task1',
model_name='modelA',
components={
'a': a,
})
for component in p.components:
arguments = [
'--pipeline_root', [p._task_name, p._model_name],
'--serialized_component',
json_utils.componentDumps(component)
]
d_channels = {
} # 获取依赖的Do类型的组件的channel输出路径
d_artifact = {
} # 获取依赖的组件的artifact输出路径
arguments = arguments + [
'--d_channels', d_channels, '--d_artifact', d_artifact
]
s = ''
for i in arguments:
s += ' '
if type(i) == str:
s += "'" + i + "'"
else:
s += "'" + json_utils.simpleDumps(i) + "'"
print('------------')
os.system(' '.join(command) + s)
def test_container_entrypoint_receiver():
c = myComponent1(executor=myExecutor1,
a=1,
b=2,
resulta=channel.OutputChannel(str),
resultb=channel.OutputChannel(int))
p = Pipeline(task_name='task1',
model_name='modelA',
components={
'c': c,
})
for component in p.components:
arguments = [
'--pipeline_root', [p._task_name, p._model_name],
'--serialized_component',
json_utils.componentDumps(component)
]
d_channels = {
'a': 'task1/modela/a/result.json',
} # 获取依赖的Do类型的组件的channel输出路径
d_artifact = {
'a': 'task1/modela/a/artifact',
'b': 'task1/modela/b/artifact',
} # 获取依赖的组件的artifact输出路径
arguments = arguments + [
'--d_channels', d_channels, '--d_artifact', d_artifact
]
s = ''
for i in arguments:
s += ' '
if type(i) == str:
s += "'" + i + "'"
else:
s += "'" + json_utils.simpleDumps(i) + "'"
print('------------')
os.system(' '.join(command) + s)
if __name__ == "__main__":
test_container_entrypoint()
#test_container_entrypoint_receiver() | 0.230486 | 0.127245 |
import json
import time
import tkinter
from tkinter import RIGHT, END
try:
import tkinter
except ImportError:
import Tkinter as tk
import requests
from boltiot import Bolt, Sms, Email
from alert import conf
def toggle_state(*_):
if e1.var.get():
button1['state'] = 'normal'
else:
button1['state'] = 'disabled'
def send_telegram_message(message):
url = "https://api.telegram.org/" + conf.telegram_bot_id + "/sendMessage"
data = {"chat_id": conf.telegram_chat_id,
"text": message
}
try:
response = requests.request(
"GET",
url,
params=data
)
print("This is the Telegram response")
print(response.text)
telegram_data = json.loads(response.text)
return telegram_data["ok"]
except Exception as e:
print("An error occurred in sending the alert message via Telegram")
print(e)
return False
def get_bitcoin_price():
URL = "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD,JPY,EUR,INR" # REPLACE WITH CORRECT URL
respons = requests.request("GET", URL)
respons = json.loads(respons.text)
current_price = respons["USD"]
return current_price
mybolt = Bolt(conf.api_key, conf.device_id)
sms = Sms(conf.SSID, conf.AUTH_TOKEN, conf.TO_NUMBER, conf.FROM_NUMBER)
mailer = Email(conf.MAILGUN_API_KEY, conf.SANDBOX_URL, conf.SENDER_MAIL, conf.RECIPIENT_MAIL)
def testVal(inStr, acttyp):
if acttyp == '1': # insert
if not inStr.isdigit():
return False
return True
def printSomething():
while True:
textbox.update()
c_price = get_bitcoin_price()
textbox.update()
_time = time.ctime()
textbox.insert(END, "The Current Bitcoin Price is: " + str(get_bitcoin_price()) + " USD" + ", at " + str(
time.ctime()) + ".\n")
print(get_bitcoin_price(), str(time.ctime()))
textbox.update()
if c_price >= int(e1.get()):
textbox.insert(END, "Alert!!!, The Current Bitcoin Price is: " + str(
get_bitcoin_price()) + " USD" + ", at " + str(
time.ctime()) + ".\n")
# Enable Buzzer
response_buzzer = mybolt.digitalWrite('0', 'HIGH')
print(response_buzzer)
buzzer_data = json.loads(response_buzzer)
if buzzer_data["success"] == 1:
textbox.insert(END, "Buzzer is now active.\n")
else:
textbox.insert(END, "Unable to activate buzzer due to " + str(buzzer_data["value"]) + ".\n")
textbox.update()
# Send SMS
textbox.insert(END, "Sending an SMS.....\n")
textbox.update()
response_SMS = sms.send_sms(
"Alert! The Bitcoin selling price is now : " + str(c_price) + " USD at " + str(_time) + ".")
textbox.insert(END, "This is the response " + str(response_SMS) + ".\n")
textbox.update()
# Send Mail
textbox.insert(END, "Making request to Mailgun to send an email.....\n")
textbox.update()
response_mail = mailer.send_email("PRICE ALERT", "Alert! The Bitcoin selling price is now : " + str(
c_price) + " USD at " + str(_time))
response_text = json.loads(response_mail.text)
textbox.insert(END, "Response received from Mailgun is:" + str(response_text['message']) + ".\n")
textbox.update()
# Send Telegram Alert
message = 'Alert! The Bitcoin selling price is now : ' + str(c_price) + ' USD at ' + str(_time)
telegram_status = send_telegram_message(message)
textbox.insert(END, "This is the Telegram status: " + str(telegram_status) + ".\n")
textbox.update()
else:
response = mybolt.digitalWrite('0', 'LOW')
textbox.insert(END, "Bitcoin price is still down, please try after sometime." + "\n")
textbox.update()
textbox.update()
time.sleep(20)
master = tkinter.Tk()
master.title("-Bitcoin Price Alert and Prediction System-")
master.geometry('1440x900')
scrollbar = tkinter.Scrollbar(master)
scrollbar.pack(side=RIGHT, fill=tkinter.Y)
textbox = tkinter.Text(master)
textbox.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)
# attach textbox to scrollbar
tkinter.Label(master, text="Enter the selling price of bitcoin in USD (only numerics)").place(relx=0.3, rely=0.1,
anchor=tkinter.CENTER)
e1 = tkinter.Entry(master, validate="key")
e1['validatecommand'] = (e1.register(testVal), '%P', '%d')
e1.var = tkinter.StringVar()
e1['textvariable'] = e1.var
e1.var.trace_add('write', toggle_state)
e1.place(relx=0.7, rely=0.1, anchor=tkinter.CENTER)
tkinter.Button(master,
text='Quit',
command=master.quit).place(relx=0.3, rely=0.9, anchor=tkinter.CENTER)
button1 = tkinter.Button(master,
text='Submit', command=printSomething, state='disabled')
button1.place(relx=0.7, rely=0.9, anchor=tkinter.CENTER)
textbox.config(yscrollcommand=scrollbar.set)
scrollbar.config(command=textbox.yview)
tkinter.mainloop() | alert/__init__.py | import json
import time
import tkinter
from tkinter import RIGHT, END
try:
import tkinter
except ImportError:
import Tkinter as tk
import requests
from boltiot import Bolt, Sms, Email
from alert import conf
def toggle_state(*_):
if e1.var.get():
button1['state'] = 'normal'
else:
button1['state'] = 'disabled'
def send_telegram_message(message):
url = "https://api.telegram.org/" + conf.telegram_bot_id + "/sendMessage"
data = {"chat_id": conf.telegram_chat_id,
"text": message
}
try:
response = requests.request(
"GET",
url,
params=data
)
print("This is the Telegram response")
print(response.text)
telegram_data = json.loads(response.text)
return telegram_data["ok"]
except Exception as e:
print("An error occurred in sending the alert message via Telegram")
print(e)
return False
def get_bitcoin_price():
URL = "https://min-api.cryptocompare.com/data/price?fsym=BTC&tsyms=USD,JPY,EUR,INR" # REPLACE WITH CORRECT URL
respons = requests.request("GET", URL)
respons = json.loads(respons.text)
current_price = respons["USD"]
return current_price
mybolt = Bolt(conf.api_key, conf.device_id)
sms = Sms(conf.SSID, conf.AUTH_TOKEN, conf.TO_NUMBER, conf.FROM_NUMBER)
mailer = Email(conf.MAILGUN_API_KEY, conf.SANDBOX_URL, conf.SENDER_MAIL, conf.RECIPIENT_MAIL)
def testVal(inStr, acttyp):
if acttyp == '1': # insert
if not inStr.isdigit():
return False
return True
def printSomething():
while True:
textbox.update()
c_price = get_bitcoin_price()
textbox.update()
_time = time.ctime()
textbox.insert(END, "The Current Bitcoin Price is: " + str(get_bitcoin_price()) + " USD" + ", at " + str(
time.ctime()) + ".\n")
print(get_bitcoin_price(), str(time.ctime()))
textbox.update()
if c_price >= int(e1.get()):
textbox.insert(END, "Alert!!!, The Current Bitcoin Price is: " + str(
get_bitcoin_price()) + " USD" + ", at " + str(
time.ctime()) + ".\n")
# Enable Buzzer
response_buzzer = mybolt.digitalWrite('0', 'HIGH')
print(response_buzzer)
buzzer_data = json.loads(response_buzzer)
if buzzer_data["success"] == 1:
textbox.insert(END, "Buzzer is now active.\n")
else:
textbox.insert(END, "Unable to activate buzzer due to " + str(buzzer_data["value"]) + ".\n")
textbox.update()
# Send SMS
textbox.insert(END, "Sending an SMS.....\n")
textbox.update()
response_SMS = sms.send_sms(
"Alert! The Bitcoin selling price is now : " + str(c_price) + " USD at " + str(_time) + ".")
textbox.insert(END, "This is the response " + str(response_SMS) + ".\n")
textbox.update()
# Send Mail
textbox.insert(END, "Making request to Mailgun to send an email.....\n")
textbox.update()
response_mail = mailer.send_email("PRICE ALERT", "Alert! The Bitcoin selling price is now : " + str(
c_price) + " USD at " + str(_time))
response_text = json.loads(response_mail.text)
textbox.insert(END, "Response received from Mailgun is:" + str(response_text['message']) + ".\n")
textbox.update()
# Send Telegram Alert
message = 'Alert! The Bitcoin selling price is now : ' + str(c_price) + ' USD at ' + str(_time)
telegram_status = send_telegram_message(message)
textbox.insert(END, "This is the Telegram status: " + str(telegram_status) + ".\n")
textbox.update()
else:
response = mybolt.digitalWrite('0', 'LOW')
textbox.insert(END, "Bitcoin price is still down, please try after sometime." + "\n")
textbox.update()
textbox.update()
time.sleep(20)
master = tkinter.Tk()
master.title("-Bitcoin Price Alert and Prediction System-")
master.geometry('1440x900')
scrollbar = tkinter.Scrollbar(master)
scrollbar.pack(side=RIGHT, fill=tkinter.Y)
textbox = tkinter.Text(master)
textbox.place(relx=0.5, rely=0.5, anchor=tkinter.CENTER)
# attach textbox to scrollbar
tkinter.Label(master, text="Enter the selling price of bitcoin in USD (only numerics)").place(relx=0.3, rely=0.1,
anchor=tkinter.CENTER)
e1 = tkinter.Entry(master, validate="key")
e1['validatecommand'] = (e1.register(testVal), '%P', '%d')
e1.var = tkinter.StringVar()
e1['textvariable'] = e1.var
e1.var.trace_add('write', toggle_state)
e1.place(relx=0.7, rely=0.1, anchor=tkinter.CENTER)
tkinter.Button(master,
text='Quit',
command=master.quit).place(relx=0.3, rely=0.9, anchor=tkinter.CENTER)
button1 = tkinter.Button(master,
text='Submit', command=printSomething, state='disabled')
button1.place(relx=0.7, rely=0.9, anchor=tkinter.CENTER)
textbox.config(yscrollcommand=scrollbar.set)
scrollbar.config(command=textbox.yview)
tkinter.mainloop() | 0.260201 | 0.060808 |
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Netconf(_Entity_):
"""
Top\-level element in the notification namespace
.. attribute:: streams
The list of event streams supported by the system. When a query is issued, the returned set of streams is determined based on user privileges
**type**\: :py:class:`Streams <ydk.models.cisco_ios_xr.nc_notifications.Netconf.Streams>`
**config**\: False
"""
_prefix = 'manageEvent'
_revision = '2008-07-14'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Netconf, self).__init__()
self._top_entity = None
self.yang_name = "netconf"
self.yang_parent_name = "nc-notifications"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("streams", ("streams", Netconf.Streams))])
self._leafs = OrderedDict()
self.streams = Netconf.Streams()
self.streams.parent = self
self._children_name_map["streams"] = "streams"
self._segment_path = lambda: "nc-notifications:netconf"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Netconf, [], name, value)
class Streams(_Entity_):
"""
The list of event streams supported by the system. When
a query is issued, the returned set of streams is
determined based on user privileges.
.. attribute:: stream
Stream name, description and other information
**type**\: list of :py:class:`Stream <ydk.models.cisco_ios_xr.nc_notifications.Netconf.Streams.Stream>`
**config**\: False
"""
_prefix = 'manageEvent'
_revision = '2008-07-14'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Netconf.Streams, self).__init__()
self.yang_name = "streams"
self.yang_parent_name = "netconf"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("stream", ("stream", Netconf.Streams.Stream))])
self._leafs = OrderedDict()
self.stream = YList(self)
self._segment_path = lambda: "streams"
self._absolute_path = lambda: "nc-notifications:netconf/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Netconf.Streams, [], name, value)
class Stream(_Entity_):
"""
Stream name, description and other information.
.. attribute:: name (key)
The name of the event stream. If this is the default NETCONF stream, this must have the value 'NETCONF'
**type**\: str
**config**\: False
.. attribute:: description
A description of the event stream, including such information as the type of events that are sent over this stream
**type**\: str
**mandatory**\: True
**config**\: False
.. attribute:: replaysupport
A description of the event stream, including such information as the type of events that are sent over this stream
**type**\: bool
**mandatory**\: True
**config**\: False
.. attribute:: replaylogcreationtime
The timestamp of the creation of the log used to support the replay function on this stream. Note that this might be earlier then the earliest available notification in the log. This object is updated if the log resets for some reason. This object MUST be present if replay is supported
**type**\: str
**pattern:** \\d{4}\-\\d{2}\-\\d{2}T\\d{2}\:\\d{2}\:\\d{2}(\\.\\d+)?(Z\|[\\+\\\-]\\d{2}\:\\d{2})
**config**\: False
"""
_prefix = 'manageEvent'
_revision = '2008-07-14'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Netconf.Streams.Stream, self).__init__()
self.yang_name = "stream"
self.yang_parent_name = "streams"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['name']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('name', (YLeaf(YType.str, 'name'), ['str'])),
('description', (YLeaf(YType.str, 'description'), ['str'])),
('replaysupport', (YLeaf(YType.boolean, 'replaySupport'), ['bool'])),
('replaylogcreationtime', (YLeaf(YType.str, 'replayLogCreationTime'), ['str'])),
])
self.name = None
self.description = None
self.replaysupport = None
self.replaylogcreationtime = None
self._segment_path = lambda: "stream" + "[name='" + str(self.name) + "']"
self._absolute_path = lambda: "nc-notifications:netconf/streams/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Netconf.Streams.Stream, ['name', 'description', 'replaysupport', 'replaylogcreationtime'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
return meta._meta_table['Netconf.Streams.Stream']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
return meta._meta_table['Netconf.Streams']['meta_info']
def clone_ptr(self):
self._top_entity = Netconf()
return self._top_entity
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
return meta._meta_table['Netconf']['meta_info'] | cisco-ios-xr/ydk/models/cisco_ios_xr/nc_notifications.py | import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Netconf(_Entity_):
    """
    Top\-level element in the notification namespace

    .. attribute:: streams

        The list of event streams supported by the system. When a query is issued, the returned set of streams is determined based on user privileges
        **type**\: :py:class:`Streams <ydk.models.cisco_ios_xr.nc_notifications.Netconf.Streams>`
        **config**\: False

    """

    _prefix = 'manageEvent'
    _revision = '2008-07-14'

    def __init__(self):
        # The explicit-arguments form of super() is kept for Python 2 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Netconf, self).__init__()
        self._top_entity = None

        self.yang_name = "netconf"
        self.yang_parent_name = "nc-notifications"
        self.is_top_level_class = True
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("streams", ("streams", Netconf.Streams))])
        self._leafs = OrderedDict()

        self.streams = Netconf.Streams()
        self.streams.parent = self
        self._children_name_map["streams"] = "streams"
        self._segment_path = lambda: "nc-notifications:netconf"
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Netconf, [], name, value)


    class Streams(_Entity_):
        """
        The list of event streams supported by the system. When
        a query is issued, the returned set of streams is
        determined based on user privileges.

        .. attribute:: stream

            Stream name, description and other information
            **type**\: list of :py:class:`Stream <ydk.models.cisco_ios_xr.nc_notifications.Netconf.Streams.Stream>`
            **config**\: False

        """

        _prefix = 'manageEvent'
        _revision = '2008-07-14'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Netconf.Streams, self).__init__()

            self.yang_name = "streams"
            self.yang_parent_name = "netconf"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([("stream", ("stream", Netconf.Streams.Stream))])
            self._leafs = OrderedDict()

            self.stream = YList(self)
            self._segment_path = lambda: "streams"
            self._absolute_path = lambda: "nc-notifications:netconf/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Netconf.Streams, [], name, value)


        class Stream(_Entity_):
            """
            Stream name, description and other information.

            .. attribute:: name  (key)

                The name of the event stream. If this is the default NETCONF stream, this must have the value 'NETCONF'
                **type**\: str
                **config**\: False

            .. attribute:: description

                A description of the event stream, including such information as the type of events that are sent over this stream
                **type**\: str
                **mandatory**\: True
                **config**\: False

            .. attribute:: replaysupport

                A description of the event stream, including such information as the type of events that are sent over this stream
                **type**\: bool
                **mandatory**\: True
                **config**\: False

            .. attribute:: replaylogcreationtime

                The timestamp of the creation of the log used to support the replay function on this stream. Note that this might be earlier then the earliest available notification in the log. This object is updated if the log resets for some reason. This object MUST be present if replay is supported
                **type**\: str
                **pattern:** \\d{4}\-\\d{2}\-\\d{2}T\\d{2}\:\\d{2}\:\\d{2}(\\.\\d+)?(Z\|[\\+\\\-]\\d{2}\:\\d{2})
                **config**\: False

            """

            _prefix = 'manageEvent'
            _revision = '2008-07-14'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Netconf.Streams.Stream, self).__init__()

                self.yang_name = "stream"
                self.yang_parent_name = "streams"
                self.is_top_level_class = False
                self.has_list_ancestor = False
                self.ylist_key_names = ['name']
                self._child_classes = OrderedDict([])
                self._leafs = OrderedDict([
                    ('name', (YLeaf(YType.str, 'name'), ['str'])),
                    ('description', (YLeaf(YType.str, 'description'), ['str'])),
                    ('replaysupport', (YLeaf(YType.boolean, 'replaySupport'), ['bool'])),
                    ('replaylogcreationtime', (YLeaf(YType.str, 'replayLogCreationTime'), ['str'])),
                ])
                self.name = None
                self.description = None
                self.replaysupport = None
                self.replaylogcreationtime = None
                self._segment_path = lambda: "stream" + "[name='" + str(self.name) + "']"
                self._absolute_path = lambda: "nc-notifications:netconf/streams/%s" % self._segment_path()
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Netconf.Streams.Stream, ['name', 'description', 'replaysupport', 'replaylogcreationtime'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
                return meta._meta_table['Netconf.Streams.Stream']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
            return meta._meta_table['Netconf.Streams']['meta_info']

    def clone_ptr(self):
        """Return a fresh top-level Netconf entity and remember it on self."""
        self._top_entity = Netconf()
        return self._top_entity

    @staticmethod
    def _meta_info():
        # NOTE(review): the final line of this method was fused with
        # dataset-dump residue ("| 0.363986 | 0.10466 |") in the source;
        # restored to the plain return statement.
        from ydk.models.cisco_ios_xr._meta import _nc_notifications as meta
        return meta._meta_table['Netconf']['meta_info']
import torch
import numpy as np
from plato.config import Config
from plato.trainers import basic
def flatten_weights_from_model(model):
    """ Return the weights of the given model as a 1-D tensor.

    Flattens each parameter and concatenates them in a single call.
    The original re-concatenated the accumulated tensor for every
    parameter, copying all previously gathered weights each time
    (quadratic in total weight count).
    """
    flat_params = [torch.flatten(param) for param in model.parameters()]
    if not flat_params:
        # Parameter-less model: preserve the original empty-tensor result.
        return torch.tensor([], requires_grad=False)
    return torch.cat(flat_params)
class FedProxLocalObjective:
    """ Representing the local objective of FedProx clients. """

    def __init__(self, model):
        self.model = model
        # Snapshot of the weights the client starts the round with.
        self.init_global_weights = flatten_weights_from_model(model)

    def compute_objective(self, outputs, labels):
        """ Compute the objective the FedProx client wishes to minimize:
        cross-entropy plus (mu / 2) times the L2 distance between the
        current weights and the initial global weights.
        """
        mu = Config().clients.proximal_term_penalty_constant
        weight_drift = flatten_weights_from_model(self.model) - self.init_global_weights
        prox_term = mu / 2 * torch.linalg.norm(weight_drift, ord=2)
        return torch.nn.CrossEntropyLoss()(outputs, labels) + prox_term
class Trainer(basic.Trainer):
    """ The federated learning trainer for the FedProx client. """

    def train_process(self, config, trainset, sampler, cut_layer=None):
        """The main training loop in FedProx framework.

        For FedProx, the server will accept partial solutions from straggling
        clients after waiting them for a certain amount of time. To re-create
        this scenario in an experiment, a proportion of the selected clients
        will train for a smaller number of epochs to simulate the stragglers
        that return with partial solutions, as mentioned in Section 5.2.
        """
        if hasattr(Config().clients, 'straggler_simulation') and Config(
        ).clients.straggler_simulation:
            # Seed with the client id so straggler selection is reproducible
            # per client.
            np.random.seed(self.client_id)

            # Determine whether this selected client is a straggler
            strag_prop = Config().clients.straggler_percentage / 100
            is_straggler = np.random.choice([True, False],
                                            p=[strag_prop, 1 - strag_prop])

            if is_straggler:
                # Choose the epoch uniformly as mentioned in Section 5.2 of
                # the paper. Guard against a global epoch count of 1: the
                # original called np.random.choice(np.arange(1, 1)), which
                # raises ValueError on an empty range.
                global_epochs = Config().trainer.epochs
                if global_epochs > 1:
                    config['epochs'] = int(
                        np.random.choice(np.arange(1, global_epochs)))

        super(Trainer, self).train_process(config, trainset, sampler,
                                           cut_layer)

    def loss_criterion(self, model):
        """ Return the loss criterion for FedProx clients. """
        local_obj = FedProxLocalObjective(model)
        return local_obj.compute_objective
return local_obj.compute_objective | examples/fedprox/fedprox_trainer.py | import torch
import numpy as np
from plato.config import Config
from plato.trainers import basic
def flatten_weights_from_model(model):
""" Return the weights of the given model as a 1-D tensor """
weights = torch.tensor([], requires_grad=False)
for param in model.parameters():
weights = torch.cat((weights, torch.flatten(param)))
return weights
class FedProxLocalObjective:
""" Representing the local objective of FedProx clients. """
def __init__(self, model):
self.model = model
self.init_global_weights = flatten_weights_from_model(model)
def compute_objective(self, outputs, labels):
""" Compute the objective the FedProx client wishes to minimize. """
cur_weights = flatten_weights_from_model(self.model)
mu = Config().clients.proximal_term_penalty_constant
prox_term = mu / 2 * torch.linalg.norm(
cur_weights - self.init_global_weights, ord=2)
local_function = torch.nn.CrossEntropyLoss()
h = local_function(outputs, labels) + prox_term
return h
class Trainer(basic.Trainer):
""" The federated learning trainer for the FedProx client. """
def train_process(self, config, trainset, sampler, cut_layer=None):
"""The main training loop in FedProx framework. """
# For FedProx, the server will accept partial solutions from straggling clients
# after waiting them for a certain amount of time. To re-create this scenario in
# an experiment, a proportion of the selected clients will train for a smaller
# number of epochs to simulate the stragglers that return with partial solutions,
# as mentioned in Section 5.2
if hasattr(Config().clients, 'straggler_simulation') and Config(
).clients.straggler_simulation:
np.random.seed(self.client_id)
# Determine whether this selected client is a straggler
strag_prop = Config().clients.straggler_percentage / 100
is_straggler = np.random.choice([True, False],
p=[strag_prop, 1 - strag_prop])
if is_straggler:
# Choose the epoch uniformly as mentioned in Section 5.2 of the paper
global_epochs = Config().trainer.epochs
config['epochs'] = np.random.choice(np.arange(
1, global_epochs))
super(Trainer, self).train_process(config, trainset, sampler,
cut_layer)
def loss_criterion(self, model):
""" Return the loss criterion for FedProx clients. """
local_obj = FedProxLocalObjective(model)
return local_obj.compute_objective | 0.865551 | 0.612686 |
class Node:
    """A singly linked list node: a value plus a pointer to the next node."""

    def __init__(self, val):
        self.val = val
        self.next = None  # linked up by the caller
def iterative_linked_list_values(head):
    """Return every value in the list, front to back, via a cursor walk."""
    collected = []
    node = head
    while node is not None:
        collected.append(node.val)
        node = node.next
    return collected
def recursive_linked_list_values(head):
    """Return every value in the list, gathered by recursive traversal."""
    acc = []
    _recursive_linked_list_values(head, acc)
    return acc

def _recursive_linked_list_values(head, values):
    """Helper: append the head's value, then recurse on the tail."""
    if head is not None:
        values.append(head.val)
        _recursive_linked_list_values(head.next, values)
def iterative_get_node_value(head, index):
    """Return the value of the node at position `index` (0-based), or None
    if the index is out of range.

    Bug fix: the original tested `count is not index`, which compares
    object identity rather than value. CPython only caches small ints,
    so for indices >= 257 the identity test can be True even when the
    counts are equal, walking past the target node. Use `!=`.
    """
    current = head
    count = 0
    while current is not None:
        if count != index:
            current = current.next
            count += 1
        else:
            return current.val
    return None
def recursive_get_node_value(head, index):
    """Return the value at position `index` via recursion, or None past the end."""
    if head is None:
        return None
    return head.val if index == 0 else recursive_get_node_value(head.next, index - 1)
def iterative_sum_linked_list(head):
    """Sum every value in the list with a simple cursor walk."""
    acc = 0
    node = head
    while node is not None:
        acc += node.val
        node = node.next
    return acc
def recursive_sum_linked_list(head):
    """Sum the list recursively: head's value plus the sum of the tail."""
    return 0 if head is None else head.val + recursive_sum_linked_list(head.next)
def iterative_reverse_linked_list(head):
    """Reverse the list in place and return the new head.

    (Local `next` renamed to avoid shadowing the builtin.)
    """
    prev_node = None
    node = head
    while node is not None:
        following = node.next  # remember the rest before rewiring
        node.next = prev_node
        prev_node = node
        node = following
    return prev_node
def recursive_reverse_linked_list(head, prev=None):
    """Reverse the list recursively; `prev` carries the already-reversed prefix."""
    if head is None:
        return prev
    tail = head.next
    head.next = prev
    return recursive_reverse_linked_list(tail, head)
# Demo: build the list 2 -> 5 -> -1 -> 3 -> 8 and exercise every helper.
a = Node(2)
b = Node(5)
c = Node(-1)
d = Node(3)
e = Node(8)
a.next = b
b.next = c
c.next = d
d.next = e

print(iterative_linked_list_values(a))
print(recursive_linked_list_values(a))
print(iterative_sum_linked_list(a))
print(recursive_sum_linked_list(a))
print(iterative_get_node_value(a, 4))
print(recursive_get_node_value(a, 4))

# After the first (in-place) reversal the head is `e`, so traverse from it;
# reversing again from `e` restores the original order starting at `a`.
iterative_reverse_linked_list(a)
print(iterative_linked_list_values(e))
recursive_reverse_linked_list(e)
# NOTE(review): the final print was fused with dataset-dump residue
# ("| linked-list/linked-list.py | class Node:") in the source; restored.
print(recursive_linked_list_values(a))
def __init__(self, val):
self.val = val
self.next = None
def iterative_linked_list_values(head):
values = []
current = head
while current is not None:
values.append(current.val)
current = current.next
return values
def recursive_linked_list_values(head):
values = []
_recursive_linked_list_values(head, values)
return values
def _recursive_linked_list_values(head, values):
if head is None:
return
values.append(head.val)
_recursive_linked_list_values(head.next, values)
def iterative_get_node_value(head, index):
current = head
count = 0
while current is not None:
if count is not index:
current = current.next
count += 1
else:
return current.val
return None
def recursive_get_node_value(head, index):
if head is None:
return None
if index == 0:
return head.val
return recursive_get_node_value(head.next, index - 1)
def iterative_sum_linked_list(head):
total_sum = 0
current = head
while current is not None:
total_sum += current.val
current = current.next
return total_sum
def recursive_sum_linked_list(head):
if head is None:
return 0
return head.val + recursive_sum_linked_list(head.next)
def iterative_reverse_linked_list(head):
prev = None
current = head
while current is not None:
next = current.next
current.next = prev
prev = current
current = next
return prev
def recursive_reverse_linked_list(head, prev=None):
if head is None:
return prev
next = head.next
head.next = prev
return recursive_reverse_linked_list(next, head)
a = Node(2)
b = Node(5)
c = Node(-1)
d = Node(3)
e = Node(8)
a.next = b
b.next = c
c.next = d
d.next = e
print(iterative_linked_list_values(a))
print(recursive_linked_list_values(a))
print(iterative_sum_linked_list(a))
print(recursive_sum_linked_list(a))
print(iterative_get_node_value(a, 4))
print(recursive_get_node_value(a, 4))
iterative_reverse_linked_list(a)
print(iterative_linked_list_values(e))
recursive_reverse_linked_list(e)
print(recursive_linked_list_values(a)) | 0.593374 | 0.348285 |
import numpy as np
from collections import deque
import random
from abc import ABC, abstractmethod
# Ornstein-Ulhenbeck Process
# Taken from #https://github.com/vitchyr/rlkit/blob/master/rlkit/exploration_strategies/ou_strategy.py
class OUNoise(object):
    """Ornstein-Uhlenbeck action noise with linearly annealed sigma.

    Taken from
    https://github.com/vitchyr/rlkit/blob/master/rlkit/exploration_strategies/ou_strategy.py
    """

    def __init__(self, action_dim, action_low, action_high, mu=0.0, theta=0.15, max_sigma=0.5, min_sigma=0.5,
                 decay_period=100000):
        self.mu = mu
        self.theta = theta
        self.sigma = max_sigma
        self.max_sigma = max_sigma
        self.min_sigma = min_sigma
        self.decay_period = decay_period
        self.action_dim = action_dim
        self.low = action_low
        self.high = action_high
        self.reset()

    def reset(self):
        """Reset the internal noise state to the mean mu."""
        self.state = np.ones(self.action_dim) * self.mu

    def evolve_state(self):
        """Advance the OU process one step and return the new state."""
        drift = self.theta * (self.mu - self.state)
        diffusion = self.sigma * np.random.randn(self.action_dim)
        self.state = self.state + drift + diffusion
        return self.state

    def get_action(self, action, t=0):
        """Return `action` perturbed by OU noise, clipped to the action bounds."""
        noisy_action = action + self.evolve_state()
        # Linearly anneal sigma from max_sigma to min_sigma over decay_period
        # (with the defaults both are 0.5, so sigma stays constant).
        self.sigma = self.max_sigma - (self.max_sigma - self.min_sigma) * min(1.0, t / self.decay_period)
        return np.clip(noisy_action, self.low, self.high)
class Memory(ABC):
    """Abstract interface for experience-replay buffers."""

    @abstractmethod
    def push(self, state, action, reward, next_state, done, *args, **kwargs):
        """Store one transition; kwargs carry implementation-specific extras."""
        pass

    @abstractmethod
    def sample(self, batch_size):
        """Return a batch of transitions as five parallel lists."""
        pass


class MemorySeq(Memory):
    """Uniform-sampling replay buffer backed by a bounded deque."""

    def __init__(self, max_size):
        self.max_size = max_size
        self.buffer = deque(maxlen=max_size)  # oldest entries drop off automatically

    def push(self, state, action, reward, next_state, done, *args, **kwargs):
        """Append one transition; the reward is wrapped in a 1-element array."""
        experience = (state, action, np.array([reward]), next_state, done)
        self.buffer.append(experience)

    def sample(self, batch_size):
        """Uniformly sample up to batch_size transitions (without replacement)."""
        state_batch = []
        action_batch = []
        reward_batch = []
        next_state_batch = []
        done_batch = []

        # this line is the leading term for time complexity!!
        batch = random.sample(self.buffer, min(len(self.buffer), batch_size))

        for experience in batch:
            state, action, reward, next_state, done = experience
            state_batch.append(state)
            action_batch.append(action)
            reward_batch.append(reward)
            next_state_batch.append(next_state)
            done_batch.append(done)

        return state_batch, action_batch, reward_batch, next_state_batch, done_batch

    def __len__(self):
        return len(self.buffer)


class MemoryRank(Memory):
    """Rank-based prioritized replay: the buffer is periodically sorted by
    priority and entries are sampled with probability proportional to 1/rank."""

    def __init__(self, max_size, sort_period=1):
        self.max_size = max_size
        self.buffer = []  # list of (priority, experience); trimmed on sort()
        self.sort_period = sort_period
        self.sort_ctr = sort_period  # start at the period so the first sample() sorts
        # 1/rank sampling weights, one per possible buffer slot.
        self.weights = [1 / i for i in range(1, self.max_size + 1)]

    def push(self, state, action, reward, next_state, done, *args, **kwargs):
        """Append a transition with an optional `priority` kwarg (default 0).

        NOTE(review): the buffer can grow past max_size between sorts; it is
        only trimmed when sort() runs.
        """
        priority = kwargs.get('priority', 0)
        experience = (state, action, np.array([reward]), next_state, done)
        self.buffer.append((priority, experience))

    def sort(self):
        """Order the buffer by descending priority and trim it to max_size."""
        self.buffer.sort(key=lambda x: x[0], reverse=True)
        self.buffer = self.buffer[:self.max_size]

    def sample(self, batch_size):
        """Sample batch_size transitions (with replacement), weighted by rank."""
        state_batch = []
        action_batch = []
        reward_batch = []
        next_state_batch = []
        done_batch = []

        # increment counter and sort periodically
        if self.sort_ctr == self.sort_period:
            self.sort()
            self.sort_ctr = 0
        self.sort_ctr += 1

        # sampling is weighed using TD loss and rank-based representation
        batch = random.choices(population=self.buffer, weights=self.weights[:len(self.buffer)], k=batch_size)

        for _, experience in batch:
            state, action, reward, next_state, done = experience
            state_batch.append(state)
            action_batch.append(action)
            reward_batch.append(reward)
            next_state_batch.append(next_state)
            done_batch.append(done)

        return state_batch, action_batch, reward_batch, next_state_batch, done_batch

    def __len__(self):
        # NOTE(review): the original final line was fused with dataset-dump
        # residue ("| simulation/... | import numpy as np"); restored here.
        return len(self.buffer)
from collections import deque
import random
from abc import ABC, abstractmethod
# Ornstein-Ulhenbeck Process
# Taken from #https://github.com/vitchyr/rlkit/blob/master/rlkit/exploration_strategies/ou_strategy.py
class OUNoise(object):
def __init__(self, action_dim, action_low, action_high, mu=0.0, theta=0.15, max_sigma=0.5, min_sigma=0.5,
decay_period=100000):
self.mu = mu
self.theta = theta
self.sigma = max_sigma
self.max_sigma = max_sigma
self.min_sigma = min_sigma
self.decay_period = decay_period
self.action_dim = action_dim
self.low = action_low
self.high = action_high
self.reset()
def reset(self):
self.state = np.ones(self.action_dim) * self.mu
def evolve_state(self):
x = self.state
dx = self.theta * (self.mu - x) + self.sigma * np.random.randn(self.action_dim)
self.state = x + dx
return self.state
def get_action(self, action, t=0):
ou_state = self.evolve_state()
self.sigma = self.max_sigma - (self.max_sigma - self.min_sigma) * min(1.0, t / self.decay_period)
return np.clip(action + ou_state, self.low, self.high)
class Memory(ABC):
@abstractmethod
def push(self, state, action, reward, next_state, done, *args, **kwargs):
pass
@abstractmethod
def sample(self, batch_size):
pass
class MemorySeq(Memory):
def __init__(self, max_size):
self.max_size = max_size
self.buffer = deque(maxlen=max_size)
def push(self, state, action, reward, next_state, done, *args, **kwargs):
experience = (state, action, np.array([reward]), next_state, done)
self.buffer.append(experience)
def sample(self, batch_size):
state_batch = []
action_batch = []
reward_batch = []
next_state_batch = []
done_batch = []
# this line is the leading term for time complexity!!
batch = random.sample(self.buffer, min(len(self.buffer), batch_size))
# batch = random.sample(self.buffer, batch_size)
for experience in batch:
state, action, reward, next_state, done = experience
state_batch.append(state)
action_batch.append(action)
reward_batch.append(reward)
next_state_batch.append(next_state)
done_batch.append(done)
return state_batch, action_batch, reward_batch, next_state_batch, done_batch
def __len__(self):
return len(self.buffer)
class MemoryRank(Memory):
def __init__(self, max_size, sort_period=1):
self.max_size = max_size
self.buffer = []
self.sort_period = sort_period
self.sort_ctr = sort_period
self.weights = [1 / i for i in range(1, self.max_size + 1)]
def push(self, state, action, reward, next_state, done, *args, **kwargs):
priority = kwargs.get('priority', 0)
experience = (state, action, np.array([reward]), next_state, done)
self.buffer.append((priority, experience))
def sort(self):
self.buffer.sort(key=lambda x: x[0], reverse=True)
self.buffer = self.buffer[:self.max_size]
def sample(self, batch_size):
state_batch = []
action_batch = []
reward_batch = []
next_state_batch = []
done_batch = []
# increment counter and sort periodically
if self.sort_ctr == self.sort_period:
self.sort()
self.sort_ctr = 0
self.sort_ctr += 1
# sampling is weighed using TD loss and rank-based representation
batch = random.choices(population=self.buffer, weights=self.weights[:len(self.buffer)], k=batch_size)
for _, experience in batch:
state, action, reward, next_state, done = experience
state_batch.append(state)
action_batch.append(action)
reward_batch.append(reward)
next_state_batch.append(next_state)
done_batch.append(done)
return state_batch, action_batch, reward_batch, next_state_batch, done_batch
def __len__(self):
return len(self.buffer) | 0.818592 | 0.354238 |
from __future__ import print_function
from six import string_types
import json
import os
from sys import exit
class Router(object):
    """Dispatch an incoming JSON-RPC request URL to its endpoint class."""

    # URL path -> endpoint class mapping; set in __init__.
    _dictURLPathToClassName = None

    """
    * @param array arrURLPathToClassName. Associative array. URLs as keys.
    * Class names should be fully qualified with namespace.
    * URLs must be absolute, yet must not start or end with a slash.
    * @param string strRequestURI. Should be read from os.environ["REQUEST_URI"].
    * @return None.
    """
    def __init__(self, dictURLPathToClassName, strRequestURI):
        """Not validating class names to not invoke the autoloader, if any. """
        self._dictURLPathToClassName = dictURLPathToClassName
        self._route(strRequestURI)

    """
    * @param string strRequestURI. Should be os.environ["REQUEST_METHOD"]-routed request URI.
    * @return null.
    """
    def _route(self, strRequestURI):
        """Normalize strRequestURI and instantiate the matching endpoint class.

        On a miss, prints an error (plain text for GET, a JSON-RPC error
        object otherwise) and exits with status 1.
        """
        if not isinstance(strRequestURI, str):
            raise Exception("strRequestURI must be a string.")
        # str methods return new strings: the original discarded the results
        # of strip()/rstrip(), so the URI was never actually normalized.
        strRequestURI = strRequestURI.strip("/")
        # clean GET params from source URL aka get what's before the ?
        strRequestURI = strRequestURI.split("?", 1)[0]
        # clean trailing slash (again, keep the returned string)
        strRequestURI = strRequestURI.rstrip("/")

        if strRequestURI in self._dictURLPathToClassName:
            endpoint = self._dictURLPathToClassName[strRequestURI]()
        elif strRequestURI + "/" in self._dictURLPathToClassName:
            endpoint = self._dictURLPathToClassName[strRequestURI + "/"]()
        else:
            if os.environ.get("REQUEST_METHOD") == "GET":
                """
                header("Content-type: text/html")
                """
                print("Page not found. Unknown JSON-RPC endpoint URL: " + strRequestURI)
            else:
                """
                header("HTTP/1.1 404 Not Found", true, 404)
                //header("Content-Type: text/plain charset=utf-8")
                header("Content-type: application/json")
                header("Cache-Control: no-cache, must-revalidate")
                header("Expires: Mon, 26 Jul 1991 05:00:00 GMT")
                header("Accept-Ranges: none")
                header("Connection: close")
                """
                # Build a well-formed JSON-RPC 2.0 error object. The original
                # passed a malformed literal to json.loads, which raised
                # ValueError instead of printing anything.
                print(json.dumps({
                    "jsonrpc": "2.0",
                    "error": {
                        "code": -32099,
                        "message": "%s...Unknown JSON-RPC endpoint URL: %s" % (
                            os.environ.get("HTTP_HOST"), strRequestURI),
                    },
                    "id": None,
                }))
            exit(1)
exit(1) | jsonrpc2_base/endpoint/router.py | from __future__ import print_function
from six import string_types
import json
import os
from sys import exit
class Router(object):
"""
"""
_dictURLPathToClassName = None
"""
* @param array arrURLPathToClassName. Associative array. URLs as keys.
* Class names should be fully qualified with namespace.
* URLs must be absolute, yet must not start or end with a slash.
* @param string strRequestURI. Should be read from os.environ["REQUEST_URI"].
* @return None.
"""
def __init__(self, dictURLPathToClassName, strRequestURI):
"""Not validating class names to not invoke the autoloader, if any. """
self._dictURLPathToClassName = dictURLPathToClassName
self._route(strRequestURI)
"""
* @param string strRequestURI. Should be os.environ["REQUEST_URI"].
* @return null.
"""
def _route(self, strRequestURI):
if not isinstance(strRequestURI, string_types):
raise Exception("strRequestURI must be a string.")
"""WARNING: Remove slash error-prone. May not behave as expected"""
strRequestURI.strip("/")
"""WARNING: Check 2"""
arrURLParts = strRequestURI.split("/", 2)
"""clean GET params from source URL aka get what's before the ?"""
strRequestURI = strRequestURI.split("?", 2)[0]
"""clean trailing slash"""
strRequestURI.rstrip("/")
if strRequestURI in self._dictURLPathToClassName:
endpoint = self._dictURLPathToClassName[strRequestURI]()
elif strRequestURI + "/" in self._dictURLPathToClassName:
endpoint = self._dictURLPathToClassName[strRequestURI + "/"]()
else:
if "REQUEST_METHOD" in os.environ and os.environ.get("REQUEST_METHOD") == "GET":
"""
header("Content-type: text/html")
"""
print("Page not found. Unknown JSON-RPC endpoint URL: " + strRequestURI)
else:
"""
header("HTTP/1.1 404 Not Found", true, 404)
//header("Content-Type: text/plain charset=utf-8")
header("Content-type: application/json")
header("Cache-Control: no-cache, must-revalidate")
header("Expires: Mon, 26 Jul 1991 05:00:00 GMT")
header("Accept-Ranges: none")
header("Connection: close")
"""
print(json.loads(
'"jsonrpc": "2.0", "error": {"code": -32099, "message":' + os.environ.get("HTTP_HOST") + \
'...Unknown JSON-RPC endpoint URL: ' + strRequestURI + '), "id": None'))
exit(1) | 0.445771 | 0.098469 |
__author__ = "<NAME> <<EMAIL>>"
__version__ = "0.2"
__date__ = "26/11/19"
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import datetime
import configparser
import os
import logging
import argparse
import glob
from time import gmtime, strftime
from astropy.table import Table
from astropy.io import fits
from astropy.time import Time
from des_stacks import des_stack as stack
from des_stacks.utils.loop_stack import iterate_source_loop, init_source_loop
sns.set_color_codes(palette='colorblind')
# define some DES specific lists
all_years = ['none','1','2','3','4'] # add 5 when available
all_fields = ['SN-X1','SN-X2','SN-X3','SN-C1','SN-C2','SN-C3','SN-E1','SN-E2','SN-S1','SN-S2']
all_chips = np.arange(1,62)
all_bands = ['g','r','i','z']
class optimiser():
    def __init__(self):
        # _parser() stores its results on self.parsed and self.plot; its
        # return value is None, so this local is unused.
        parsed = self._parser()

    def _parser(self):
        """Parse command-line arguments into self.parsed (dict) and self.plot.

        Fields/bands/minus-years/chips each accept either a comma- or a
        space-separated list; the nested try/excepts below normalise the
        different shapes argparse can hand back.
        """
        parser = argparse.ArgumentParser(description='Stack some DES SN images')
        parser.add_argument('-f','--field', help = 'Field(s) to stack. Separate with space or comma (e.g. X2 X3)',nargs='?',required=False,default='X2')
        parser.add_argument('-b', '--band', help = 'Bands(s) to stack. Separate with space or comma (e.g. g r)',nargs='?',required=False,default='r')
        parser.add_argument('-my','--minusyears', help = 'Which minus years to stack (e.g. 1,2,3,4,none)',nargs='?',required=False,default='1')
        parser.add_argument('-ch','--chips', help = 'Which chips to stack (e.g. [1,5] = 1,3,4)',nargs=1,required=False,default='All')
        parser.add_argument('-wd','--workdir', help = 'Working directory [coadding]', default = 'coadding')
        parser.add_argument('-l','--looptype', help ='Parameters to optimize (can be "psf", "depth", or a comma separated list of those")',required = False, default = 'depth')
        parser.add_argument('-pr','--psfrange',help = 'Range to optimize psf in (min,max): [1.5,3]',required=False,default= '1.5,3.0')
        parser.add_argument('-tr','--teffrange',help = 'Range to optimize teff in (min,max): [0,0.5]',required=False,default= '0.0,0.5')
        parser.add_argument('-st','--step',help = 'Size of step in the cut you want to optimize over (psf,teff): [0.25,0.01]',required = False, default = '0.25,0.01')
        parser.add_argument('-pl','--plot',help='Plot a heatmap of where the best cuts are?',required = False,action = 'store_true')
        parser.add_argument('-t','--tidy',help = 'Tidy up temporary files after?',action = 'store_true')
        args=parser.parse_args()
        parsed = {}
        # Fields: comma-split, then space-split, then use as given.
        try:
            fields = args.field.split(',')
        except:
            try:
                fields = args.field[0].split(' ')
            except:
                fields =args.field
        # Prefix each field with 'SN-' to match the DES field names.
        for i in range(len(fields)):
            try:
                field = fields[i]
                field = 'SN-'+field
                fields[i]=field
            except:
                fields = 'SN-'+fields[0]
        parsed['fields']=fields
        # Bands: same comma/space normalisation as fields.
        try:
            bands = args.band.split(',')
        except:
            try:
                bands = args.band[0].split(' ')
            except:
                bands = args.band
        parsed['bands']=bands
        # Minus-years: same normalisation again.
        try:
            mys = args.minusyears.split(',')
        except:
            try:
                mys = args.minusyears[0].split(' ')
            except:
                mys = args.minusyears
        parsed['mys']=mys
        # Chips: '[lo,hi]' expands to a range; otherwise comma/space lists.
        if args.chips != 'All':
            try:
                chips = args.chips[0].split(',')
            except:
                if args.chips[0][0]== '[':
                    chip_bounds = args.chips[0][1:-1].split(',')
                    chips = np.arange(int(chip_bounds[0]), int(chip_bounds[-1]))
                else:
                    chips = args.chips[0].split(' ')
        else:
            chips = args.chips
        parsed['chips']=chips
        print ('Parsed chips as %s'%chips)
        if not args.workdir:
            workdir = 'current'
        else:
            workdir = args.workdir
        parsed['workdir']=workdir
        # Loop type(s), and numeric ranges/steps with hard-coded fallbacks.
        try:
            loop_types = args.looptype.split(',')
            parsed['looptype']=loop_types
        except:
            parsed['looptype']='depth'
        try:
            parsed['teffrange'] = args.teffrange.split(',')
        except:
            parsed['teffrange'] = [0.0,0.5]
        try:
            parsed['psfrange'] = args.psfrange.split(',')
        except:
            parsed['psfrange'] = [1.5,3.0]
        try:
            parsed['step'] = args.step.split(',')
        except:
            parsed['step'] = [0.25,0.01]
        parsed['tidy']=args.tidy
        self.parsed = parsed
        self.plot = args.plot
def optimise(self,f,b,y,ch):
# a function that iterates through stacks until the best one is reached
t0,t1,ts = float(self.parsed['teffrange'][0]),float(self.parsed['teffrange'][1]),float(self.parsed['step'][1])
p0,p1,ps = float(self.parsed['psfrange'][0]),float(self.parsed['psfrange'][1]),float(self.parsed['step'][0])
wd,lt = self.parsed['workdir'],self.parsed['looptype'][0]
print(t0,t1,ts)
print(p0,p1,ps)
print (lt)
teff_range = np.arange(t0,t1,ts)
psf_range = np.arange(p0,p1,ps)
lim_df = pd.DataFrame(index = [str(r) for r in psf_range],columns=[str(r) for r in teff_range])
psf_df = pd.DataFrame(index = [str(r) for r in psf_range],columns=[str(r) for r in teff_range])#create the DataFrame to put the quality measurements in
lim_df.name = 'depth'
psf_df.name = 'psf'
for psf_cut in psf_range:
for teff_cut in teff_range:
lim,psf = self.do_stack(f,b,y,ch,wd,cuts = {'zp':None,'teff':teff_cut,'psf':psf_cut})
lim_df.loc[str(psf_cut),str(teff_cut)] = lim
psf_df.loc[str(psf_cut),str(teff_cut)] = psf
best={'depth':None,'psf':None}
'''smaller_teff_step = ts/5
smaller_psf_step = ps/5
if lt=='depth':
teff_start = best['depth'][1]
psf_start = best['depth'][0]
elif lt=='psf':
teff_start = best['psf'][1]
psf_start = best['psf'][0]
elif lt=='both':
teff_start = np.mean(best['depth'][1],best['psf'][1])
psf_start = np.mean(best['depth'][0],best['psf'][0])
zoomed_teffrange = np.arange(teff_start-float(ts)*5,teff_start+float(ts)*5,smaller_teff_step)
zoomed_psfrange = np.arange(psf_start-float(ps)*5,psf_start+float(ps)*5,smaller_psf_step)
for newpsf in zoomed_psfrange:
lim_df = lim_df.append(pd.DataFrame(index=[str(newpsf)],columns=lim_df.columns))
psf_df = psf_df.append(pd.DataFrame(index=[str(newpsf)],columns=psf_df.columns))
for newteff in zoomed_teffrange:
lim_df[str(newteff)] = ''
psf_df[str(newteff)] = ''
lim,psf = do_stack(f,b,y,ch,wd,cuts = {'zp':None,'teff':newteff,'psf':newpsf})
lim_df.loc[str(newpsf),str(newteff)] = lim
psf_df.loc[str(newpsf),str(newteff)] = psf'''
for df in [lim_df,psf_df]:
best[df.name] = [np.float(np.argmax(df.max(axis=1))),np.float(np.argmax(df.max(axis=0)))]
# ADD TO PLOT!
if self.plot:
f1,ax1 = plt.subplots()
depthmin = np.min(lim_df.min().values)
depthmax = np.max(lim_df.max().values)
depthrang = depthmax-depthmin
lim_df = lim_df.astype(float)
psf_df = psf_df.astype(float)
sns.heatmap(lim_df,ax=ax1,cmap='Oranges',cbar_kws={'label': 'Limiting Magnitude'})
ax1.set_xlabel('$\\tau_{effective} cut$')
ax1.set_ylabel('PSF cut')
plt.savefig('/media/data3/wiseman/des/coadding/optimise/optimize_teff_%s_%s_%s_%s.pdf'%(f,b,y,ch[0]))
plt.close()
f2,ax2 = plt.subplots()
sns.heatmap(psf_df,ax=ax2,cmap='Blues',cbar_kws={'label': 'Limiting Magnitude'})
ax2.set_xlabel('$\\tau_{effective} cut$')
ax2.set_ylabel('PSF cut')
plt.savefig('/media/data3/wiseman/des/coadding/optimise/optimize_psf_%s_%s_%s_%s.pdf'%(f,b,y,ch[0]))
return best
def do_stack(self,f,b,y,ch,wd,cuts):
#Performs the actual stack for a given set of cuts, and returns the limiting magnitudes and psf
print ('Making stack of',f,b,y,ch,wd,cuts)
s = stack.Stack(f,b,y,ch,wd,cuts,db=True)
scifile = os.path.join(s.band_dir,'ccd_%s_%s_%.2f_%s_clipweighted_sci.fits'%(ch[0],b,cuts['teff'],cuts['psf']))
if not os.path.isfile(scifile):
print ('Did not find a file for these cuts; doing stack')
s.do_my_stack(cuts=cuts,final=True)
else:
print ('Found a stacked file for these cuts; going to source')
s.ana_dir = os.path.join(s.band_dir,ch[0],'ana')
sourcename = os.path.join(s.ana_dir,'MY%s_%s_%s_%s_%.2f_%s_clipweighted_sci.sourcecat' %(y,f,b,ch[0],cuts['teff'],cuts['psf']))
print ('Looking for file under the name: %s'%sourcename)
if os.path.isfile(sourcename):
print ('Found a sourcecat for these cuts at: %s'%sourcename)
s.sourcecats = [sourcename]
s.cuts=cuts
else:
print ('No sourcecat yet; running source extractor')
print ('Sending %s to run_stack_source'%cuts)
s.run_stack_source(cuts=cuts,final=True)
s.cutstring = '%s_%s'%(cuts['teff'],cuts['psf'])
#lim = np.median(s.init_phot()[ch[0]][-1])
skylim = s.init_phot()[ch[0]][2]
psf = np.loadtxt(os.path.join(s.band_dir,ch[0],'ana','%s_ana.qual'%s.cutstring))[2]
psf_err = np.loadtxt(os.path.join(s.band_dir,ch[0],'ana','%s_ana.qual'%s.cutstring))[3]
np.savetxt(os.path.join(s.ana_dir,'%s_limmags.txt'%s.cutstring),np.array([skylim,psf,psf_err]))
return (skylim,psf)
def main():
o = optimiser()
parsed = o.parsed
chips = [[str(chip)] for chip in parsed['chips'][0].split(',')]
best_teff_df = pd.read_csv('/media/data3/wiseman/des/coadding/optimise/best_teff.csv',header=0)
best_psf_df = pd.read_csv('/media/data3/wiseman/des/coadding/optimise/best_psf.csv',header=0)
for y in parsed['mys']:
for f in parsed['fields']:
for b in parsed['bands']:
for ch in chips:
print ('Sending chip %s to optimize'%ch)
best = o.optimise(f,b,y,ch)
best_teff_df = best_teff_df.append(pd.DataFrame([[f,b,ch,best['depth'][0],best['depth'][1]]],columns=best_teff_df.columns))
best_psf_df = best_psf_df.append(pd.DataFrame([[f,b,ch,best['psf'][0],best['psf'][1]]],columns=best_psf_df.columns))
best_teff_df.to_csv('/media/data3/wiseman/des/coadding/optimise/best_teff.csv',index=False)
best_psf_df.to_csv('/media/data3/wiseman/des/coadding/optimise/best_psf.csv',index=False)
if __name__=="__main__":
main() | bin/optimise_stack.py |
__author__ = "<NAME> <<EMAIL>>"
__version__ = "0.2"
__date__ = "26/11/19"
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import datetime
import configparser
import os
import logging
import argparse
import glob
from time import gmtime, strftime
from astropy.table import Table
from astropy.io import fits
from astropy.time import Time
from des_stacks import des_stack as stack
from des_stacks.utils.loop_stack import iterate_source_loop, init_source_loop
sns.set_color_codes(palette='colorblind')
# define some DES specific lists
all_years = ['none','1','2','3','4'] # add 5 when available
all_fields = ['SN-X1','SN-X2','SN-X3','SN-C1','SN-C2','SN-C3','SN-E1','SN-E2','SN-S1','SN-S2']
all_chips = np.arange(1,62)
all_bands = ['g','r','i','z']
class optimiser():
def __init__(self):
parsed = self._parser()
def _parser(self):
parser = argparse.ArgumentParser(description='Stack some DES SN images')
parser.add_argument('-f','--field', help = 'Field(s) to stack. Separate with space or comma (e.g. X2 X3)',nargs='?',required=False,default='X2')
parser.add_argument('-b', '--band', help = 'Bands(s) to stack. Separate with space or comma (e.g. g r)',nargs='?',required=False,default='r')
parser.add_argument('-my','--minusyears', help = 'Which minus years to stack (e.g. 1,2,3,4,none)',nargs='?',required=False,default='1')
parser.add_argument('-ch','--chips', help = 'Which chips to stack (e.g. [1,5] = 1,3,4)',nargs=1,required=False,default='All')
parser.add_argument('-wd','--workdir', help = 'Working directory [coadding]', default = 'coadding')
parser.add_argument('-l','--looptype', help ='Parameters to optimize (can be "psf", "depth", or a comma separated list of those")',required = False, default = 'depth')
parser.add_argument('-pr','--psfrange',help = 'Range to optimize psf in (min,max): [1.5,3]',required=False,default= '1.5,3.0')
parser.add_argument('-tr','--teffrange',help = 'Range to optimize teff in (min,max): [0,0.5]',required=False,default= '0.0,0.5')
parser.add_argument('-st','--step',help = 'Size of step in the cut you want to optimize over (psf,teff): [0.25,0.01]',required = False, default = '0.25,0.01')
parser.add_argument('-pl','--plot',help='Plot a heatmap of where the best cuts are?',required = False,action = 'store_true')
parser.add_argument('-t','--tidy',help = 'Tidy up temporary files after?',action = 'store_true')
args=parser.parse_args()
parsed = {}
try:
fields = args.field.split(',')
except:
try:
fields = args.field[0].split(' ')
except:
fields =args.field
for i in range(len(fields)):
try:
field = fields[i]
field = 'SN-'+field
fields[i]=field
except:
fields = 'SN-'+fields[0]
parsed['fields']=fields
try:
bands = args.band.split(',')
except:
try:
bands = args.band[0].split(' ')
except:
bands = args.band
parsed['bands']=bands
try:
mys = args.minusyears.split(',')
except:
try:
mys = args.minusyears[0].split(' ')
except:
mys = args.minusyears
parsed['mys']=mys
if args.chips != 'All':
try:
chips = args.chips[0].split(',')
except:
if args.chips[0][0]== '[':
chip_bounds = args.chips[0][1:-1].split(',')
chips = np.arange(int(chip_bounds[0]), int(chip_bounds[-1]))
else:
chips = args.chips[0].split(' ')
else:
chips = args.chips
parsed['chips']=chips
print ('Parsed chips as %s'%chips)
if not args.workdir:
workdir = 'current'
else:
workdir = args.workdir
parsed['workdir']=workdir
try:
loop_types = args.looptype.split(',')
parsed['looptype']=loop_types
except:
parsed['looptype']='depth'
try:
parsed['teffrange'] = args.teffrange.split(',')
except:
parsed['teffrange'] = [0.0,0.5]
try:
parsed['psfrange'] = args.psfrange.split(',')
except:
parsed['psfrange'] = [1.5,3.0]
try:
parsed['step'] = args.step.split(',')
except:
parsed['step'] = [0.25,0.01]
parsed['tidy']=args.tidy
self.parsed = parsed
self.plot = args.plot
def optimise(self,f,b,y,ch):
# a function that iterates through stacks until the best one is reached
t0,t1,ts = float(self.parsed['teffrange'][0]),float(self.parsed['teffrange'][1]),float(self.parsed['step'][1])
p0,p1,ps = float(self.parsed['psfrange'][0]),float(self.parsed['psfrange'][1]),float(self.parsed['step'][0])
wd,lt = self.parsed['workdir'],self.parsed['looptype'][0]
print(t0,t1,ts)
print(p0,p1,ps)
print (lt)
teff_range = np.arange(t0,t1,ts)
psf_range = np.arange(p0,p1,ps)
lim_df = pd.DataFrame(index = [str(r) for r in psf_range],columns=[str(r) for r in teff_range])
psf_df = pd.DataFrame(index = [str(r) for r in psf_range],columns=[str(r) for r in teff_range])#create the DataFrame to put the quality measurements in
lim_df.name = 'depth'
psf_df.name = 'psf'
for psf_cut in psf_range:
for teff_cut in teff_range:
lim,psf = self.do_stack(f,b,y,ch,wd,cuts = {'zp':None,'teff':teff_cut,'psf':psf_cut})
lim_df.loc[str(psf_cut),str(teff_cut)] = lim
psf_df.loc[str(psf_cut),str(teff_cut)] = psf
best={'depth':None,'psf':None}
'''smaller_teff_step = ts/5
smaller_psf_step = ps/5
if lt=='depth':
teff_start = best['depth'][1]
psf_start = best['depth'][0]
elif lt=='psf':
teff_start = best['psf'][1]
psf_start = best['psf'][0]
elif lt=='both':
teff_start = np.mean(best['depth'][1],best['psf'][1])
psf_start = np.mean(best['depth'][0],best['psf'][0])
zoomed_teffrange = np.arange(teff_start-float(ts)*5,teff_start+float(ts)*5,smaller_teff_step)
zoomed_psfrange = np.arange(psf_start-float(ps)*5,psf_start+float(ps)*5,smaller_psf_step)
for newpsf in zoomed_psfrange:
lim_df = lim_df.append(pd.DataFrame(index=[str(newpsf)],columns=lim_df.columns))
psf_df = psf_df.append(pd.DataFrame(index=[str(newpsf)],columns=psf_df.columns))
for newteff in zoomed_teffrange:
lim_df[str(newteff)] = ''
psf_df[str(newteff)] = ''
lim,psf = do_stack(f,b,y,ch,wd,cuts = {'zp':None,'teff':newteff,'psf':newpsf})
lim_df.loc[str(newpsf),str(newteff)] = lim
psf_df.loc[str(newpsf),str(newteff)] = psf'''
for df in [lim_df,psf_df]:
best[df.name] = [np.float(np.argmax(df.max(axis=1))),np.float(np.argmax(df.max(axis=0)))]
# ADD TO PLOT!
if self.plot:
f1,ax1 = plt.subplots()
depthmin = np.min(lim_df.min().values)
depthmax = np.max(lim_df.max().values)
depthrang = depthmax-depthmin
lim_df = lim_df.astype(float)
psf_df = psf_df.astype(float)
sns.heatmap(lim_df,ax=ax1,cmap='Oranges',cbar_kws={'label': 'Limiting Magnitude'})
ax1.set_xlabel('$\\tau_{effective} cut$')
ax1.set_ylabel('PSF cut')
plt.savefig('/media/data3/wiseman/des/coadding/optimise/optimize_teff_%s_%s_%s_%s.pdf'%(f,b,y,ch[0]))
plt.close()
f2,ax2 = plt.subplots()
sns.heatmap(psf_df,ax=ax2,cmap='Blues',cbar_kws={'label': 'Limiting Magnitude'})
ax2.set_xlabel('$\\tau_{effective} cut$')
ax2.set_ylabel('PSF cut')
plt.savefig('/media/data3/wiseman/des/coadding/optimise/optimize_psf_%s_%s_%s_%s.pdf'%(f,b,y,ch[0]))
return best
def do_stack(self,f,b,y,ch,wd,cuts):
#Performs the actual stack for a given set of cuts, and returns the limiting magnitudes and psf
print ('Making stack of',f,b,y,ch,wd,cuts)
s = stack.Stack(f,b,y,ch,wd,cuts,db=True)
scifile = os.path.join(s.band_dir,'ccd_%s_%s_%.2f_%s_clipweighted_sci.fits'%(ch[0],b,cuts['teff'],cuts['psf']))
if not os.path.isfile(scifile):
print ('Did not find a file for these cuts; doing stack')
s.do_my_stack(cuts=cuts,final=True)
else:
print ('Found a stacked file for these cuts; going to source')
s.ana_dir = os.path.join(s.band_dir,ch[0],'ana')
sourcename = os.path.join(s.ana_dir,'MY%s_%s_%s_%s_%.2f_%s_clipweighted_sci.sourcecat' %(y,f,b,ch[0],cuts['teff'],cuts['psf']))
print ('Looking for file under the name: %s'%sourcename)
if os.path.isfile(sourcename):
print ('Found a sourcecat for these cuts at: %s'%sourcename)
s.sourcecats = [sourcename]
s.cuts=cuts
else:
print ('No sourcecat yet; running source extractor')
print ('Sending %s to run_stack_source'%cuts)
s.run_stack_source(cuts=cuts,final=True)
s.cutstring = '%s_%s'%(cuts['teff'],cuts['psf'])
#lim = np.median(s.init_phot()[ch[0]][-1])
skylim = s.init_phot()[ch[0]][2]
psf = np.loadtxt(os.path.join(s.band_dir,ch[0],'ana','%s_ana.qual'%s.cutstring))[2]
psf_err = np.loadtxt(os.path.join(s.band_dir,ch[0],'ana','%s_ana.qual'%s.cutstring))[3]
np.savetxt(os.path.join(s.ana_dir,'%s_limmags.txt'%s.cutstring),np.array([skylim,psf,psf_err]))
return (skylim,psf)
def main():
o = optimiser()
parsed = o.parsed
chips = [[str(chip)] for chip in parsed['chips'][0].split(',')]
best_teff_df = pd.read_csv('/media/data3/wiseman/des/coadding/optimise/best_teff.csv',header=0)
best_psf_df = pd.read_csv('/media/data3/wiseman/des/coadding/optimise/best_psf.csv',header=0)
for y in parsed['mys']:
for f in parsed['fields']:
for b in parsed['bands']:
for ch in chips:
print ('Sending chip %s to optimize'%ch)
best = o.optimise(f,b,y,ch)
best_teff_df = best_teff_df.append(pd.DataFrame([[f,b,ch,best['depth'][0],best['depth'][1]]],columns=best_teff_df.columns))
best_psf_df = best_psf_df.append(pd.DataFrame([[f,b,ch,best['psf'][0],best['psf'][1]]],columns=best_psf_df.columns))
best_teff_df.to_csv('/media/data3/wiseman/des/coadding/optimise/best_teff.csv',index=False)
best_psf_df.to_csv('/media/data3/wiseman/des/coadding/optimise/best_psf.csv',index=False)
if __name__=="__main__":
main() | 0.358353 | 0.140513 |
from typing import Dict, Optional, Union
from authentication import auth_model
from fire_watch.errorfactory import DuplicationError, InvalidUid
from fire_watch.utils import pagination_utils
import fire_watch
from .base_model import BaseModel
class ApiModel(BaseModel):
def register_user(self, *args, **kwargs):
auth_model.register_user(*args, **kwargs)
def check_existing(self, doc: Dict[str, Union[str, int]]):
"""Check existing users against the entered `email` only.
Args:
doc (Dict[str, Union[str, int]]): user data
Raises:
DuplicationError: If user exists
"""
user = self.db.users.find_one({"email": doc["email"]})
if user:
raise DuplicationError({"error": "User exists!"})
def credentials(self, *args, **kwargs):
return auth_model.credentials(*args, **kwargs)
def insert_data(self, unit_id: str, data: Dict[str, Union[str, int]]):
"""Insert collected data into respective unit documents.
Insert into document if current insertions are less than
200 else create new unit document with same unit id and
insert.
Can be used as a standalone function to insert data or
through available `upload` api route
Args:
unit_id (str): unique unit identifier
data (Dict[str, Union[str, int]]): data collected
Raises:
InvalidUid: raised if no unit found
"""
units = list(self.db.units.find({"unit_id": unit_id}))
try:
unit = units.pop()
except IndexError:
raise InvalidUid(detail={"error": f"No unit with the id {unit_id} found"})
if len(unit["data"]) < self.max_entry:
self.db.units.update_one(
{"_id": unit["_id"]}, update={"$push": {"data": data["data"]}}
)
else:
doc = {"unit_id": unit_id, "data": [data["data"]]}
self.db.units.insert_one(doc)
def reset_password(self, **kwargs) -> None:
auth_model.reset_password(**kwargs)
def get_collected_data(
self,
page: int,
unit_id: Optional[str] = None,
):
skip, limit = pagination_utils(
page=page, page_limit=fire_watch.conf.pagination_limit
)
project_pipeline = {
"$project": {
# # "email": 1,
"_id": 0,
"unit_id": 0
# "unit_id": 0,
},
}
match = {"unit_id": unit_id} if unit_id else {}
units = self.db.units.aggregate(
[
{"$match": match},
{"$limit": limit},
{"$skip": skip},
project_pipeline,
]
)
return units | server/models/api_model.py | from typing import Dict, Optional, Union
from authentication import auth_model
from fire_watch.errorfactory import DuplicationError, InvalidUid
from fire_watch.utils import pagination_utils
import fire_watch
from .base_model import BaseModel
class ApiModel(BaseModel):
def register_user(self, *args, **kwargs):
auth_model.register_user(*args, **kwargs)
def check_existing(self, doc: Dict[str, Union[str, int]]):
"""Check existing users against the entered `email` only.
Args:
doc (Dict[str, Union[str, int]]): user data
Raises:
DuplicationError: If user exists
"""
user = self.db.users.find_one({"email": doc["email"]})
if user:
raise DuplicationError({"error": "User exists!"})
def credentials(self, *args, **kwargs):
return auth_model.credentials(*args, **kwargs)
def insert_data(self, unit_id: str, data: Dict[str, Union[str, int]]):
"""Insert collected data into respective unit documents.
Insert into document if current insertions are less than
200 else create new unit document with same unit id and
insert.
Can be used as a standalone function to insert data or
through available `upload` api route
Args:
unit_id (str): unique unit identifier
data (Dict[str, Union[str, int]]): data collected
Raises:
InvalidUid: raised if no unit found
"""
units = list(self.db.units.find({"unit_id": unit_id}))
try:
unit = units.pop()
except IndexError:
raise InvalidUid(detail={"error": f"No unit with the id {unit_id} found"})
if len(unit["data"]) < self.max_entry:
self.db.units.update_one(
{"_id": unit["_id"]}, update={"$push": {"data": data["data"]}}
)
else:
doc = {"unit_id": unit_id, "data": [data["data"]]}
self.db.units.insert_one(doc)
def reset_password(self, **kwargs) -> None:
auth_model.reset_password(**kwargs)
def get_collected_data(
self,
page: int,
unit_id: Optional[str] = None,
):
skip, limit = pagination_utils(
page=page, page_limit=fire_watch.conf.pagination_limit
)
project_pipeline = {
"$project": {
# # "email": 1,
"_id": 0,
"unit_id": 0
# "unit_id": 0,
},
}
match = {"unit_id": unit_id} if unit_id else {}
units = self.db.units.aggregate(
[
{"$match": match},
{"$limit": limit},
{"$skip": skip},
project_pipeline,
]
)
return units | 0.903916 | 0.295185 |
from docplex.mp.compat23 import StringIO
from docplex.mp.constants import ComparisonType
from collections import Counter
from docplex.mp.vartype import *
class ModelStatistics(object):
"""ModelStatistics()
This class gathers statistics from the model.
Instances of this class are returned by the method :func:`docplex.mp.model.Model.get_statistics`.
The class contains counters on the various types of variables and constraints
in the model.
"""
def __init__(self):
self._number_of_binary_variables = 0
self._number_of_integer_variables = 0
self._number_of_continuous_variables = 0
self._number_of_semicontinuous_variables = 0
self._number_of_semiinteger_variables = 0
self._number_of_le_constraints = 0
self._number_of_ge_constraints = 0
self._number_of_eq_constraints = 0
self._number_of_range_constraints = 0
self._number_of_indicator_constraints = 0
self._number_of_equivalence_constraints = 0
self._number_of_quadratic_constraints = 0
def as_tuple(self):
return (self._number_of_binary_variables,
self._number_of_integer_variables,
self._number_of_continuous_variables,
self._number_of_semicontinuous_variables,
self._number_of_semiinteger_variables,
self._number_of_le_constraints,
self._number_of_ge_constraints,
self._number_of_eq_constraints,
self._number_of_range_constraints,
self._number_of_indicator_constraints,
self._number_of_equivalence_constraints,
self._number_of_quadratic_constraints)
def equal_stats(self, other):
return isinstance(other, ModelStatistics) and (self.as_tuple() == other.as_tuple())
def __eq__(self, other):
return self.equal_stats(other)
def __sub__(self, other):
if not isinstance(other, ModelStatistics):
raise TypeError
diffstats = ModelStatistics()
for attr in ["_number_of_le_constraints", "_number_of_ge_constraints", "_number_of_eq_constraints"]:
setattr(diffstats, attr, getattr(self, attr) - getattr(other, attr))
return diffstats
@staticmethod
def _make_new_stats(mdl):
# INTERNAL
stats = ModelStatistics()
vartype_count = Counter(type(dv.vartype) for dv in mdl.iter_variables())
stats._number_of_binary_variables = vartype_count[BinaryVarType]
stats._number_of_integer_variables = vartype_count[IntegerVarType]
stats._number_of_continuous_variables = vartype_count[ContinuousVarType]
stats._number_of_semicontinuous_variables = vartype_count[SemiContinuousVarType]
stats._number_of_semiinteger_variables = vartype_count[SemiIntegerVarType]
linct_count = Counter(ct.sense for ct in mdl.iter_binary_constraints())
stats._number_of_le_constraints = linct_count[ComparisonType.LE]
stats._number_of_eq_constraints = linct_count[ComparisonType.EQ]
stats._number_of_ge_constraints = linct_count[ComparisonType.GE]
stats._number_of_range_constraints = mdl.number_of_range_constraints
stats._number_of_indicator_constraints = mdl.number_of_indicator_constraints
stats._number_of_equivalence_constraints = mdl.number_of_equivalence_constraints
stats._number_of_quadratic_constraints = mdl.number_of_quadratic_constraints
return stats
@property
def number_of_variables(self):
""" This property returns the total number of variables in the model.
"""
return self._number_of_binary_variables \
+ self._number_of_integer_variables \
+ self._number_of_continuous_variables \
+ self._number_of_semicontinuous_variables
@property
def number_of_binary_variables(self):
""" This property returns the number of binary decision variables in the model.
"""
return self._number_of_binary_variables
@property
def number_of_integer_variables(self):
""" This property returns the number of integer decision variables in the model.
"""
return self._number_of_integer_variables
@property
def number_of_continuous_variables(self):
""" This property returns the number of continuous decision variables in the model.
"""
return self._number_of_continuous_variables
@property
def number_of_semicontinuous_variables(self):
""" This property returns the number of semicontinuous decision variables in the model.
"""
return self._number_of_semicontinuous_variables
@property
def number_of_semiinteger_variables(self):
""" This property returns the number of semi-integer decision variables in the model.
"""
return self._number_of_semiinteger_variables
@property
def number_of_linear_constraints(self):
""" This property returns the total number of linear constraints in the model.
This number comprises all relational constraints: <=, ==, and >=
and also range constraints.
"""
return self._number_of_eq_constraints + \
self._number_of_le_constraints + \
self._number_of_ge_constraints + \
self._number_of_range_constraints
@property
def number_of_le_constraints(self):
""" This property returns the number of <= constraints
"""
return self._number_of_le_constraints
@property
def number_of_eq_constraints(self):
""" This property returns the number of == constraints
"""
return self._number_of_eq_constraints
@property
def number_of_ge_constraints(self):
""" This property returns the number of >= constraints
"""
return self._number_of_ge_constraints
@property
def number_of_range_constraints(self):
""" This property returns the number of range constraints.
Range constraints are of the form L <= expression <= U.
See Also:
:class:`docplex.mp.constr.RangeConstraint`
"""
return self._number_of_range_constraints
@property
def number_of_indicator_constraints(self):
""" This property returns the number of indicator constraints.
See Also:
:class:`docplex.mp.constr.IndicatorConstraint`
"""
return self._number_of_indicator_constraints
@property
def number_of_equivalence_constraints(self):
""" This property returns the number of equivalence constraints.
See Also:
:class:`docplex.mp.constr.EquivalenceConstraint`
"""
return self._number_of_equivalence_constraints
@property
def number_of_quadratic_constraints(self):
""" This property returns the number of quadratic constraints.
See Also:
:class:`docplex.mp.constr.QuadraticConstraint`
"""
return self._number_of_quadratic_constraints
@property
def number_of_constraints(self):
return self.number_of_linear_constraints +\
self.number_of_quadratic_constraints +\
self.number_of_indicator_constraints +\
self._number_of_equivalence_constraints
def print_information(self):
""" Prints model statistics in readable format.
"""
print(' - number of variables: {0}'.format(self.number_of_variables))
var_fmt = ' - binary={0}, integer={1}, continuous={2}'
if 0 != self._number_of_semicontinuous_variables:
var_fmt += ', semi-continuous={3}'
print(var_fmt.format(self.number_of_binary_variables,
self.number_of_integer_variables,
self.number_of_continuous_variables,
self._number_of_semicontinuous_variables
))
print(' - number of constraints: {0}'.format(self.number_of_constraints))
ct_fmt = ' - linear={0}'
if 0 != self._number_of_indicator_constraints:
ct_fmt += ', indicator={1}'
if 0 != self._number_of_equivalence_constraints:
ct_fmt += ', equiv={2}'
if 0 != self._number_of_quadratic_constraints:
ct_fmt += ', quadratic={3}'
print(ct_fmt.format(self.number_of_linear_constraints,
self.number_of_indicator_constraints,
self.number_of_equivalence_constraints,
self.number_of_quadratic_constraints))
def to_string(self):
oss = StringIO()
oss.write(" - number of variables: %d\n" % self.number_of_variables)
var_fmt = ' - binary={0}, integer={1}, continuous={2}'
if 0 != self._number_of_semicontinuous_variables:
var_fmt += ', semi-continuous={3}'
oss.write(var_fmt.format(self.number_of_binary_variables,
self.number_of_integer_variables,
self.number_of_continuous_variables,
self._number_of_semicontinuous_variables
))
oss.write('\n')
nb_constraints = self.number_of_constraints
oss.write(' - number of constraints: {0}\n'.format(nb_constraints))
if nb_constraints:
ct_fmt = ' - linear={0}'
if 0 != self._number_of_indicator_constraints:
ct_fmt += ', indicator={1}'
if 0 != self._number_of_equivalence_constraints:
ct_fmt += ', equiv={2}'
if 0 != self._number_of_quadratic_constraints:
ct_fmt += ', quadratic={3}'
oss.write(ct_fmt.format(self.number_of_linear_constraints,
self.number_of_indicator_constraints,
self.number_of_equivalence_constraints,
self.number_of_quadratic_constraints))
return oss.getvalue()
def __str__(self):
return self.to_string()
def __repr__(self): # pragma: no cover
return "docplex.mp.Model.ModelStatistics()" | ukpsummarizer-be/cplex/python/docplex/docplex/mp/model_stats.py |
from docplex.mp.compat23 import StringIO
from docplex.mp.constants import ComparisonType
from collections import Counter
from docplex.mp.vartype import *
class ModelStatistics(object):
"""ModelStatistics()
This class gathers statistics from the model.
Instances of this class are returned by the method :func:`docplex.mp.model.Model.get_statistics`.
The class contains counters on the various types of variables and constraints
in the model.
"""
def __init__(self):
self._number_of_binary_variables = 0
self._number_of_integer_variables = 0
self._number_of_continuous_variables = 0
self._number_of_semicontinuous_variables = 0
self._number_of_semiinteger_variables = 0
self._number_of_le_constraints = 0
self._number_of_ge_constraints = 0
self._number_of_eq_constraints = 0
self._number_of_range_constraints = 0
self._number_of_indicator_constraints = 0
self._number_of_equivalence_constraints = 0
self._number_of_quadratic_constraints = 0
def as_tuple(self):
return (self._number_of_binary_variables,
self._number_of_integer_variables,
self._number_of_continuous_variables,
self._number_of_semicontinuous_variables,
self._number_of_semiinteger_variables,
self._number_of_le_constraints,
self._number_of_ge_constraints,
self._number_of_eq_constraints,
self._number_of_range_constraints,
self._number_of_indicator_constraints,
self._number_of_equivalence_constraints,
self._number_of_quadratic_constraints)
def equal_stats(self, other):
return isinstance(other, ModelStatistics) and (self.as_tuple() == other.as_tuple())
def __eq__(self, other):
return self.equal_stats(other)
def __sub__(self, other):
if not isinstance(other, ModelStatistics):
raise TypeError
diffstats = ModelStatistics()
for attr in ["_number_of_le_constraints", "_number_of_ge_constraints", "_number_of_eq_constraints"]:
setattr(diffstats, attr, getattr(self, attr) - getattr(other, attr))
return diffstats
@staticmethod
def _make_new_stats(mdl):
# INTERNAL
stats = ModelStatistics()
vartype_count = Counter(type(dv.vartype) for dv in mdl.iter_variables())
stats._number_of_binary_variables = vartype_count[BinaryVarType]
stats._number_of_integer_variables = vartype_count[IntegerVarType]
stats._number_of_continuous_variables = vartype_count[ContinuousVarType]
stats._number_of_semicontinuous_variables = vartype_count[SemiContinuousVarType]
stats._number_of_semiinteger_variables = vartype_count[SemiIntegerVarType]
linct_count = Counter(ct.sense for ct in mdl.iter_binary_constraints())
stats._number_of_le_constraints = linct_count[ComparisonType.LE]
stats._number_of_eq_constraints = linct_count[ComparisonType.EQ]
stats._number_of_ge_constraints = linct_count[ComparisonType.GE]
stats._number_of_range_constraints = mdl.number_of_range_constraints
stats._number_of_indicator_constraints = mdl.number_of_indicator_constraints
stats._number_of_equivalence_constraints = mdl.number_of_equivalence_constraints
stats._number_of_quadratic_constraints = mdl.number_of_quadratic_constraints
return stats
@property
def number_of_variables(self):
""" This property returns the total number of variables in the model.
"""
return self._number_of_binary_variables \
+ self._number_of_integer_variables \
+ self._number_of_continuous_variables \
+ self._number_of_semicontinuous_variables
@property
def number_of_binary_variables(self):
""" This property returns the number of binary decision variables in the model.
"""
return self._number_of_binary_variables
@property
def number_of_integer_variables(self):
""" This property returns the number of integer decision variables in the model.
"""
return self._number_of_integer_variables
@property
def number_of_continuous_variables(self):
""" This property returns the number of continuous decision variables in the model.
"""
return self._number_of_continuous_variables
@property
def number_of_semicontinuous_variables(self):
""" This property returns the number of semicontinuous decision variables in the model.
"""
return self._number_of_semicontinuous_variables
@property
def number_of_semiinteger_variables(self):
""" This property returns the number of semi-integer decision variables in the model.
"""
return self._number_of_semiinteger_variables
@property
def number_of_linear_constraints(self):
""" This property returns the total number of linear constraints in the model.
This number comprises all relational constraints: <=, ==, and >=
and also range constraints.
"""
return self._number_of_eq_constraints + \
self._number_of_le_constraints + \
self._number_of_ge_constraints + \
self._number_of_range_constraints
@property
def number_of_le_constraints(self):
""" This property returns the number of <= constraints
"""
return self._number_of_le_constraints
@property
def number_of_eq_constraints(self):
""" This property returns the number of == constraints
"""
return self._number_of_eq_constraints
@property
def number_of_ge_constraints(self):
""" This property returns the number of >= constraints
"""
return self._number_of_ge_constraints
@property
def number_of_range_constraints(self):
""" This property returns the number of range constraints.
Range constraints are of the form L <= expression <= U.
See Also:
:class:`docplex.mp.constr.RangeConstraint`
"""
return self._number_of_range_constraints
@property
def number_of_indicator_constraints(self):
""" This property returns the number of indicator constraints.
See Also:
:class:`docplex.mp.constr.IndicatorConstraint`
"""
return self._number_of_indicator_constraints
@property
def number_of_equivalence_constraints(self):
""" This property returns the number of equivalence constraints.
See Also:
:class:`docplex.mp.constr.EquivalenceConstraint`
"""
return self._number_of_equivalence_constraints
@property
def number_of_quadratic_constraints(self):
""" This property returns the number of quadratic constraints.
See Also:
:class:`docplex.mp.constr.QuadraticConstraint`
"""
return self._number_of_quadratic_constraints
@property
def number_of_constraints(self):
return self.number_of_linear_constraints +\
self.number_of_quadratic_constraints +\
self.number_of_indicator_constraints +\
self._number_of_equivalence_constraints
def print_information(self):
""" Prints model statistics in readable format.
"""
print(' - number of variables: {0}'.format(self.number_of_variables))
var_fmt = ' - binary={0}, integer={1}, continuous={2}'
if 0 != self._number_of_semicontinuous_variables:
var_fmt += ', semi-continuous={3}'
print(var_fmt.format(self.number_of_binary_variables,
self.number_of_integer_variables,
self.number_of_continuous_variables,
self._number_of_semicontinuous_variables
))
print(' - number of constraints: {0}'.format(self.number_of_constraints))
ct_fmt = ' - linear={0}'
if 0 != self._number_of_indicator_constraints:
ct_fmt += ', indicator={1}'
if 0 != self._number_of_equivalence_constraints:
ct_fmt += ', equiv={2}'
if 0 != self._number_of_quadratic_constraints:
ct_fmt += ', quadratic={3}'
print(ct_fmt.format(self.number_of_linear_constraints,
self.number_of_indicator_constraints,
self.number_of_equivalence_constraints,
self.number_of_quadratic_constraints))
def to_string(self):
oss = StringIO()
oss.write(" - number of variables: %d\n" % self.number_of_variables)
var_fmt = ' - binary={0}, integer={1}, continuous={2}'
if 0 != self._number_of_semicontinuous_variables:
var_fmt += ', semi-continuous={3}'
oss.write(var_fmt.format(self.number_of_binary_variables,
self.number_of_integer_variables,
self.number_of_continuous_variables,
self._number_of_semicontinuous_variables
))
oss.write('\n')
nb_constraints = self.number_of_constraints
oss.write(' - number of constraints: {0}\n'.format(nb_constraints))
if nb_constraints:
ct_fmt = ' - linear={0}'
if 0 != self._number_of_indicator_constraints:
ct_fmt += ', indicator={1}'
if 0 != self._number_of_equivalence_constraints:
ct_fmt += ', equiv={2}'
if 0 != self._number_of_quadratic_constraints:
ct_fmt += ', quadratic={3}'
oss.write(ct_fmt.format(self.number_of_linear_constraints,
self.number_of_indicator_constraints,
self.number_of_equivalence_constraints,
self.number_of_quadratic_constraints))
return oss.getvalue()
    def __str__(self):
        # Delegate to to_string() so str(stats) and explicit calls agree.
        return self.to_string()
    def __repr__(self):  # pragma: no cover
        # Fixed, argument-free repr; statistic values are not echoed here.
        return "docplex.mp.Model.ModelStatistics()"
import argparse
import collections
import csv
import datetime
import json
import os
import sys
import time
if sys.version_info > (3,):
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
else:
from urllib2 import HTTPError, URLError, urlopen
HEADER_FIELDNAMES = (
'chart', # TODO: should be 'date'?
'date', # TODO: should be 'time'?
'executionTime',
'upstreamLatency',
'proxyLatency',
'responseCode',
'responseTime'
)
# Only retain 7 days of data (10 minute intervals)
# 6 (10 minute chunks per hour) * 24 (hours per day) * 7 (days)
MAX_ROWS = 6 * 24 * 7 # 1008
def _get_current_date_and_time():
d = datetime.datetime.now()
return {
'chart': d.strftime('%Y/%m/%d'), # TODO: should be 'date'?
'date': d.strftime('%H:%M:%S') # TODO: should be 'time'?
}
def _get_url_response_info(url):
try:
# Run an HTTP GET on the url
start_time = time.time()
response = urlopen(url)
except HTTPError as e:
return {'responseCode': e.code}
except (URLError, ValueError):
return {}
else:
end_time = time.time()
try:
data = json.loads(response.read())
except ValueError:
# If the data being deserialized is not a valid JSON document,
# a ValueError will be raised (Python 2.7 -> 3.4)
exec_time = ''
else:
exec_time = data.get('executionTime', '')
return {
'executionTime': exec_time,
'proxyLatency': response.info().getheader(
'X-Kong-Proxy-Latency',
default=''
),
'responseCode': response.getcode(),
'responseTime': (end_time - start_time) * 1000,
'upstreamLatency': response.info().getheader(
'X-Kong-Upstream-Latency',
default=''
)
}
def _parse_cmd_line_args():
parser = argparse.ArgumentParser(
description='Checks the heartbeat of a given URL'
)
parser.add_argument(
'-url',
dest='url',
action='store',
required=True,
help='The URL to check the heartbeat of'
)
parser.add_argument(
'-o',
dest='output_filename',
action='store',
required=True,
help='The file to append heartbeat results to'
)
return parser.parse_args()
def _read_csv(filename):
    """Load existing heartbeat rows (if any) into a bounded deque.

    The deque keeps at most MAX_ROWS entries, silently discarding the
    oldest rows once the limit is reached.
    """
    rows = collections.deque(maxlen=MAX_ROWS)
    if not os.path.exists(filename):
        return rows
    with open(filename, 'r') as csvfile:
        for row in csv.DictReader(csvfile, delimiter='|'):
            rows.append(row)
    return rows
def _write_csv(filename, rows):
    """Overwrite *filename* with a header line followed by *rows*."""
    with open(filename, 'w') as csvfile:
        writer = csv.DictWriter(csvfile, delimiter='|',
                                fieldnames=HEADER_FIELDNAMES)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)
def main():
    """Record one heartbeat sample for the configured URL.

    Reads the existing CSV (bounded to MAX_ROWS), appends one new row with
    the current timestamp and response metrics, then rewrites the file.
    """
    # Parse command-line arguments from sys.argv
    args = _parse_cmd_line_args()
    # Initialize dictionary to hold new URL response information
    # (every field defaults to '' so partial results still fill each column)
    url_response_info = {key: '' for key in HEADER_FIELDNAMES}
    # Update the date and time key/value pairs
    url_response_info.update(_get_current_date_and_time())
    # Update the URL response information key/value pairs
    url_response_info.update(_get_url_response_info(args.url))
    # Read rows into a deque (if file exists, else empty deque)
    current_rows = _read_csv(args.output_filename)
    # Append new URL response information to the deque
    current_rows.append(url_response_info)
    # Write all rows to csv output file
    _write_csv(args.output_filename, current_rows)


if __name__ == '__main__':
    main()
import collections
import csv
import datetime
import json
import os
import sys
import time
if sys.version_info > (3,):
from urllib.error import HTTPError, URLError
from urllib.request import urlopen
else:
from urllib2 import HTTPError, URLError, urlopen
HEADER_FIELDNAMES = (
'chart', # TODO: should be 'date'?
'date', # TODO: should be 'time'?
'executionTime',
'upstreamLatency',
'proxyLatency',
'responseCode',
'responseTime'
)
# Only retain 7 days of data (10 minute intervals)
# 6 (10 minute chunks per hour) * 24 (hours per day) * 7 (days)
MAX_ROWS = 6 * 24 * 7 # 1008
def _get_current_date_and_time():
d = datetime.datetime.now()
return {
'chart': d.strftime('%Y/%m/%d'), # TODO: should be 'date'?
'date': d.strftime('%H:%M:%S') # TODO: should be 'time'?
}
def _get_url_response_info(url):
try:
# Run an HTTP GET on the url
start_time = time.time()
response = urlopen(url)
except HTTPError as e:
return {'responseCode': e.code}
except (URLError, ValueError):
return {}
else:
end_time = time.time()
try:
data = json.loads(response.read())
except ValueError:
# If the data being deserialized is not a valid JSON document,
# a ValueError will be raised (Python 2.7 -> 3.4)
exec_time = ''
else:
exec_time = data.get('executionTime', '')
return {
'executionTime': exec_time,
'proxyLatency': response.info().getheader(
'X-Kong-Proxy-Latency',
default=''
),
'responseCode': response.getcode(),
'responseTime': (end_time - start_time) * 1000,
'upstreamLatency': response.info().getheader(
'X-Kong-Upstream-Latency',
default=''
)
}
def _parse_cmd_line_args():
parser = argparse.ArgumentParser(
description='Checks the heartbeat of a given URL'
)
parser.add_argument(
'-url',
dest='url',
action='store',
required=True,
help='The URL to check the heartbeat of'
)
parser.add_argument(
'-o',
dest='output_filename',
action='store',
required=True,
help='The file to append heartbeat results to'
)
return parser.parse_args()
def _read_csv(filename):
result = collections.deque(maxlen=MAX_ROWS)
if os.path.exists(filename):
with open(filename, 'r') as csvfile:
reader = csv.DictReader(csvfile, delimiter='|')
result.extend(reader)
return result
def _write_csv(filename, rows):
with open(filename, 'w') as csvfile:
writer = csv.DictWriter(
csvfile,
delimiter='|',
fieldnames=HEADER_FIELDNAMES
)
writer.writeheader()
writer.writerows(rows)
def main():
# Parse command-line arguments from sys.argv
args = _parse_cmd_line_args()
# Initialize dictionary to hold new URL response information
url_response_info = {key: '' for key in HEADER_FIELDNAMES}
# Update the date and time key/value pairs
url_response_info.update(_get_current_date_and_time())
# Update the URL response information key/value pairs
url_response_info.update(_get_url_response_info(args.url))
# Read rows into a deque (if file exists, else empty deque)
current_rows = _read_csv(args.output_filename)
# Append new URL response information to the deque
current_rows.append(url_response_info)
# Write all rows to csv output file
_write_csv(args.output_filename, current_rows)
if __name__ == '__main__':
main() | 0.193528 | 0.108945 |
__docformat__ = 'restructuredtext'
from zope.interface.interfaces import IInterface
from zope.proxy import removeAllProxies
from zope.publisher.browser import BrowserView
from zope.traversing.browser import absoluteURL
from zope.app.apidoc.utilities import getPythonPath, renderText
from zope.app.apidoc.codemodule.interfaces import IModuleDocumentation
from zope.app.apidoc.codemodule.interfaces import IClassDocumentation
from zope.app.apidoc.codemodule.interfaces import IFunctionDocumentation
from zope.app.apidoc.codemodule.interfaces import IZCMLFile
from zope.app.apidoc.codemodule.interfaces import ITextFile
from zope.app.apidoc.browser.utilities import findAPIDocumentationRootURL
def formatDocString(text, module=None, summary=False):
    """Format a doc string for display.

    module is either a Python module (from sys.modules) or the dotted name
    of a module.

    If summary is true, the result is plain text and includes only
    the summary part of the doc string.

    If summary is false, the result is HTML and includes the whole doc string.
    """
    if text is None:
        return None
    # Split into lines, dropping any CVS '$Id' lines along the way.
    lines = [line for line in text.strip().split('\n')
             if not line.startswith('$Id')]
    if not summary:
        return renderText('\n'.join(lines), module)
    # Summary mode: keep only the lines up to the first blank one.
    summary_lines = []
    for line in lines:
        if not line.strip():
            break
        summary_lines.append(line)
    return '\n'.join(summary_lines)
class ModuleDetails(BrowserView):
    """Represents the details of a module or package."""

    def __init__(self, context, request):
        """Sort the module's contents into per-kind entry lists.

        Each entry dict has at least 'name' and 'url'; interfaces also get
        'path', and documented objects get a 'doc' snippet.
        """
        super(ModuleDetails, self).__init__(context, request)
        items = sorted(self.context.items())
        self.text_files = []
        self.zcml_files = []
        self.modules = []
        self.interfaces = []
        self.classes = []
        self.functions = []
        for name, obj in items:
            entry = {'name': name, 'url': absoluteURL(obj, self.request)}
            # Dispatch on the documentation interface the object provides;
            # order matters, each object is filed under its first match only.
            if IFunctionDocumentation.providedBy(obj):
                # Functions keep the full doc string (summary defaults False).
                entry['doc'] = formatDocString(
                    obj.getDocString(), self.context.getPath())
                entry['signature'] = obj.getSignature()
                self.functions.append(entry)
            elif IModuleDocumentation.providedBy(obj):
                entry['doc'] = formatDocString(
                    obj.getDocString(), obj.getPath(), True)
                self.modules.append(entry)
            elif IInterface.providedBy(obj):
                # Remove proxies before computing the dotted path, otherwise
                # the proxy class would be reported instead of the interface.
                entry['path'] = getPythonPath(removeAllProxies(obj))
                entry['doc'] = formatDocString(
                    obj.__doc__, obj.__module__, True)
                self.interfaces.append(entry)
            elif IClassDocumentation.providedBy(obj):
                entry['doc'] = formatDocString(
                    obj.getDocString(), self.context.getPath(), True)
                self.classes.append(entry)
            elif IZCMLFile.providedBy(obj):
                self.zcml_files.append(entry)
            elif ITextFile.providedBy(obj):
                self.text_files.append(entry)

    def getAPIDocRootURL(self):
        """Return the root URL of the API documentation tree."""
        return findAPIDocumentationRootURL(self.context, self.request)

    def getDoc(self):
        """Get the doc string of the module, formatted as HTML."""
        return formatDocString(
            self.context.getDocString(), self.context.getPath())

    def getPath(self):
        """Return the path to the module"""
        return self.context.getPath()

    def isPackage(self):
        """Return true if this module is a package"""
        return self.context.isPackage()

    def getModuleInterfaces(self):
        """Return entries about interfaces the module provides"""
        entries = []
        for iface in self.context.getDeclaration():
            entries.append({
                'name': iface.__name__,
                'path': getPythonPath(removeAllProxies(iface))
            })
        return entries

    def getModules(self):
        """Return entries about contained modules and subpackages"""
        return self.modules

    def getInterfaces(self):
        """Return entries about interfaces declared by the module"""
        return self.interfaces

    def getClasses(self):
        """Return entries about classes declared by the module"""
        return self.classes

    def getTextFiles(self):
        """Return entries about text files contained in the package"""
        return self.text_files

    def getZCMLFiles(self):
        """Return entries about ZCML files contained in the package"""
        return self.zcml_files

    def getFunctions(self):
        """Return entries about functions declared by the package"""
        return self.functions
from zope.interface.interfaces import IInterface
from zope.proxy import removeAllProxies
from zope.publisher.browser import BrowserView
from zope.traversing.browser import absoluteURL
from zope.app.apidoc.utilities import getPythonPath, renderText
from zope.app.apidoc.codemodule.interfaces import IModuleDocumentation
from zope.app.apidoc.codemodule.interfaces import IClassDocumentation
from zope.app.apidoc.codemodule.interfaces import IFunctionDocumentation
from zope.app.apidoc.codemodule.interfaces import IZCMLFile
from zope.app.apidoc.codemodule.interfaces import ITextFile
from zope.app.apidoc.browser.utilities import findAPIDocumentationRootURL
def formatDocString(text, module=None, summary=False):
"""Format a doc string for display.
module is either a Python module (from sys.modules) or the dotted name
of a module.
If summary is true, the result is plain text and includes only
the summary part of the doc string.
If summary is false, the result is HTML and includes the whole doc string.
"""
if text is None:
return None
lines = text.strip().split('\n')
# Get rid of possible CVS id.
lines = [line for line in lines if not line.startswith('$Id')]
if summary:
for i in range(len(lines)):
if not lines[i].strip():
del lines[i:]
break
return '\n'.join(lines)
return renderText('\n'.join(lines), module)
class ModuleDetails(BrowserView):
"""Represents the details of a module or package."""
def __init__(self, context, request):
super(ModuleDetails, self).__init__(context, request)
items = sorted(self.context.items())
self.text_files = []
self.zcml_files = []
self.modules = []
self.interfaces = []
self.classes = []
self.functions = []
for name, obj in items:
entry = {'name': name, 'url': absoluteURL(obj, self.request)}
if IFunctionDocumentation.providedBy(obj):
entry['doc'] = formatDocString(
obj.getDocString(), self.context.getPath())
entry['signature'] = obj.getSignature()
self.functions.append(entry)
elif IModuleDocumentation.providedBy(obj):
entry['doc'] = formatDocString(
obj.getDocString(), obj.getPath(), True)
self.modules.append(entry)
elif IInterface.providedBy(obj):
entry['path'] = getPythonPath(removeAllProxies(obj))
entry['doc'] = formatDocString(
obj.__doc__, obj.__module__, True)
self.interfaces.append(entry)
elif IClassDocumentation.providedBy(obj):
entry['doc'] = formatDocString(
obj.getDocString(), self.context.getPath(), True)
self.classes.append(entry)
elif IZCMLFile.providedBy(obj):
self.zcml_files.append(entry)
elif ITextFile.providedBy(obj):
self.text_files.append(entry)
def getAPIDocRootURL(self):
return findAPIDocumentationRootURL(self.context, self.request)
def getDoc(self):
"""Get the doc string of the module, formatted as HTML."""
return formatDocString(
self.context.getDocString(), self.context.getPath())
def getPath(self):
"""Return the path to the module"""
return self.context.getPath()
def isPackage(self):
"""Return true if this module is a package"""
return self.context.isPackage()
def getModuleInterfaces(self):
"""Return entries about interfaces the module provides"""
entries = []
for iface in self.context.getDeclaration():
entries.append({
'name': iface.__name__,
'path': getPythonPath(removeAllProxies(iface))
})
return entries
def getModules(self):
"""Return entries about contained modules and subpackages"""
return self.modules
def getInterfaces(self):
"""Return entries about interfaces declared by the module"""
return self.interfaces
def getClasses(self):
"""Return entries about classes declared by the module"""
return self.classes
def getTextFiles(self):
"""Return entries about text files contained in the package"""
return self.text_files
def getZCMLFiles(self):
"""Return entries about ZCML files contained in the package"""
return self.zcml_files
def getFunctions(self):
"""Return entries about functions declared by the package"""
return self.functions | 0.627723 | 0.122786 |
__author__ = 'ipetrash'
import functools
import logging
import os
import sys
import re
import time
from pathlib import Path
from typing import Callable, List
from telegram import Update
from telegram.ext import Updater, CallbackContext, Handler, Defaults
import config
def get_logger(file_name: str, dir_name='logs'):
    """Create (or return) a DEBUG logger writing to stdout and a log file.

    :param file_name: any path; only its base name is used, with '.log'
                      appended, inside *dir_name*.
    :param dir_name: directory for log files, created if missing.
    """
    log_dir = Path(dir_name).resolve()
    log_dir.mkdir(parents=True, exist_ok=True)

    log_path = str(log_dir / Path(file_name).resolve().name) + '.log'

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    # BUG FIX: repeated calls used to attach new handlers to the same
    # module-level logger, duplicating every emitted record.  Configure the
    # logger only once and return it as-is afterwards.
    if log.handlers:
        return log

    formatter = logging.Formatter(
        '[%(asctime)s] %(filename)s[LINE:%(lineno)d] %(levelname)-8s %(message)s'
    )

    fh = logging.FileHandler(log_path, encoding='utf-8')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)

    ch = logging.StreamHandler(stream=sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    log.addHandler(fh)
    log.addHandler(ch)

    return log
def log_func(log: logging.Logger):
    """Decorator factory: log chat/user/message details before each handler call.

    :param log: logger that receives one DEBUG record per handled update.
    """
    def actual_decorator(func):
        @functools.wraps(func)
        def wrapper(update: Update, context: CallbackContext):
            if update:
                chat_id = user_id = first_name = last_name = username = language_code = None
                if update.effective_chat:
                    chat_id = update.effective_chat.id
                if update.effective_user:
                    user_id = update.effective_user.id
                    first_name = update.effective_user.first_name
                    last_name = update.effective_user.last_name
                    username = update.effective_user.username
                    language_code = update.effective_user.language_code
                # effective_message / callback_query can be absent depending
                # on the update type; the broad excepts keep logging
                # strictly best-effort and never block the handler.
                try:
                    message = update.effective_message.text
                except:
                    message = ''
                try:
                    query_data = update.callback_query.data
                except:
                    query_data = ''
                msg = f'[chat_id={chat_id}, user_id={user_id}, ' \
                      f'first_name={first_name!r}, last_name={last_name!r}, ' \
                      f'username={username!r}, language_code={language_code}, ' \
                      f'message={message!r}, query_data={query_data!r}]'
                msg = func.__name__ + msg
                log.debug(msg)
            # The wrapped handler runs regardless of what was logged.
            return func(update, context)
        return wrapper
    return actual_decorator
def reply_error(log: logging.Logger, update: Update, context: CallbackContext):
    """Log the handler exception (with traceback) and notify the user."""
    log.error('Error: %s\nUpdate: %s', context.error, update, exc_info=context.error)
    if update:
        # Only reply when the error is tied to an actual incoming update.
        update.effective_message.reply_text(config.ERROR_TEXT)
def fill_string_pattern(pattern: re.Pattern, *args) -> str:
    """Build a concrete string from a regex by substituting *args* for groups.

    Anchors (^/$) are stripped from the pattern text and every parenthesized
    group becomes a '{}' placeholder filled via str.format.
    """
    raw = pattern.pattern.strip('^$')
    template = re.sub(r'\(.+?\)', '{}', raw)
    return template.format(*args)
def start_bot(
        log: logging.Logger,
        handlers: List[Handler],
        before_start_func: Callable[[Updater], None] = None,
        **updater_kwargs,
):
    """Configure the Updater, register *handlers* and poll until stopped.

    :param log: logger used for lifecycle messages.
    :param handlers: telegram handlers added to the dispatcher in order.
    :param before_start_func: optional hook called with the Updater just
                              before polling starts.
    :param updater_kwargs: extra keyword arguments forwarded to Updater.
    """
    cpu_count = os.cpu_count()
    # One worker thread per CPU core for the dispatcher's run_async pool.
    workers = cpu_count
    log.debug(f'System: CPU_COUNT={cpu_count}, WORKERS={workers}')

    log.debug('Start')

    # Create the EventHandler and pass it your bot's token.
    updater = Updater(
        config.TOKEN,
        workers=workers,
        defaults=Defaults(run_async=True),
        **updater_kwargs,
    )

    # Get the dispatcher to register handlers
    dp = updater.dispatcher
    for handler in handlers:
        dp.add_handler(handler)

    # Handle all errors
    dp.add_error_handler(lambda update, context: reply_error(log, update, context))

    if before_start_func:
        before_start_func(updater)

    # Start the Bot
    updater.start_polling()

    # Run the bot until the you presses Ctrl-C or the process receives SIGINT,
    # SIGTERM or SIGABRT. This should be used most of the time, since
    # start_polling() is non-blocking and will stop the bot gracefully.
    updater.idle()

    log.debug('Finish')
def run_main(main_func: Callable, log: logging.Logger, timeout=15):
    """Run *main_func* forever, restarting it after *timeout* seconds.

    :param main_func: bot entry point; any exception it raises is logged
                      and the function is restarted.
    :param log: logger used for the crash report and restart notice.
    :param timeout: seconds to wait before each restart.
    """
    while True:
        try:
            main_func()
        # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit, making the bot impossible to stop with Ctrl-C.
        except Exception:
            log.exception('')

        log.info(f'Restarting the bot after {timeout} seconds')
        time.sleep(timeout)
__author__ = 'ipetrash'
import functools
import logging
import os
import sys
import re
import time
from pathlib import Path
from typing import Callable, List
from telegram import Update
from telegram.ext import Updater, CallbackContext, Handler, Defaults
import config
def get_logger(file_name: str, dir_name='logs'):
    """Create (or return) a DEBUG logger writing to stdout and a log file.

    :param file_name: any path; only its base name is used, with '.log'
                      appended, inside *dir_name*.
    :param dir_name: directory for log files, created if missing.
    """
    log_dir = Path(dir_name).resolve()
    log_dir.mkdir(parents=True, exist_ok=True)

    log_path = str(log_dir / Path(file_name).resolve().name) + '.log'

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    # BUG FIX: repeated calls used to attach new handlers to the same
    # module-level logger, duplicating every emitted record.  Configure the
    # logger only once and return it as-is afterwards.
    if log.handlers:
        return log

    formatter = logging.Formatter(
        '[%(asctime)s] %(filename)s[LINE:%(lineno)d] %(levelname)-8s %(message)s'
    )

    fh = logging.FileHandler(log_path, encoding='utf-8')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)

    ch = logging.StreamHandler(stream=sys.stdout)
    ch.setLevel(logging.DEBUG)
    ch.setFormatter(formatter)

    log.addHandler(fh)
    log.addHandler(ch)

    return log
def log_func(log: logging.Logger):
def actual_decorator(func):
@functools.wraps(func)
def wrapper(update: Update, context: CallbackContext):
if update:
chat_id = user_id = first_name = last_name = username = language_code = None
if update.effective_chat:
chat_id = update.effective_chat.id
if update.effective_user:
user_id = update.effective_user.id
first_name = update.effective_user.first_name
last_name = update.effective_user.last_name
username = update.effective_user.username
language_code = update.effective_user.language_code
try:
message = update.effective_message.text
except:
message = ''
try:
query_data = update.callback_query.data
except:
query_data = ''
msg = f'[chat_id={chat_id}, user_id={user_id}, ' \
f'first_name={first_name!r}, last_name={last_name!r}, ' \
f'username={username!r}, language_code={language_code}, ' \
f'message={message!r}, query_data={query_data!r}]'
msg = func.__name__ + msg
log.debug(msg)
return func(update, context)
return wrapper
return actual_decorator
def reply_error(log: logging.Logger, update: Update, context: CallbackContext):
log.error('Error: %s\nUpdate: %s', context.error, update, exc_info=context.error)
if update:
update.effective_message.reply_text(config.ERROR_TEXT)
def fill_string_pattern(pattern: re.Pattern, *args) -> str:
pattern = pattern.pattern
pattern = pattern.strip('^$')
return re.sub(r'\(.+?\)', '{}', pattern).format(*args)
def start_bot(
log: logging.Logger,
handlers: List[Handler],
before_start_func: Callable[[Updater], None] = None,
**updater_kwargs,
):
cpu_count = os.cpu_count()
workers = cpu_count
log.debug(f'System: CPU_COUNT={cpu_count}, WORKERS={workers}')
log.debug('Start')
# Create the EventHandler and pass it your bot's token.
updater = Updater(
config.TOKEN,
workers=workers,
defaults=Defaults(run_async=True),
**updater_kwargs,
)
# Get the dispatcher to register handlers
dp = updater.dispatcher
for handler in handlers:
dp.add_handler(handler)
# Handle all errors
dp.add_error_handler(lambda update, context: reply_error(log, update, context))
if before_start_func:
before_start_func(updater)
# Start the Bot
updater.start_polling()
# Run the bot until the you presses Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT. This should be used most of the time, since
# start_polling() is non-blocking and will stop the bot gracefully.
updater.idle()
log.debug('Finish')
def run_main(main_func: Callable, log: logging.Logger, timeout=15):
    """Run *main_func* forever, restarting it after *timeout* seconds.

    :param main_func: bot entry point; any exception it raises is logged
                      and the function is restarted.
    :param log: logger used for the crash report and restart notice.
    :param timeout: seconds to wait before each restart.
    """
    while True:
        try:
            main_func()
        # BUG FIX: a bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit, making the bot impossible to stop with Ctrl-C.
        except Exception:
            log.exception('')

        log.info(f'Restarting the bot after {timeout} seconds')
        time.sleep(timeout)
import numpy as np
from numba import njit
from json_checker import Checker, And
from typing import Dict, Union, Tuple
import xarray as xr
from pandora.img_tools import shift_sec_img
from . import aggregation
from scipy.ndimage.filters import median_filter
@aggregation.AbstractAggregation.register_subclass('cbca')
class CrossBasedCostAggregation(aggregation.AbstractAggregation):
    """
    CrossBasedCostAggregation class, allows to perform the aggregation step
    """
    # Default configuration, do not change these values
    _CBCA_INTENSITY = 30.
    _CBCA_DISTANCE = 5

    def __init__(self, **cfg: dict):
        """
        :param cfg: optional configuration, {'cbca_intensity': value, 'cbca_distance': value}
        :type cfg: dict
        """
        self.cfg = self.check_conf(**cfg)
        self._cbca_intensity = self.cfg['cbca_intensity']
        self._cbca_distance = self.cfg['cbca_distance']

    def check_conf(self, **cfg: Union[str, float, int]) -> Dict[str, Union[str, float, int]]:
        """
        Add default values to the dictionary if there are missing elements and check if the dictionary is correct

        :param cfg: aggregation configuration
        :type cfg: dict
        :return cfg: aggregation configuration updated
        :rtype: dict
        """
        # Give the default value if the required element is not in the configuration
        if 'cbca_intensity' not in cfg:
            cfg['cbca_intensity'] = self._CBCA_INTENSITY
        if 'cbca_distance' not in cfg:
            cfg['cbca_distance'] = self._CBCA_DISTANCE

        schema = {
            # BUG FIX: the predicate used to be `lambda x: 'cbca'`, which
            # returns the truthy constant 'cbca' for any input and therefore
            # accepted every method name. Compare against 'cbca' instead.
            "aggregation_method": And(str, lambda x: x == 'cbca'),
            "cbca_intensity": And(float, lambda x: x > 0),
            "cbca_distance": And(int, lambda x: x > 0)
        }

        checker = Checker(schema)
        checker.validate(cfg)
        return cfg

    def desc(self):
        """
        Describes the aggregation method
        """
        print('CrossBasedCostAggregation method')

    def cost_volume_aggregation(self, img_ref: xr.Dataset, img_sec: xr.Dataset, cv: xr.Dataset) -> xr.Dataset:
        """
        Aggregated the cost volume with Cross-Based Cost Aggregation, using the pipeline define in
        Zhang, K., Lu, J., & Lafruit, G. (2009).
        Cross-based local stereo matching using orthogonal integral images.
        IEEE transactions on circuits and systems for video technology, 19(7), 1073-1079.

        :param img_ref: reference Dataset image containing the im (2D row, col) and optional msk DataArrays
        :type img_ref: xarray.Dataset
        :param img_sec: secondary Dataset image containing the im (2D row, col) and optional msk DataArrays
        :type img_sec: xarray.Dataset
        :param cv: cost volume dataset with the cost_volume (row, col, disp) and confidence_measure DataArrays
        :type cv: xarray.Dataset
        :return: the cost volume with its cost_volume DataArray aggregated in place
        :rtype: xarray.Dataset
        """
        subpix = cv.attrs['subpixel']

        # Contains the shifted secondary images
        img_sec_shift = shift_sec_img(img_sec, subpix)

        # Compute the reference cross support. Apply a 3x3 median filter to the input image
        cross_ref = cross_support(median_filter(img_ref['im'].data, size=(3, 3), mode="nearest"),
                                  self._cbca_distance, self._cbca_intensity)

        # Compute the secondary cross support. Apply a 3x3 median filter to the input image
        cross_sec = []
        for i in range(0, len(img_sec_shift)):
            cross_sec.append(cross_support(median_filter(img_sec_shift[i]['im'].data, size=(3, 3), mode="nearest"),
                                           self._cbca_distance, self._cbca_intensity))

        ny_, nx_, nb_disp = cv['cost_volume'].shape

        # Allocate the numpy aggregated cost volume cv = (disp, col, row), for efficient memory management
        agg = np.zeros((nb_disp, nx_, ny_), dtype=np.float32)

        # Add invalid costs (i.e = np.nan ) to the output aggregated cost volume (because the step 1 of cbca do not
        # propagate invalid pixels, we need to retrieve them at the end of aggregation ).
        # nan + x = nan and nan * 0 = nan, so invalid cells stay nan while
        # valid cells are reset to 0. Much faster than :
        # id_nan = np.isnan(cv['cost_volume'].data)
        # compute the aggregation ..
        # cv['cost_volume'].data[id_nan] = np.nan
        agg += np.swapaxes(cv['cost_volume'].data, 0, 2)
        agg *= 0

        disparity_range = cv.coords['disp'].data
        range_col = np.arange(0, nx_)

        for d in range(nb_disp):
            # Select which shifted secondary image matches this (sub-pixel) disparity
            i_sec = int((disparity_range[d] % 1) * cv.attrs['subpixel'])

            # Step 1 : horizontal integral image
            step1 = cbca_step_1(cv['cost_volume'].data[:, :, d])

            range_col_sec = range_col + disparity_range[d]
            # Keep only the columns whose secondary correspondent is inside the image
            valid_index = np.where((range_col_sec >= 0) & (range_col_sec < cross_sec[i_sec].shape[1]))

            # Step 2 : horizontal matching cost
            step2, sum2 = cbca_step_2(step1, cross_ref, cross_sec[i_sec], range_col[valid_index],
                                      range_col_sec[valid_index].astype(int))

            # Step 3 : vertical integral image
            step3 = cbca_step_3(step2)

            # Step 4 : aggregate cost volume
            step4, sum4 = cbca_step_4(step3, sum2, cross_ref, cross_sec[i_sec], range_col[valid_index],
                                      range_col_sec[valid_index].astype(int))

            # Added the pixel anchor pixel to the number of support pixels used during the aggregation
            sum4 += 1
            # Add the aggregate cost to the output
            agg[d, :, :] += np.swapaxes(step4, 0, 1)
            # Normalize the aggregated cost
            agg[d, :, :] /= np.swapaxes(sum4, 0, 1)

        cv['cost_volume'].data = np.swapaxes(agg, 0, 2)
        cv.attrs['aggregation'] = 'cbca'

        # Maximal cost of the cost volume after agregation
        cmax = cv.attrs['cmax'] * ((self._cbca_distance * 2) - 1) ** 2
        cv.attrs['cmax'] = cmax

        return cv
@njit('f4[:, :](f4[:, :])', cache=True)
def cbca_step_1(cv: np.ndarray) -> np.ndarray:
    """
    Giving the matching cost for one disparity, build a horizontal integral image storing the cumulative row sum,
    S_h(x, y) = S_h(x-1, y) + cv(x, y)

    :param cv: cost volume for the current disparity
    :type cv: 2D np.array (row, col) dtype = np.float32
    :return: the horizontal integral image, step 1
    :rtype: 2D np.array (row, col + 1) dtype = np.float32
    """
    ny_, nx_ = cv.shape
    # Allocate the intermediate cost volume S_h
    # added a column to manage the case in the step 2 : x - left_arm_length -1 = -1
    step1 = np.zeros((ny_, nx_ + 1), dtype=np.float32)
    for y in range(ny_):
        for x in range(nx_):
            if x - 1 == -1:
                step1[y, x] = 0
            # Do not propagate nan.
            # For x == 0, step1[y, x - 1] wraps to the extra last column,
            # which is never written and stays 0, so the running sum starts
            # from 0 on each row.
            if not np.isnan(cv[y, x]):
                step1[y, x] = step1[y, x - 1] + cv[y, x]
            else:
                # An invalid (nan) cost resets the cumulative sum to 0 so it
                # does not leak into neighbouring spans.
                step1[y, x] = 0
    return step1
@njit('(f4[:, :], i2[:, :, :], i2[:, :, :], i8[:], i8[:])', cache=True)
def cbca_step_2(step1: np.ndarray, cross_ref: np.ndarray, cross_sec: np.ndarray, range_col: np.ndarray,
                range_col_sec: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """
    Giving the horizontal integral image, computed the horizontal matching cost for one disparity,
    E_h(x, y) = S_h(x + right_arm_length, y) - S_h(x - left_arm_length -1, y)

    :param step1: horizontal integral image, from the cbca_step1, with an extra column that contains 0
    :type step1: 2D np.array (row, col + 1) dtype = np.float32
    :param cross_ref: cross support of the reference image
    :type cross_ref: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param cross_sec: cross support of the secondary image
    :type cross_sec: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param range_col: reference column for the current disparity (i.e : np.arrange(nb columns), where the correspondent
        in the right image is reachable)
    :type range_col: 1D np.array
    :param range_col_sec: secondary column for the current disparity (i.e : np.arrange(nb columns) - disparity, where
        column - disparity >= 0 and <= nb columns)
    :type range_col_sec: 1D np.array
    :return: the horizontal matching cost for the current disparity, and the number of support pixels used for the
        step 2
    :rtype: tuple (2D np.array (row, col) dtype = np.float32, 2D np.array (row, col) dtype = np.float32)
    """
    ny_, nx_ = step1.shape
    # Allocate the intermediate cost volume E_h
    # , remove the extra column from the step 1
    step2 = np.zeros((ny_, nx_ - 1), dtype=np.float32)
    sum_step2 = np.zeros((ny_, nx_ - 1), dtype=np.float32)
    for y in range(step1.shape[0]):
        for x in range(range_col.shape[0]):
            # Combined support: each arm is clipped to the shorter of the
            # reference and secondary horizontal arms.
            right = min(cross_ref[y, range_col[x], 1], cross_sec[y, range_col_sec[x], 1])
            left = min(cross_ref[y, range_col[x], 0], cross_sec[y, range_col_sec[x], 0])
            # Difference of integral sums = cost summed over [x-left, x+right];
            # step1's extra trailing zero column makes index -1 safe for x=left=0.
            step2[y, range_col[x]] = step1[y, range_col[x] + right] - step1[y, range_col[x] - left - 1]
            # Number of support pixels on this row (anchor pixel excluded).
            sum_step2[y, range_col[x]] += (right + left)
    return step2, sum_step2
@njit('f4[:, :](f4[:, :])', cache=True)
def cbca_step_3(step2: np.ndarray) -> np.ndarray:
    """
    Giving the horizontal matching cost, build a vertical integral image for one disparity,
    S_v = S_v(x, y - 1) + E_h(x, y)

    :param step2: horizontal matching cost, from the cbca_step2
    :type step2: 2D np.array (row, col) dtype = np.float32
    :return: the vertical integral image for the current disparity
    :rtype: 2D np.array (row + 1, col) dtype = np.float32
    """
    # NOTE(review): the annotation used to say xr.DataArray, but the numba
    # signature and both callers pass a 2D float32 np.ndarray.
    ny_, nx_ = step2.shape
    # Allocate the intermediate cost volume S_v
    # added a row to manage the case in the step 4 : y - up_arm_length -1 = -1
    step3 = np.zeros((ny_ + 1, nx_), dtype=np.float32)
    # First row has no predecessor: copy it as-is.
    step3[0, :] = step2[0, :]
    for y in range(1, ny_):
        for x in range(nx_):
            step3[y, x] = step3[y - 1, x] + step2[y, x]
    return step3
@njit('(f4[:, :], f4[:, :], i2[:, :, :], i2[:, :, :], i8[:], i8[:])', cache=True)
def cbca_step_4(step3: np.ndarray, sum2: np.ndarray, cross_ref: np.ndarray, cross_sec: np.ndarray,
                range_col: np.ndarray, range_col_sec: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """
    Giving the vertical integral image, build the fully aggregated matching cost for one disparity,
    E = S_v(x, y + bottom_arm_length) - S_v(x, y - top_arm_length - 1)

    :param step3: vertical integral image, from the cbca_step3, with an extra row that contains 0
    :type step3: 2D np.array (row + 1, col) dtype = np.float32
    :param sum2: the number of support pixels used for the step 2
    :type sum2: 2D np.array (row, col) dtype = np.float32
    :param cross_ref: cross support of the reference image
    :type cross_ref: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param cross_sec: cross support of the secondary image
    :type cross_sec: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param range_col: reference column for the current disparity (i.e : np.arrange(nb columns), where the correspondent
        in the right image is reachable)
    :type range_col: 1D np.array
    :param range_col_sec: secondary column for the current disparity (i.e : np.arrange(nb columns) - disparity, where
        column - disparity >= 0 and <= nb columns)
    :type range_col_sec: 1D np.array
    :return: the fully aggregated matching cost, and the total number of support pixels used for the aggregation
    :rtype: tuple(2D np.array (row , col) dtype = np.float32, 2D np.array (row , col) dtype = np.float32)
    """
    ny_, nx_ = step3.shape
    # Allocate the final cost volume E
    # , remove the extra row from the step 3
    step4 = np.zeros((ny_ - 1, nx_), dtype=np.float32)
    # Start from the horizontal support counts and add the vertical ones below.
    sum4 = np.copy(sum2)
    for y in range(step4.shape[0]):
        for x in range(range_col.shape[0]):
            # Vertical arms clipped to the shorter of reference/secondary arms.
            top = min(cross_ref[y, range_col[x], 2], cross_sec[y, range_col_sec[x], 2])
            bot = min(cross_ref[y, range_col[x], 3], cross_sec[y, range_col_sec[x], 3])
            # Difference of vertical integral sums over rows [y-top, y+bot];
            # step3's extra trailing zero row makes index -1 safe for y=top=0.
            step4[y, range_col[x]] = step3[y + bot, range_col[x]] - step3[y - top - 1, range_col[x]]
            sum4[y, range_col[x]] += (top + bot)
            # Accumulate the horizontal support counts of the rows merged in
            # vertically, so sum4 covers the whole cross-shaped region.
            if top != 0:
                sum4[y, range_col[x]] += np.sum(sum2[y - top:y, range_col[x]])
            if bot != 0:
                sum4[y, range_col[x]] += np.sum(sum2[y + 1:y + bot + 1, range_col[x]])
    return step4, sum4
@njit('i2[:, :, :](f4[:, :], i2, f4)', cache=True)
def cross_support(image: np.ndarray, len_arms: int, intensity: float) -> np.ndarray:
    """
    Compute the cross support for an image: find the 4 arms of every pixel.

    An arm is extended while the absolute intensity difference with the anchor
    pixel stays below ``intensity``, up to ``len_arms - 1`` pixels.

    :param image: image
    :type image: 2D np.array (row, col) dtype = np.float32
    :param len_arms: maximal arms length
    :type len_arms: int16
    :param intensity: maximal intensity difference allowed inside an arm
    :type intensity: float32
    :return: the four arm lengths [left, right, top, bot] computed for each pixel
    :rtype: 3D np.array (row, col, [left, right, top, bot]), dtype=np.int16
    """
    ny_, nx_ = image.shape
    cross = np.zeros((ny_, nx_, 4), dtype=np.int16)
    for y in range(ny_):
        for x in range(nx_):
            left_len = 0
            for left in range(x - 1, max(x - len_arms, -1), -1):
                if abs(image[y, x] - image[y, left]) >= intensity:
                    break
                left_len += 1
            # enforces a minimum support region of 3x3 (arm of 1) away from the border
            cross[y, x, 0] = max(left_len, 1 * (x >= 1))
            right_len = 0
            for right in range(x + 1, min(x + len_arms, nx_)):
                if abs(image[y, x] - image[y, right]) >= intensity:
                    break
                right_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 1] = max(right_len, 1 * (x < nx_ - 1))
            up_len = 0
            for up in range(y - 1, max(y - len_arms, -1), -1):
                if abs(image[y, x] - image[up, x]) >= intensity:
                    break
                up_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 2] = max(up_len, 1 * (y >= 1))
            bot_len = 0
            for bot in range(y + 1, min(y + len_arms, ny_)):
                if abs(image[y, x] - image[bot, x]) >= intensity:
                    break
                bot_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 3] = max(bot_len, 1 * (y < ny_ - 1))
    return cross
from numba import njit
from json_checker import Checker, And
from typing import Dict, Union, Tuple
import xarray as xr
from pandora.img_tools import shift_sec_img
from . import aggregation
from scipy.ndimage.filters import median_filter
@aggregation.AbstractAggregation.register_subclass('cbca')
class CrossBasedCostAggregation(aggregation.AbstractAggregation):
    """
    CrossBasedCostAggregation class, allows to perform the aggregation step
    """
    # Default configuration, do not change these values
    _CBCA_INTENSITY = 30.
    _CBCA_DISTANCE = 5
    def __init__(self, **cfg: dict):
        """
        :param cfg: optional configuration, {'cbca_intensity': value, 'cbca_distance': value}
        :type cfg: dict
        """
        self.cfg = self.check_conf(**cfg)
        self._cbca_intensity = self.cfg['cbca_intensity']
        self._cbca_distance = self.cfg['cbca_distance']
    def check_conf(self, **cfg: Union[str, float, int]) -> Dict[str, Union[str, float, int]]:
        """
        Add default values to the dictionary if there are missing elements and check if the dictionary is correct
        :param cfg: aggregation configuration
        :type cfg: dict
        :return cfg: aggregation configuration updated
        :rtype: dict
        """
        # Give the default value if the required element is not in the configuration
        if 'cbca_intensity' not in cfg:
            cfg['cbca_intensity'] = self._CBCA_INTENSITY
        if 'cbca_distance' not in cfg:
            cfg['cbca_distance'] = self._CBCA_DISTANCE
        schema = {
            # Bug fix: the lambda previously returned the constant string 'cbca'
            # (always truthy), so any aggregation_method passed validation.
            "aggregation_method": And(str, lambda x: x == 'cbca'),
            "cbca_intensity": And(float, lambda x: x > 0),
            "cbca_distance": And(int, lambda x: x > 0)
        }
        checker = Checker(schema)
        checker.validate(cfg)
        return cfg
    def desc(self):
        """
        Describes the aggregation method
        """
        print('CrossBasedCostAggregation method')
    def cost_volume_aggregation(self, img_ref: xr.Dataset, img_sec: xr.Dataset, cv: xr.Dataset) -> xr.Dataset:
        """
        Aggregated the cost volume with Cross-Based Cost Aggregation, using the pipeline define in
        Zhang, K., Lu, J., & Lafruit, G. (2009).
        Cross-based local stereo matching using orthogonal integral images.
        IEEE transactions on circuits and systems for video technology, 19(7), 1073-1079.
        :param img_ref: reference Dataset image
        :type img_ref:
            xarray.Dataset containing :
                - im : 2D (row, col) xarray.DataArray
                - msk (optional): 2D (row, col) xarray.DataArray
        :param img_sec: secondary Dataset image
        :type img_sec:
            xarray.Dataset containing :
                - im : 2D (row, col) xarray.DataArray
                - msk (optional): 2D (row, col) xarray.DataArray
        :param cv: cost volume dataset
        :type cv:
            xarray.Dataset, with the data variables:
                - cost_volume 3D xarray.DataArray (row, col, disp)
                - confidence_measure 3D xarray.DataArray (row, col, indicator)
        :return: the cost volume aggregated in the dataset
        :rtype:
            xarray.Dataset, with the data variables:
                - cost_volume 3D xarray.DataArray (row, col, disp)
                - confidence_measure 3D xarray.DataArray (row, col, indicator)
        """
        subpix = cv.attrs['subpixel']
        # Contains the shifted secondary images
        img_sec_shift = shift_sec_img(img_sec, subpix)
        # Compute the reference cross support. Apply a 3x3 median filter to the input image
        cross_ref = cross_support(median_filter(img_ref['im'].data, size=(3, 3), mode="nearest"),
                                  self._cbca_distance, self._cbca_intensity)
        # Compute the secondary cross support. Apply a 3x3 median filter to the input image
        cross_sec = []
        for i in range(0, len(img_sec_shift)):
            cross_sec.append(cross_support(median_filter(img_sec_shift[i]['im'].data, size=(3, 3), mode="nearest"),
                                           self._cbca_distance, self._cbca_intensity))
        ny_, nx_, nb_disp = cv['cost_volume'].shape
        # Allocate the numpy aggregated cost volume cv = (disp, col, row), for efficient memory management
        agg = np.zeros((nb_disp, nx_, ny_), dtype=np.float32)
        # Add invalid costs (i.e = np.nan ) to the output aggregated cost volume (because the step 1 of cbca do not
        # propagate invalid pixels, we need to retrieve them at the end of aggregation )
        # Much faster than :
        # id_nan = np.isnan(cv['cost_volume'].data)
        # compute the aggregation ..
        # cv['cost_volume'].data[id_nan] = np.nan
        # (adding then multiplying by 0 keeps NaN cells NaN and zeroes everything else)
        agg += np.swapaxes(cv['cost_volume'].data, 0, 2)
        agg *= 0
        disparity_range = cv.coords['disp'].data
        range_col = np.arange(0, nx_)
        for d in range(nb_disp):
            # index of the shifted secondary image matching the subpixel part of the disparity
            i_sec = int((disparity_range[d] % 1) * cv.attrs['subpixel'])
            # Step 1 : horizontal integral image
            step1 = cbca_step_1(cv['cost_volume'].data[:, :, d])
            range_col_sec = range_col + disparity_range[d]
            valid_index = np.where((range_col_sec >= 0) & (range_col_sec < cross_sec[i_sec].shape[1]))
            # Step 2 : horizontal matching cost
            step2, sum2 = cbca_step_2(step1, cross_ref, cross_sec[i_sec], range_col[valid_index], range_col_sec[valid_index].astype(int))
            # Step 3 : vertical integral image
            step3 = cbca_step_3(step2)
            # Step 4 : aggregate cost volume
            step4, sum4 = cbca_step_4(step3, sum2, cross_ref, cross_sec[i_sec], range_col[valid_index], range_col_sec[valid_index].astype(int))
            # Added the anchor pixel to the number of support pixels used during the aggregation
            sum4 += 1
            # Add the aggregate cost to the output
            agg[d, :, :] += np.swapaxes(step4, 0, 1)
            # Normalize the aggregated cost
            agg[d, :, :] /= np.swapaxes(sum4, 0, 1)
        cv['cost_volume'].data = np.swapaxes(agg, 0, 2)
        cv.attrs['aggregation'] = 'cbca'
        # Maximal cost of the cost volume after aggregation
        cmax = cv.attrs['cmax'] * ((self._cbca_distance * 2) - 1) ** 2
        cv.attrs['cmax'] = cmax
        return cv
@njit('f4[:, :](f4[:, :])', cache=True)
def cbca_step_1(cv: np.ndarray) -> np.ndarray:
    """
    Giving the matching cost for one disparity, build a horizontal integral image storing the cumulative row sum,
    S_h(x, y) = S_h(x-1, y) + cv(x, y)
    :param cv: cost volume for the current disparity
    :type cv: 2D np.array (row, col) dtype = np.float32
    :return: the horizontal integral image, step 1
    :rtype: 2D np.array (row, col + 1) dtype = np.float32
    """
    ny_, nx_ = cv.shape
    # Allocate the intermediate cost volume S_h
    # added a column to manage the case in the step 2 : x - left_arm_length - 1 = -1
    # (the extra column is never written, so step1[y, -1] is always 0)
    step1 = np.zeros((ny_, nx_ + 1), dtype=np.float32)
    for y in range(ny_):
        for x in range(nx_):
            # Do not propagate nan: an invalid cost contributes 0 to the running sum.
            # For x == 0, step1[y, x - 1] reads the (always 0) extra last column,
            # which makes the previous special-case "if x - 1 == -1" branch dead code.
            if not np.isnan(cv[y, x]):
                step1[y, x] = step1[y, x - 1] + cv[y, x]
            else:
                step1[y, x] = 0
    return step1
@njit('(f4[:, :], i2[:, :, :], i2[:, :, :], i8[:], i8[:])', cache=True)
def cbca_step_2(step1: np.ndarray, cross_ref: np.ndarray, cross_sec: np.ndarray, range_col: np.ndarray,
                range_col_sec: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """
    Compute the horizontal matching cost for one disparity from the horizontal integral image,
    E_h(x, y) = S_h(x + right_arm_length, y) - S_h(x - left_arm_length - 1, y)
    :param step1: horizontal integral image, from the cbca_step1, with an extra column that contains 0
    :type step1: 2D np.array (row, col + 1) dtype = np.float32
    :param cross_ref: cross support of the reference image
    :type cross_ref: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param cross_sec: cross support of the secondary image
    :type cross_sec: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param range_col: reference columns reachable for the current disparity
    :type range_col: 1D np.array
    :param range_col_sec: corresponding secondary columns (column - disparity)
    :type range_col_sec: 1D np.array
    :return: the horizontal matching cost for the current disparity, and the number of support pixels used for the
        step 2
    :rtype: tuple (2D np.array (row, col) dtype = np.float32, 2D np.array (row, col) dtype = np.float32)
    """
    n_row, n_col_ext = step1.shape
    # Drop the extra column inherited from the step 1
    step2 = np.zeros((n_row, n_col_ext - 1), dtype=np.float32)
    sum_step2 = np.zeros((n_row, n_col_ext - 1), dtype=np.float32)
    for row in range(n_row):
        for idx in range(range_col.shape[0]):
            col = range_col[idx]
            col_sec = range_col_sec[idx]
            # Combined (smallest) horizontal arms of the reference and secondary pixels
            arm_right = min(cross_ref[row, col, 1], cross_sec[row, col_sec, 1])
            arm_left = min(cross_ref[row, col, 0], cross_sec[row, col_sec, 0])
            # col - arm_left - 1 may be -1: it then reads the extra zero column of step1
            step2[row, col] = step1[row, col + arm_right] - step1[row, col - arm_left - 1]
            sum_step2[row, col] += arm_right + arm_left
    return step2, sum_step2
@njit('f4[:, :](f4[:, :])', cache=True)
def cbca_step_3(step2: np.ndarray) -> np.ndarray:
    """
    Giving the horizontal matching cost, build a vertical integral image for one disparity,
    S_v = S_v(x, y - 1) + E_h(x, y)
    :param step2: horizontal matching cost, from the cbca_step2
    :type step2: 2D np.array (row, col) dtype = np.float32
    :return: the vertical integral image for the current disparity
    :rtype: 2D np.array (row + 1, col) dtype = np.float32
    """
    ny_, nx_ = step2.shape
    # Allocate the intermediate cost volume S_v
    # added a row to manage the case in the step 4 : y - up_arm_length -1 = -1
    # (the extra row is never written, so step3[-1] is always 0)
    step3 = np.zeros((ny_ + 1, nx_), dtype=np.float32)
    step3[0, :] = step2[0, :]
    for y in range(1, ny_):
        for x in range(nx_):
            # cumulative column sum of the horizontal matching costs
            step3[y, x] = step3[y - 1, x] + step2[y, x]
    return step3
@njit('(f4[:, :], f4[:, :], i2[:, :, :], i2[:, :, :], i8[:], i8[:])', cache=True)
def cbca_step_4(step3: np.ndarray, sum2: np.ndarray, cross_ref: np.ndarray, cross_sec: np.ndarray,
                range_col: np.ndarray, range_col_sec: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
    """
    Giving the vertical integral image, build the fully aggregated matching cost for one disparity,
    E = S_v(x, y + bottom_arm_length) - S_v(x, y - top_arm_length - 1)
    :param step3: vertical integral image, from the cbca_step3, with an extra row that contains 0
    :type step3: 2D np.array (row + 1, col) dtype = np.float32
    :param sum2: the number of support pixels used for the step 2
    :type sum2: 2D np.array (row, col) dtype = np.float32
    :param cross_ref: cross support of the reference image
    :type cross_ref: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param cross_sec: cross support of the secondary image
    :type cross_sec: 3D np.array (row, col, [left, right, top, bot]) dtype=np.int16
    :param range_col: reference column for the current disparity (i.e : np.arange(nb columns), where the correspondent
        in the right image is reachable)
    :type range_col: 1D np.array
    :param range_col_sec: secondary column for the current disparity (i.e : np.arange(nb columns) - disparity, where
        column - disparity >= 0 and <= nb columns)
    :type range_col_sec: 1D np.array
    :return: the fully aggregated matching cost, and the total number of support pixels used for the aggregation
    :rtype: tuple(2D np.array (row , col) dtype = np.float32, 2D np.array (row , col) dtype = np.float32)
    """
    ny_, nx_ = step3.shape
    # Allocate the final cost volume E
    # , remove the extra row from the step 3
    step4 = np.zeros((ny_ - 1, nx_), dtype=np.float32)
    sum4 = np.copy(sum2)
    for y in range(step4.shape[0]):
        for x in range(range_col.shape[0]):
            # combined (smallest) vertical arms of the reference and secondary pixels
            top = min(cross_ref[y, range_col[x], 2], cross_sec[y, range_col_sec[x], 2])
            bot = min(cross_ref[y, range_col[x], 3], cross_sec[y, range_col_sec[x], 3])
            # when y - top - 1 == -1, the negative index reads the extra (always 0) last row of step3
            step4[y, range_col[x]] = step3[y + bot, range_col[x]] - step3[y - top - 1, range_col[x]]
            sum4[y, range_col[x]] += (top + bot)
            # accumulate the horizontal support counts of every row covered by the vertical arms
            if top != 0:
                sum4[y, range_col[x]] += np.sum(sum2[y-top:y, range_col[x]])
            if bot != 0:
                sum4[y, range_col[x]] += np.sum(sum2[y+1:y+bot+1, range_col[x]])
    return step4, sum4
@njit('i2[:, :, :](f4[:, :], i2, f4)', cache=True)
def cross_support(image: np.ndarray, len_arms: int, intensity: float) -> np.ndarray:
    """
    Compute the cross support for an image: find the 4 arms of every pixel.

    An arm is extended while the absolute intensity difference with the anchor
    pixel stays below ``intensity``, up to ``len_arms - 1`` pixels.

    :param image: image
    :type image: 2D np.array (row, col) dtype = np.float32
    :param len_arms: maximal arms length
    :type len_arms: int16
    :param intensity: maximal intensity difference allowed inside an arm
    :type intensity: float32
    :return: the four arm lengths [left, right, top, bot] computed for each pixel
    :rtype: 3D np.array (row, col, [left, right, top, bot]), dtype=np.int16
    """
    ny_, nx_ = image.shape
    cross = np.zeros((ny_, nx_, 4), dtype=np.int16)
    for y in range(ny_):
        for x in range(nx_):
            left_len = 0
            for left in range(x - 1, max(x - len_arms, -1), -1):
                if abs(image[y, x] - image[y, left]) >= intensity:
                    break
                left_len += 1
            # enforces a minimum support region of 3x3 (arm of 1) away from the border
            cross[y, x, 0] = max(left_len, 1 * (x >= 1))
            right_len = 0
            for right in range(x + 1, min(x + len_arms, nx_)):
                if abs(image[y, x] - image[y, right]) >= intensity:
                    break
                right_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 1] = max(right_len, 1 * (x < nx_ - 1))
            up_len = 0
            for up in range(y - 1, max(y - len_arms, -1), -1):
                if abs(image[y, x] - image[up, x]) >= intensity:
                    break
                up_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 2] = max(up_len, 1 * (y >= 1))
            bot_len = 0
            for bot in range(y + 1, min(y + len_arms, ny_)):
                if abs(image[y, x] - image[bot, x]) >= intensity:
                    break
                bot_len += 1
            # enforces a minimum support region of 3x3
            cross[y, x, 3] = max(bot_len, 1 * (y < ny_ - 1))
    return cross
from common.exceptions import DSBaseException
class NodeDoesNotExist(DSBaseException):
    def __init__(self, key: str):
        """
        Raised when no node with the given key is present in the graph.

        :param key: key name of the node
        :type key: str
        """
        super().__init__("Node with key [%s] does not exist in the graph." % key)
class AdjacentNodeDoesNotExist(DSBaseException):
    def __init__(self, node: str, adjacent_node: str):
        """
        Adjacent node does not exist error: raised when ``node`` is not adjacent
        to ``adjacent_node``.

        :param node: key name of the node
        :type node: str
        :param adjacent_node: key name of the adjacent node
        :type adjacent_node: str
        """
        message = "Node with key [%s] is not an adjacent node to [%s]" % (node, adjacent_node)
        super(AdjacentNodeDoesNotExist, self).__init__(message)
class DuplicateNodeNotAllowed(DSBaseException):
    def __init__(self, key: str):
        """
        Raised when inserting a node whose key already exists in the graph.

        :param key: key name of the node
        :type key: str
        """
        super().__init__("Node with key [%s] already exist in the graph." % key)
class UnsupportedGraphType(DSBaseException):
    def __init__(self):
        """
        Raised when the requested graph type is not supported.
        """
        super().__init__("Graph type is not supported.")
class DirectGraphDoesNotSupportAPI(DSBaseException):
    def __init__(self, reason: str):
        """
        Directed graph does not support this API.

        :param reason: the reason it is not supported.
        :type reason: str
        """
        message = "Directed graph does not support this API - %s ." % reason
        super(DirectGraphDoesNotSupportAPI, self).__init__(message)
class UnDirectGraphDoesNotSupportAPI(DSBaseException):
    def __init__(self, reason: str):
        """
        Undirected graph does not support this API.

        :param reason: the reason it is not supported.
        :type reason: str
        """
        message = "Undirected graph does not support this API - %s ." % reason
        super(UnDirectGraphDoesNotSupportAPI, self).__init__(message)
class NodeDoesNotExist(DSBaseException):
def __init__(self, key: str):
"""
Node does not exist error.
:param key: key name of the node
:type key: str
"""
message = "Node with key [%s] does not exist in the graph." % key
super(NodeDoesNotExist, self).__init__(message)
class AdjacentNodeDoesNotExist(DSBaseException):
    def __init__(self, node: str, adjacent_node: str):
        """
        Adjacent node does not exist error: raised when ``node`` is not adjacent
        to ``adjacent_node``.

        :param node: key name of the node
        :type node: str
        :param adjacent_node: key name of the adjacent node
        :type adjacent_node: str
        """
        message = "Node with key [%s] is not an adjacent node to [%s]" % (node, adjacent_node)
        super(AdjacentNodeDoesNotExist, self).__init__(message)
class DuplicateNodeNotAllowed(DSBaseException):
    def __init__(self, key: str):
        """
        Raised when inserting a node whose key already exists in the graph.

        :param key: key name of the node
        :type key: str
        """
        super().__init__("Node with key [%s] already exist in the graph." % key)
class UnsupportedGraphType(DSBaseException):
    def __init__(self):
        """
        Raised when the requested graph type is not supported.
        """
        super().__init__("Graph type is not supported.")
class DirectGraphDoesNotSupportAPI(DSBaseException):
    def __init__(self, reason: str):
        """
        Directed graph does not support this API.

        :param reason: the reason it is not supported.
        :type reason: str
        """
        message = "Directed graph does not support this API - %s ." % reason
        super(DirectGraphDoesNotSupportAPI, self).__init__(message)
class UnDirectGraphDoesNotSupportAPI(DSBaseException):
    def __init__(self, reason: str):
        """
        Undirected graph does not support this API.

        :param reason: the reason it is not supported.
        :type reason: str
        """
        message = "Undirected graph does not support this API - %s ." % reason
        super(UnDirectGraphDoesNotSupportAPI, self).__init__(message)
from typing import List
from z3 import z3
from smsymer import utils
from .immutableStorageTracker import ImmutableStorageTracker
from smsymer.analyzer.tool import RefTracker
from smsymer.evm import Instruction, Stack
class TimestampDepTracker(RefTracker):
    """Track a block-timestamp value on the EVM stack and flag a
    timestamp-dependency bug when it influences a conditional jump."""
    def __init__(self, addr: int, height: int):
        super().__init__(addr, height)
        # address of the instruction that created the dependency (-1 = none yet)
        self.dependency_addr = -1
    @property
    def is_buggy(self):
        # buggy as soon as the tracked timestamp has been used in a JUMPI condition
        return self.used is True
    @property
    def root_cause_addr(self):
        return self.addr
    def op(self, instruction: Instruction, stack: Stack, immutable_storage_references: List[ImmutableStorageTracker]):
        # cases that the timestamp is used in conditional jump
        if instruction.opcode == "JUMPI":
            # check whether the timestamp is only compared against fixed values
            if utils.is_symbol(stack[-2]):
                for z3_ref in utils.extract_z3_ref(stack[-2]):
                    if utils.is_z3_constant(z3_ref):
                        # a constant inside the z3 expression
                        continue
                    elif z3.eq(z3_ref, z3.Int("IHs")):
                        # the timestamp itself
                        continue
                    else:
                        for ref in immutable_storage_references:
                            if ref.contains(len(stack) - 2) and utils.is_symbol(ref.storage_value) and utils.in_list(
                                    utils.extract_z3_ref(ref.storage_value), z3_ref):
                                break
                        else:
                            # not an immutable storage variable: real dependency possible
                            break
                        # it is one of the immutable storage variables
                        continue
                else:
                    # every value involved in the comparison is constant (except the timestamp itself)
                    self.pop(instruction.input_amount, len(stack))
                    return
            not_used_before = not self.used
            self.use(instruction, len(stack))
            if not_used_before and self.used:
                self.dependency_addr = instruction.addr
        else:
            self.pop(instruction.input_amount, len(stack))
from z3 import z3
from smsymer import utils
from .immutableStorageTracker import ImmutableStorageTracker
from smsymer.analyzer.tool import RefTracker
from smsymer.evm import Instruction, Stack
class TimestampDepTracker(RefTracker):
    """Track a block-timestamp value on the EVM stack and flag a
    timestamp-dependency bug when it influences a conditional jump."""
    def __init__(self, addr: int, height: int):
        super().__init__(addr, height)
        # address of the instruction that created the dependency (-1 = none yet)
        self.dependency_addr = -1
    @property
    def is_buggy(self):
        # buggy as soon as the tracked timestamp has been used in a JUMPI condition
        return self.used is True
    @property
    def root_cause_addr(self):
        return self.addr
    def op(self, instruction: Instruction, stack: Stack, immutable_storage_references: List[ImmutableStorageTracker]):
        # cases that the timestamp is used in conditional jump
        if instruction.opcode == "JUMPI":
            # check whether the timestamp is only compared against fixed values
            if utils.is_symbol(stack[-2]):
                for z3_ref in utils.extract_z3_ref(stack[-2]):
                    if utils.is_z3_constant(z3_ref):
                        # a constant inside the z3 expression
                        continue
                    elif z3.eq(z3_ref, z3.Int("IHs")):
                        # the timestamp itself
                        continue
                    else:
                        for ref in immutable_storage_references:
                            if ref.contains(len(stack) - 2) and utils.is_symbol(ref.storage_value) and utils.in_list(
                                    utils.extract_z3_ref(ref.storage_value), z3_ref):
                                break
                        else:
                            # not an immutable storage variable: real dependency possible
                            break
                        # it is one of the immutable storage variables
                        continue
                else:
                    # every value involved in the comparison is constant (except the timestamp itself)
                    self.pop(instruction.input_amount, len(stack))
                    return
            not_used_before = not self.used
            self.use(instruction, len(stack))
            if not_used_before and self.used:
                self.dependency_addr = instruction.addr
        else:
            self.pop(instruction.input_amount, len(stack))
import torch
import torch.nn as nn
from .core import trainable_params_
from .torch_imports import *
IS_TORCH_04 = LooseVersion(torch.__version__) >= LooseVersion('0.4')
class FP16(nn.Module):
    """Wrapper that runs ``module`` in half precision while keeping BatchNorm
    layers in fp32 (via ``batchnorm_to_fp32``)."""
    def __init__(self, module):
        super().__init__()
        # convert the whole module to fp16, then restore fp32 on BatchNorm layers
        self.module = batchnorm_to_fp32(module.half())
    def forward(self, input):
        # cast incoming float activations down to match the fp16 weights
        if is_float(input): input = input.half()
        return self.module(input)
    def load_state_dict(self, *inputs, **kwargs):
        # delegate (de)serialization to the wrapped module
        self.module.load_state_dict(*inputs, **kwargs)
    def state_dict(self, *inputs, **kwargs):
        return self.module.state_dict(*inputs, **kwargs)
    def __getitem__(self, idx):
        # allow indexing like the wrapped (presumably sequential) module
        return self.module[idx]
def is_float(tensor):
    """Tell whether *tensor* holds floating-point data.

    On torch >= 0.4 this uses ``Tensor.is_floating_point``; on older versions
    it unwraps ``Variable`` and checks for a CUDA float tensor.
    """
    if not IS_TORCH_04:
        data = tensor.data if isinstance(tensor, Variable) else tensor
        return isinstance(data, torch.cuda.FloatTensor)
    return tensor.is_floating_point()
def batchnorm_to_fp32(module):
    '''
    Recursively restore single precision on every BatchNorm layer of *module*.

    A plain built-in ``.apply`` would visit parameters and buffers as well as
    modules, so the conversion could not be guarded on the module type; instead
    the module tree is walked explicitly. Returns *module* (converted in place).
    '''
    for submodule in module.modules():
        if isinstance(submodule, nn.modules.batchnorm._BatchNorm):
            submodule.float()
    return module
def copy_model_to_fp32(m, optim):
    """ Creates a fp32 copy of model parameters and sets optimizer parameters
    to point at that copy, so the optimizer updates fp32 master weights while
    the model itself stays in fp16. Returns the list of fp32 master parameters.
    """
    # detached fp32 CUDA clones of every trainable model parameter
    fp32_params = [m_param.clone().type(torch.cuda.FloatTensor).detach() for m_param in trainable_params_(m)]
    optim_groups = [group['params'] for group in optim.param_groups]
    # fp32_params and the trainable optimizer params are assumed to be in the
    # same order; the iterator advances only on trainable slots
    iter_fp32_params = iter(fp32_params)
    for group_params in optim_groups:
        for i in range(len(group_params)):
            if not group_params[i].requires_grad: continue # only update trainable_params_
            fp32_param = next(iter_fp32_params)
            assert(fp32_param.shape == group_params[i].shape)
            fp32_param.requires_grad = group_params[i].requires_grad
            # rebind the optimizer slot to the fp32 master copy (in-place list update)
            group_params[i] = fp32_param
    return fp32_params
def copy_fp32_to_model(m, fp32_params):
    """Copy the fp32 master weights back into the model's trainable parameters."""
    model_params = trainable_params_(m)
    assert(len(model_params) == len(fp32_params))
    for master, model_param in zip(fp32_params, model_params):
        model_param.data.copy_(master.data)
def update_fp32_grads(fp32_params, m):
    """Copy the model's (fp16) gradients into the fp32 master parameters,
    lazily allocating the master ``.grad`` buffers on first use."""
    m_params = trainable_params_(m)
    assert(len(m_params) == len(fp32_params))
    for fp32_param, m_param in zip(fp32_params, m_params):
        if fp32_param.grad is None:
            # allocate an (uninitialized) grad buffer of the right size; its
            # contents are immediately overwritten by the copy below
            fp32_param.grad = nn.Parameter(fp32_param.data.new().resize_(*fp32_param.data.size()))
        fp32_param.grad.data.copy_(m_param.grad.data)
import torch.nn as nn
from .core import trainable_params_
from .torch_imports import *
IS_TORCH_04 = LooseVersion(torch.__version__) >= LooseVersion('0.4')
class FP16(nn.Module):
    """Wrapper that runs ``module`` in half precision while keeping BatchNorm
    layers in fp32 (via ``batchnorm_to_fp32``)."""
    def __init__(self, module):
        super().__init__()
        # convert the whole module to fp16, then restore fp32 on BatchNorm layers
        self.module = batchnorm_to_fp32(module.half())
    def forward(self, input):
        # cast incoming float activations down to match the fp16 weights
        if is_float(input): input = input.half()
        return self.module(input)
    def load_state_dict(self, *inputs, **kwargs):
        # delegate (de)serialization to the wrapped module
        self.module.load_state_dict(*inputs, **kwargs)
    def state_dict(self, *inputs, **kwargs):
        return self.module.state_dict(*inputs, **kwargs)
    def __getitem__(self, idx):
        # allow indexing like the wrapped (presumably sequential) module
        return self.module[idx]
def is_float(tensor):
    """Tell whether *tensor* holds floating-point data.

    On torch >= 0.4 this uses ``Tensor.is_floating_point``; on older versions
    it unwraps ``Variable`` and checks for a CUDA float tensor.
    """
    if not IS_TORCH_04:
        data = tensor.data if isinstance(tensor, Variable) else tensor
        return isinstance(data, torch.cuda.FloatTensor)
    return tensor.is_floating_point()
def batchnorm_to_fp32(module):
    '''
    Recursively restore single precision on every BatchNorm layer of *module*.

    A plain built-in ``.apply`` would visit parameters and buffers as well as
    modules, so the conversion could not be guarded on the module type; instead
    the module tree is walked explicitly. Returns *module* (converted in place).
    '''
    for submodule in module.modules():
        if isinstance(submodule, nn.modules.batchnorm._BatchNorm):
            submodule.float()
    return module
def copy_model_to_fp32(m, optim):
    """ Creates a fp32 copy of model parameters and sets optimizer parameters
    to point at that copy, so the optimizer updates fp32 master weights while
    the model itself stays in fp16. Returns the list of fp32 master parameters.
    """
    # detached fp32 CUDA clones of every trainable model parameter
    fp32_params = [m_param.clone().type(torch.cuda.FloatTensor).detach() for m_param in trainable_params_(m)]
    optim_groups = [group['params'] for group in optim.param_groups]
    # fp32_params and the trainable optimizer params are assumed to be in the
    # same order; the iterator advances only on trainable slots
    iter_fp32_params = iter(fp32_params)
    for group_params in optim_groups:
        for i in range(len(group_params)):
            if not group_params[i].requires_grad: continue # only update trainable_params_
            fp32_param = next(iter_fp32_params)
            assert(fp32_param.shape == group_params[i].shape)
            fp32_param.requires_grad = group_params[i].requires_grad
            # rebind the optimizer slot to the fp32 master copy (in-place list update)
            group_params[i] = fp32_param
    return fp32_params
def copy_fp32_to_model(m, fp32_params):
    """Copy the fp32 master weights back into the model's trainable parameters."""
    model_params = trainable_params_(m)
    assert(len(model_params) == len(fp32_params))
    for master, model_param in zip(fp32_params, model_params):
        model_param.data.copy_(master.data)
def update_fp32_grads(fp32_params, m):
    """Copy the model's (fp16) gradients into the fp32 master parameters,
    lazily allocating the master ``.grad`` buffers on first use."""
    m_params = trainable_params_(m)
    assert(len(m_params) == len(fp32_params))
    for fp32_param, m_param in zip(fp32_params, m_params):
        if fp32_param.grad is None:
            # allocate an (uninitialized) grad buffer of the right size; its
            # contents are immediately overwritten by the copy below
            fp32_param.grad = nn.Parameter(fp32_param.data.new().resize_(*fp32_param.data.size()))
        fp32_param.grad.data.copy_(m_param.grad.data)
import unicodedata
import re
import jieba
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
def unicode_to_ascii(s):
    """Strip accents: NFD-decompose *s* and drop all combining marks (category Mn)."""
    decomposed = unicodedata.normalize('NFD', s)
    return ''.join(ch for ch in decomposed if unicodedata.category(ch) != 'Mn')
def preprocess_sentence(s):
    """Normalize one sentence and wrap it with <start>/<end> tokens."""
    # Convert to ASCII, lowercase, strip surrounding whitespace
    s = unicode_to_ascii(s.lower().strip())
    # Add a space before and after punctuation (incl. full-width CJK punctuation)
    s = re.sub(r'([?.!,。,!?‘’“”()()])', r' \1 ', s)
    # Collapse runs of spaces into a single space
    # NOTE(review): the character class [" "] also matches the double-quote
    # character, so ASCII quotes are replaced by spaces too -- confirm intended
    s = re.sub(r'[" "]+', ' ', s)
    # Keep only letters and punctuation (currently disabled)
    # s = re.sub(r'[^a-zA-Z?.!,¿]', ' ', s)
    # Strip leading/trailing spaces
    s = s.rstrip().strip()
    # Add sentence boundary markers
    s = '<start> ' + s + ' <end>'
    return s
# Parse the paired corpus file
def parse_data(filename):
    """Read the tab-separated en/cmn corpus and return two aligned tuples of
    preprocessed sentences (English first, then segmented Chinese)."""
    raw = open(filename, encoding='utf-8').read().strip()
    pairs = [line.split('\t') for line in raw.split('\n')]
    processed = [
        (preprocess_sentence(en), preprocess_sentence(' '.join(jieba.cut_for_search(cmn))))
        for en, cmn in pairs]
    # unpacking + zip: turn the list of pairs into two parallel sequences
    return zip(*processed)
def parse_enbigdata(filename):
    """Read one English sentence per line and preprocess each of them."""
    content = open(filename, encoding='utf-8').read().strip()
    return [preprocess_sentence(line) for line in content.split('\n')]
def parse_cmnbigdata(filename):
    """Read one Chinese sentence per line, segment it with jieba and preprocess it."""
    content = open(filename, encoding='utf-8').read().strip()
    return [preprocess_sentence(' '.join(jieba.cut_for_search(line))) for line in content.split('\n')]
def tokenizer(lang):
    """Fit a Keras tokenizer on *lang* and convert the sentences to padded
    id sequences. Returns (padded tensor, fitted tokenizer)."""
    lang_tokenizer = keras.preprocessing.text.Tokenizer(num_words=None, filters='', split=' ')
    # build the vocabulary from word frequencies
    lang_tokenizer.fit_on_texts(lang)
    # map every sentence to its sequence of word ids
    sequences = lang_tokenizer.texts_to_sequences(lang)
    # pad all sequences to the longest one (zeros appended at the end)
    padded = keras.preprocessing.sequence.pad_sequences(sequences, padding='post')
    return padded, lang_tokenizer
# Longest sequence length
def max_length(tensor):
    """Return the length of the longest sequence in *tensor*."""
    return max(len(seq) for seq in tensor)
# Sanity-check that the tokenizer round-trips ids back to words
def convert(example, tokenizer):
    """Print every non-padding id of *example* with the word it maps to."""
    for idx in example:
        if idx == 0:
            continue
        print('%d --> %s' % (idx, tokenizer.index_word[idx]))
def make_dataset(input_tensor, output_tensor, batch_size, epochs, shuffle, buffer_size):
    """Build an (optionally shuffled) repeated, batched tf.data pipeline."""
    ds = tf.data.Dataset.from_tensor_slices((input_tensor, output_tensor))
    if shuffle:
        ds = ds.shuffle(buffer_size)
    # drop_remainder keeps every batch at exactly batch_size samples
    ds = ds.repeat(epochs).batch(batch_size, drop_remainder=True)
    return ds
# Parse the paired en/cmn corpus
en_cmn_file_path = 'data/cmn_proc.txt'
en_dataset, cmn_dataset = parse_data(en_cmn_file_path)
en_file_path = 'data/news-commentary-v12.zh-en.en'
cmn_file_path = 'data/news-commentary-v12.zh-en.zh'
# NOTE(review): the *_b datasets below are built but never used afterwards
# (tokenization runs on the small datasets only) -- confirm intent
en_dataset_b = en_dataset + tuple(parse_enbigdata(en_file_path))
cmn_dataset_b = cmn_dataset + tuple(parse_cmnbigdata(cmn_file_path))
# Tokenize (input = Chinese, output = English)
input_tensor, input_tokenizer = tokenizer(cmn_dataset)
output_tensor, output_tokenizer = tokenizer(en_dataset)
# Longest input/output sequence lengths
max_length_input = max_length(input_tensor)
max_length_output = max_length(output_tensor)
print(max_length_input, max_length_output)
# Split into training and validation sets
input_train, input_eval, output_train, output_eval = train_test_split(input_tensor, output_tensor, test_size=0.2)
# Pipeline parameters
buffer_size = 10000 #20000
batch_size = 32 # 64
epochs = 100
# Build the tf.data datasets
train_dataset = make_dataset(input_train, output_train, batch_size, epochs, True, buffer_size)
eval_dataset = make_dataset(input_eval, output_eval, batch_size, 1, False, buffer_size)
for x, y in train_dataset.take(1):
    print(x.shape, y.shape)
import re
import jieba
import tensorflow as tf
from tensorflow import keras
from sklearn.model_selection import train_test_split
def unicode_to_ascii(s):
    """Strip accents: NFD-decompose *s* and drop all combining marks (category Mn)."""
    decomposed = unicodedata.normalize('NFD', s)
    return ''.join(ch for ch in decomposed if unicodedata.category(ch) != 'Mn')
def preprocess_sentence(s):
    """Normalize one sentence and wrap it with <start>/<end> tokens."""
    # Convert to ASCII, lowercase, strip surrounding whitespace
    s = unicode_to_ascii(s.lower().strip())
    # Add a space before and after punctuation (incl. full-width CJK punctuation)
    s = re.sub(r'([?.!,。,!?‘’“”()()])', r' \1 ', s)
    # Collapse runs of spaces into a single space
    # NOTE(review): the character class [" "] also matches the double-quote
    # character, so ASCII quotes are replaced by spaces too -- confirm intended
    s = re.sub(r'[" "]+', ' ', s)
    # Keep only letters and punctuation (currently disabled)
    # s = re.sub(r'[^a-zA-Z?.!,¿]', ' ', s)
    # Strip leading/trailing spaces
    s = s.rstrip().strip()
    # Add sentence boundary markers
    s = '<start> ' + s + ' <end>'
    return s
# Parse the paired corpus file
def parse_data(filename):
    """Read the tab-separated en/cmn corpus and return two aligned tuples of
    preprocessed sentences (English first, then segmented Chinese)."""
    raw = open(filename, encoding='utf-8').read().strip()
    pairs = [line.split('\t') for line in raw.split('\n')]
    processed = [
        (preprocess_sentence(en), preprocess_sentence(' '.join(jieba.cut_for_search(cmn))))
        for en, cmn in pairs]
    # unpacking + zip: turn the list of pairs into two parallel sequences
    return zip(*processed)
def parse_enbigdata(filename):
    """Read one English sentence per line and preprocess each of them."""
    content = open(filename, encoding='utf-8').read().strip()
    return [preprocess_sentence(line) for line in content.split('\n')]
def parse_cmnbigdata(filename):
    """Read one Chinese sentence per line, segment it with jieba and preprocess it."""
    content = open(filename, encoding='utf-8').read().strip()
    return [preprocess_sentence(' '.join(jieba.cut_for_search(line))) for line in content.split('\n')]
def tokenizer(lang):
    """Fit a Keras tokenizer on *lang* and convert the sentences to padded
    id sequences. Returns (padded tensor, fitted tokenizer)."""
    lang_tokenizer = keras.preprocessing.text.Tokenizer(num_words=None, filters='', split=' ')
    # build the vocabulary from word frequencies
    lang_tokenizer.fit_on_texts(lang)
    # map every sentence to its sequence of word ids
    sequences = lang_tokenizer.texts_to_sequences(lang)
    # pad all sequences to the longest one (zeros appended at the end)
    padded = keras.preprocessing.sequence.pad_sequences(sequences, padding='post')
    return padded, lang_tokenizer
# Longest sequence length
def max_length(tensor):
    """Return the length of the longest sequence in *tensor*."""
    return max(len(seq) for seq in tensor)
# Sanity-check that the tokenizer round-trips ids back to words
def convert(example, tokenizer):
    """Print every non-padding id of *example* with the word it maps to."""
    for idx in example:
        if idx == 0:
            continue
        print('%d --> %s' % (idx, tokenizer.index_word[idx]))
def make_dataset(input_tensor, output_tensor, batch_size, epochs, shuffle, buffer_size):
    """Build an (optionally shuffled) repeated, batched tf.data pipeline."""
    ds = tf.data.Dataset.from_tensor_slices((input_tensor, output_tensor))
    if shuffle:
        ds = ds.shuffle(buffer_size)
    # drop_remainder keeps every batch at exactly batch_size samples
    ds = ds.repeat(epochs).batch(batch_size, drop_remainder=True)
    return ds
# Parse the paired en/cmn corpus
en_cmn_file_path = 'data/cmn_proc.txt'
en_dataset, cmn_dataset = parse_data(en_cmn_file_path)
en_file_path = 'data/news-commentary-v12.zh-en.en'
cmn_file_path = 'data/news-commentary-v12.zh-en.zh'
# NOTE(review): the *_b datasets below are built but never used afterwards
# (tokenization runs on the small datasets only) -- confirm intent
en_dataset_b = en_dataset + tuple(parse_enbigdata(en_file_path))
cmn_dataset_b = cmn_dataset + tuple(parse_cmnbigdata(cmn_file_path))
# Tokenize (input = Chinese, output = English)
input_tensor, input_tokenizer = tokenizer(cmn_dataset)
output_tensor, output_tokenizer = tokenizer(en_dataset)
# Longest input/output sequence lengths
max_length_input = max_length(input_tensor)
max_length_output = max_length(output_tensor)
print(max_length_input, max_length_output)
# Split into training and validation sets
input_train, input_eval, output_train, output_eval = train_test_split(input_tensor, output_tensor, test_size=0.2)
# Pipeline parameters
buffer_size = 10000 #20000
batch_size = 32 # 64
epochs = 100
# Build the tf.data datasets
train_dataset = make_dataset(input_train, output_train, batch_size, epochs, True, buffer_size)
eval_dataset = make_dataset(input_eval, output_eval, batch_size, 1, False, buffer_size)
for x, y in train_dataset.take(1):
    print(x.shape, y.shape)
from pynput.keyboard import Controller as KeyboardController
from pynput.mouse import Controller as MouseController
import logging
# Log all events (both creation and execution) into the terminal.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(message)s')
# Shared controllers through which every event's execute() performs real input.
keyboard = KeyboardController()
mouse = MouseController()
class Event:
    """Namespace of replayable input events.

    Each leaf class logs its intent when constructed and performs the actual
    input action (through the module-level keyboard/mouse controllers) when
    execute() is called.
    """

    class KeyboardEvent:
        class Press:
            """Press (and hold) a key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Pressing {self.key}")

            def execute(self):
                logging.info(f"Pressed {self.key}")
                keyboard.press(self.key)

        class Release:
            """Release a previously pressed key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Releasing {self.key}")

            def execute(self):
                logging.info(f"Released {self.key}")
                keyboard.release(self.key)

        class Tap:
            """Press and immediately release a key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Tapping {self.key}")

            def execute(self):
                logging.info(f"Tapped {self.key}")
                keyboard.tap(self.key)

    class MouseEvent:
        class Move:
            """Move the pointer to absolute coordinates (x, y)."""

            def __init__(self, x, y):
                self.x = x
                self.y = y
                logging.info(f"Moving to ({self.x}, {self.y})")

            def execute(self):
                logging.info(f"Moved to ({self.x}, {self.y})")
                mouse.position = (self.x, self.y)

        class Click:
            """Move to (x, y) and press or release a mouse button."""

            def __init__(self, x, y, button, pressed):
                self.x = x
                self.y = y
                self.button = button
                # pressed=True is button-down, False is button-up.
                self.pressed = pressed
                logging.info(f"Moving to ({self.x}, {self.y})")
                did_press = "Pressing" if self.pressed else "Releasing"
                logging.info(f"{did_press} {self.button}")

            def execute(self):
                mouse.position = (self.x, self.y)
                if self.pressed:
                    mouse.press(self.button)
                else:
                    mouse.release(self.button)
                logging.info(f"Moved to ({self.x}, {self.y})")
                did_press = "Pressed" if self.pressed else "Released"
                logging.info(f"{did_press} {self.button}")

        class Scroll:
            """Move to (x, y) and scroll by (dx, dy)."""

            def __init__(self, x, y, dx, dy):
                self.x = x
                self.y = y
                self.dx = dx
                self.dy = dy
                logging.info(f"Moving to ({self.x}, {self.y})")
                logging.info(f"Scrolling {self._direction()}: ({self.x}, {self.y})")

            def _direction(self):
                """Human-readable scroll direction, e.g. 'Up', 'DownLeft', ''."""
                direction = ""
                if self.dy > 0:
                    direction += "Up"
                elif self.dy < 0:
                    direction += "Down"
                if self.dx > 0:
                    direction += "Right"
                elif self.dx < 0:
                    direction += "Left"
                return direction

            def execute(self):
                mouse.position = (self.x, self.y)
                mouse.scroll(self.dx, self.dy)
                logging.info(f"Moved to ({self.x}, {self.y})")
                # BUG FIX: the original logged self.direction, an attribute
                # that is never assigned, so execute() raised AttributeError.
                # The direction is computed (once) in _direction() instead.
                logging.info(f"Scrolled {self._direction()}: ({self.x}, {self.y})")
from pynput.mouse import Controller as MouseController
import logging
# Log all events (both creation and execution) into the terminal.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(message)s')
# Shared controllers through which every event's execute() performs real input.
keyboard = KeyboardController()
mouse = MouseController()
class Event:
    """Namespace of replayable input events.

    Each leaf class logs its intent when constructed and performs the actual
    input action (through the module-level keyboard/mouse controllers) when
    execute() is called.
    """

    class KeyboardEvent:
        class Press:
            """Press (and hold) a key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Pressing {self.key}")

            def execute(self):
                logging.info(f"Pressed {self.key}")
                keyboard.press(self.key)

        class Release:
            """Release a previously pressed key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Releasing {self.key}")

            def execute(self):
                logging.info(f"Released {self.key}")
                keyboard.release(self.key)

        class Tap:
            """Press and immediately release a key."""

            def __init__(self, key):
                self.key = key
                logging.info(f"Tapping {self.key}")

            def execute(self):
                logging.info(f"Tapped {self.key}")
                keyboard.tap(self.key)

    class MouseEvent:
        class Move:
            """Move the pointer to absolute coordinates (x, y)."""

            def __init__(self, x, y):
                self.x = x
                self.y = y
                logging.info(f"Moving to ({self.x}, {self.y})")

            def execute(self):
                logging.info(f"Moved to ({self.x}, {self.y})")
                mouse.position = (self.x, self.y)

        class Click:
            """Move to (x, y) and press or release a mouse button."""

            def __init__(self, x, y, button, pressed):
                self.x = x
                self.y = y
                self.button = button
                # pressed=True is button-down, False is button-up.
                self.pressed = pressed
                logging.info(f"Moving to ({self.x}, {self.y})")
                did_press = "Pressing" if self.pressed else "Releasing"
                logging.info(f"{did_press} {self.button}")

            def execute(self):
                mouse.position = (self.x, self.y)
                if self.pressed:
                    mouse.press(self.button)
                else:
                    mouse.release(self.button)
                logging.info(f"Moved to ({self.x}, {self.y})")
                did_press = "Pressed" if self.pressed else "Released"
                logging.info(f"{did_press} {self.button}")

        class Scroll:
            """Move to (x, y) and scroll by (dx, dy)."""

            def __init__(self, x, y, dx, dy):
                self.x = x
                self.y = y
                self.dx = dx
                self.dy = dy
                logging.info(f"Moving to ({self.x}, {self.y})")
                logging.info(f"Scrolling {self._direction()}: ({self.x}, {self.y})")

            def _direction(self):
                """Human-readable scroll direction, e.g. 'Up', 'DownLeft', ''."""
                direction = ""
                if self.dy > 0:
                    direction += "Up"
                elif self.dy < 0:
                    direction += "Down"
                if self.dx > 0:
                    direction += "Right"
                elif self.dx < 0:
                    direction += "Left"
                return direction

            def execute(self):
                mouse.position = (self.x, self.y)
                mouse.scroll(self.dx, self.dy)
                logging.info(f"Moved to ({self.x}, {self.y})")
                # BUG FIX: the original logged self.direction, an attribute
                # that is never assigned, so execute() raised AttributeError.
                # The direction is computed (once) in _direction() instead.
                logging.info(f"Scrolled {self._direction()}: ({self.x}, {self.y})")
import numpy as np
import cv2
from mindarmour.natural_robustness.transform.image.natural_perturb import _NaturalPerturb
from mindarmour.utils._check_param import check_param_multi_types, check_int_positive, check_param_type
from mindarmour.utils.logger import LogUtil
# Shared mindarmour logger instance and the tag used for this module's messages.
LOGGER = LogUtil.get_instance()
TAG = 'Image Blur'
class GaussianBlur(_NaturalPerturb):
    """
    Blurs the image using Gaussian blur filter.

    Args:
        ksize (int): Size of gaussian kernel, this value must be non-negative.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.

    Example:
        >>> img = cv2.imread('1.png')
        >>> img = np.array(img)
        >>> ksize = 5
        >>> trans = GaussianBlur(ksize)
        >>> dst = trans(img)
    """

    def __init__(self, ksize=2, auto_param=False):
        super(GaussianBlur, self).__init__()
        ksize = check_int_positive('ksize', ksize)
        # cv2.GaussianBlur requires an odd kernel edge, hence 2*k + 1.
        kernel_edge = 2 * np.random.randint(0, 5) + 1 if auto_param else 2 * ksize + 1
        self.ksize = (kernel_edge, kernel_edge)

    def __call__(self, image):
        """
        Transform the image.

        Args:
            image (numpy.ndarray): Original image to be transformed.

        Returns:
            numpy.ndarray, transformed image.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        blurred = cv2.GaussianBlur(image, self.ksize, 0)
        restored = self._original_format(blurred, chw, normalized, gray3dim)
        return restored.astype(ori_dtype)
class MotionBlur(_NaturalPerturb):
    """
    Motion blur for a given image.
    Args:
        degree (int): Degree of blur. This value must be positive. Suggested value range in [1, 15].
        angle: (union[float, int]): Direction of motion blur. Angle=0 means up and down motion blur. Angle is
            counterclockwise.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.
    Example:
        >>> img = cv2.imread('1.png')
        >>> img = np.array(img)
        >>> angle = 0
        >>> degree = 5
        >>> trans = MotionBlur(degree=degree, angle=angle)
        >>> new_img = trans(img)
    """

    def __init__(self, degree=5, angle=45, auto_param=False):
        super(MotionBlur, self).__init__()
        self.degree = check_int_positive('degree', degree)
        # NOTE(review): the next line immediately overwrites the value above
        # with a looser check (any float/int accepted); both helpers return
        # `degree` unchanged, so only the validation differs — confirm which
        # constraint is intended.
        self.degree = check_param_multi_types('degree', degree, [float, int])
        auto_param = check_param_type('auto_param', auto_param, bool)
        if auto_param:
            # Randomly pick a small blur length and an arbitrary direction.
            self.degree = np.random.randint(1, 5)
            self.angle = np.random.uniform(0, 360)
        else:
            # Shift by 45 degrees — presumably to compensate for the diagonal
            # base kernel built in __call__; TODO confirm.
            self.angle = angle - 45

    def __call__(self, image):
        """
        Motion blur for a given image.
        Args:
            image (numpy.ndarray): Original image.
        Returns:
            numpy.ndarray, image after motion blur.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        # Rotate a diagonal line kernel (diag of ones) to the requested angle,
        # then normalize by its length so the filter averages along the line.
        matrix = cv2.getRotationMatrix2D((self.degree / 2, self.degree / 2), self.angle, 1)
        motion_blur_kernel = np.diag(np.ones(self.degree))
        motion_blur_kernel = cv2.warpAffine(motion_blur_kernel, matrix, (self.degree, self.degree))
        motion_blur_kernel = motion_blur_kernel / self.degree
        blurred = cv2.filter2D(image, -1, motion_blur_kernel)
        # convert to uint8
        cv2.normalize(blurred, blurred, 0, 255, cv2.NORM_MINMAX)
        blurred = self._original_format(blurred, chw, normalized, gray3dim)
        return blurred.astype(ori_dtype)
class GradientBlur(_NaturalPerturb):
    """
    Gradient blur.
    Args:
        point (union[tuple, list]): 2D coordinate of the Blur center point.
        kernel_num (int): Number of blur kernels. Suggested value range in [1, 8].
        center (bool): Blurred or clear at the center of a specified point.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.
    Example:
        >>> img = cv2.imread('xx.png')
        >>> img = np.array(img)
        >>> number = 5
        >>> h, w = img.shape[:2]
        >>> point = (int(h / 5), int(w / 5))
        >>> center = True
        >>> trans = GradientBlur(point, number, center)
        >>> new_img = trans(img)
    """

    def __init__(self, point, kernel_num=3, center=True, auto_param=False):
        # BUG FIX: the original called super(GradientBlur).__init__(), which
        # creates an unbound super object and never runs the parent
        # initializer; the two-argument form actually initializes the base.
        super(GradientBlur, self).__init__()
        point = check_param_multi_types('point', point, [list, tuple])
        self.auto_param = check_param_type('auto_param', auto_param, bool)
        self.point = tuple(point)
        self.kernel_num = check_int_positive('kernel_num', kernel_num)
        self.center = check_param_type('center', center, bool)

    def _auto_param(self, h, w):
        """Randomly re-draw the focus point, kernel count and center flag."""
        self.point = (int(np.random.uniform(0, h)), int(np.random.uniform(0, w)))
        self.kernel_num = np.random.randint(1, 6)
        self.center = np.random.choice([True, False])

    def __call__(self, image):
        """
        Args:
            image (numpy.ndarray): Original image.
        Returns:
            numpy.ndarray, gradient blurred image.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        w, h = image.shape[:2]
        if self.auto_param:
            self._auto_param(h, w)
        mask = np.zeros(image.shape, dtype=np.uint8)
        masks = []
        # Ring radius step: distance from the focus point to the farthest
        # border, split evenly across the kernels.
        radius = max(w - self.point[0], self.point[0], h - self.point[1], self.point[1])
        radius = int(radius / self.kernel_num)
        for i in range(self.kernel_num):
            circle = cv2.circle(mask.copy(), self.point, radius * (1 + i), (1, 1, 1), -1)
            masks.append(circle)
        blurs = []
        # Gaussian kernels of increasing size: 3, 5, 7, ...
        for i in range(3, 3 + 2 * self.kernel_num, 2):
            ksize = (i, i)
            blur = cv2.GaussianBlur(image, ksize, 0)
            blurs.append(blur)
        dst = image.copy()
        if self.center:
            # Clear at the focus point, increasingly blurred outwards.
            for i in range(self.kernel_num):
                dst = masks[i] * dst + (1 - masks[i]) * blurs[i]
        else:
            # Blurred at the focus point, increasingly clear outwards.
            for i in range(self.kernel_num - 1, -1, -1):
                dst = masks[i] * blurs[self.kernel_num - 1 - i] + (1 - masks[i]) * dst
        dst = self._original_format(dst, chw, normalized, gray3dim)
        return dst.astype(ori_dtype)
import cv2
from mindarmour.natural_robustness.transform.image.natural_perturb import _NaturalPerturb
from mindarmour.utils._check_param import check_param_multi_types, check_int_positive, check_param_type
from mindarmour.utils.logger import LogUtil
# Shared mindarmour logger instance and the tag used for this module's messages.
LOGGER = LogUtil.get_instance()
TAG = 'Image Blur'
class GaussianBlur(_NaturalPerturb):
    """
    Blurs the image using Gaussian blur filter.

    Args:
        ksize (int): Size of gaussian kernel, this value must be non-negative.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.

    Example:
        >>> img = cv2.imread('1.png')
        >>> img = np.array(img)
        >>> ksize = 5
        >>> trans = GaussianBlur(ksize)
        >>> dst = trans(img)
    """

    def __init__(self, ksize=2, auto_param=False):
        super(GaussianBlur, self).__init__()
        ksize = check_int_positive('ksize', ksize)
        # cv2.GaussianBlur requires an odd kernel edge, hence 2*k + 1.
        kernel_edge = 2 * np.random.randint(0, 5) + 1 if auto_param else 2 * ksize + 1
        self.ksize = (kernel_edge, kernel_edge)

    def __call__(self, image):
        """
        Transform the image.

        Args:
            image (numpy.ndarray): Original image to be transformed.

        Returns:
            numpy.ndarray, transformed image.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        blurred = cv2.GaussianBlur(image, self.ksize, 0)
        restored = self._original_format(blurred, chw, normalized, gray3dim)
        return restored.astype(ori_dtype)
class MotionBlur(_NaturalPerturb):
    """
    Motion blur for a given image.
    Args:
        degree (int): Degree of blur. This value must be positive. Suggested value range in [1, 15].
        angle: (union[float, int]): Direction of motion blur. Angle=0 means up and down motion blur. Angle is
            counterclockwise.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.
    Example:
        >>> img = cv2.imread('1.png')
        >>> img = np.array(img)
        >>> angle = 0
        >>> degree = 5
        >>> trans = MotionBlur(degree=degree, angle=angle)
        >>> new_img = trans(img)
    """

    def __init__(self, degree=5, angle=45, auto_param=False):
        super(MotionBlur, self).__init__()
        self.degree = check_int_positive('degree', degree)
        # NOTE(review): the next line immediately overwrites the value above
        # with a looser check (any float/int accepted); both helpers return
        # `degree` unchanged, so only the validation differs — confirm which
        # constraint is intended.
        self.degree = check_param_multi_types('degree', degree, [float, int])
        auto_param = check_param_type('auto_param', auto_param, bool)
        if auto_param:
            # Randomly pick a small blur length and an arbitrary direction.
            self.degree = np.random.randint(1, 5)
            self.angle = np.random.uniform(0, 360)
        else:
            # Shift by 45 degrees — presumably to compensate for the diagonal
            # base kernel built in __call__; TODO confirm.
            self.angle = angle - 45

    def __call__(self, image):
        """
        Motion blur for a given image.
        Args:
            image (numpy.ndarray): Original image.
        Returns:
            numpy.ndarray, image after motion blur.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        # Rotate a diagonal line kernel (diag of ones) to the requested angle,
        # then normalize by its length so the filter averages along the line.
        matrix = cv2.getRotationMatrix2D((self.degree / 2, self.degree / 2), self.angle, 1)
        motion_blur_kernel = np.diag(np.ones(self.degree))
        motion_blur_kernel = cv2.warpAffine(motion_blur_kernel, matrix, (self.degree, self.degree))
        motion_blur_kernel = motion_blur_kernel / self.degree
        blurred = cv2.filter2D(image, -1, motion_blur_kernel)
        # convert to uint8
        cv2.normalize(blurred, blurred, 0, 255, cv2.NORM_MINMAX)
        blurred = self._original_format(blurred, chw, normalized, gray3dim)
        return blurred.astype(ori_dtype)
class GradientBlur(_NaturalPerturb):
    """
    Gradient blur.
    Args:
        point (union[tuple, list]): 2D coordinate of the Blur center point.
        kernel_num (int): Number of blur kernels. Suggested value range in [1, 8].
        center (bool): Blurred or clear at the center of a specified point.
        auto_param (bool): Auto selected parameters. Selected parameters will preserve semantics of image.
    Example:
        >>> img = cv2.imread('xx.png')
        >>> img = np.array(img)
        >>> number = 5
        >>> h, w = img.shape[:2]
        >>> point = (int(h / 5), int(w / 5))
        >>> center = True
        >>> trans = GradientBlur(point, number, center)
        >>> new_img = trans(img)
    """

    def __init__(self, point, kernel_num=3, center=True, auto_param=False):
        # BUG FIX: the original called super(GradientBlur).__init__(), which
        # creates an unbound super object and never runs the parent
        # initializer; the two-argument form actually initializes the base.
        super(GradientBlur, self).__init__()
        point = check_param_multi_types('point', point, [list, tuple])
        self.auto_param = check_param_type('auto_param', auto_param, bool)
        self.point = tuple(point)
        self.kernel_num = check_int_positive('kernel_num', kernel_num)
        self.center = check_param_type('center', center, bool)

    def _auto_param(self, h, w):
        """Randomly re-draw the focus point, kernel count and center flag."""
        self.point = (int(np.random.uniform(0, h)), int(np.random.uniform(0, w)))
        self.kernel_num = np.random.randint(1, 6)
        self.center = np.random.choice([True, False])

    def __call__(self, image):
        """
        Args:
            image (numpy.ndarray): Original image.
        Returns:
            numpy.ndarray, gradient blurred image.
        """
        ori_dtype = image.dtype
        _, chw, normalized, gray3dim, image = self._check(image)
        w, h = image.shape[:2]
        if self.auto_param:
            self._auto_param(h, w)
        mask = np.zeros(image.shape, dtype=np.uint8)
        masks = []
        # Ring radius step: distance from the focus point to the farthest
        # border, split evenly across the kernels.
        radius = max(w - self.point[0], self.point[0], h - self.point[1], self.point[1])
        radius = int(radius / self.kernel_num)
        for i in range(self.kernel_num):
            circle = cv2.circle(mask.copy(), self.point, radius * (1 + i), (1, 1, 1), -1)
            masks.append(circle)
        blurs = []
        # Gaussian kernels of increasing size: 3, 5, 7, ...
        for i in range(3, 3 + 2 * self.kernel_num, 2):
            ksize = (i, i)
            blur = cv2.GaussianBlur(image, ksize, 0)
            blurs.append(blur)
        dst = image.copy()
        if self.center:
            # Clear at the focus point, increasingly blurred outwards.
            for i in range(self.kernel_num):
                dst = masks[i] * dst + (1 - masks[i]) * blurs[i]
        else:
            # Blurred at the focus point, increasingly clear outwards.
            for i in range(self.kernel_num - 1, -1, -1):
                dst = masks[i] * blurs[self.kernel_num - 1 - i] + (1 - masks[i]) * dst
        dst = self._original_format(dst, chw, normalized, gray3dim)
        return dst.astype(ori_dtype)
import os
import sys
import yaml
from netpacket import common
def write_net_header():
    """Regenerate the ENetHeader enum inside the configured net-definition file.

    Builds one enumerator per entry of the module-level net_header_list and
    splices the text between the NetDefStart/NetDefEnd markers.
    """
    write_content = "\n"
    write_content += "enum ENetHeader\n"
    write_content += "{\n"
    write_content += "    ENetHeader_None = 0,\n"
    # One enumerator per collected packet header name.
    for net_header in net_header_list:
        write_content += "    ENetHeader_" + net_header + ",\n"
    write_content += "    ENetHeader_Max\n"
    write_content += "};\n"
    common.write_file_content(config_yaml_data["NetDefFile"], config_yaml_data["NetDefStart"],
                              config_yaml_data["NetDefEnd"], write_content)
    print("Write NetHeader Success!")
def write_net_packet_name():
    """Regenerate the umbrella header that includes every NetRequest<Name>.h."""
    write_content = "#pragma once\n"
    write_content += "\n"
    write_content += "//Exported by Tool, please don't edit this file directly.\n"
    write_content += "\n"
    # One include per generated packet header.
    for net_packet_name in net_packet_name_list:
        write_content += "#include \"NetRequest"+net_packet_name+".h\"\n"
    common.overwrite_file_content(
        config_yaml_data["NetRequestFile"], write_content)
    print("Write NetRequest Name Success!")
def get_data_type(data_type):
    """Map a schema type name to its Unreal C++ type; unknown names pass through."""
    cpp_types = {
        "bool": "bool",
        "int32": "int32",
        "int64": "int64",
        "float": "float",
        "string": "FString",
        "bool[]": "TArray<bool>",
        "int32[]": "TArray<int32>",
        "int64[]": "TArray<int64>",
        "float[]": "TArray<float>",
        "string[]": "TArray<FString>",
    }
    return cpp_types.get(data_type, data_type)
def get_data_type_empty(data_type):
    """Return the C++ reset suffix appended after a member name to clear it."""
    if data_type == "bool":
        return " = false"
    if data_type in ("int32", "int64"):
        return " = 0"
    if data_type == "float":
        return " = 0.f"
    if data_type in ("string", "bool[]", "int32[]", "int64[]", "float[]", "string[]"):
        # FString and TArray both expose Empty().
        return ".Empty()"
    # Anything else — presumably a nested message type — is reset via Clear().
    return ".Clear()"
def get_data_type_parse(data_type, data_name):
    """Return the C++ serialization call emitted for member <data_name>_."""
    member = data_name + "_"
    scalar = {
        "bool": ".AppendInt(" + member + "?1:0)",
        "int32": ".AppendInt(" + member + ")",
        "int64": ".AppendInt(" + member + ")",
        "float": ".Append(FString::SanitizeFloat(" + member + "))",
        "string": ".Append(" + member + ")",
    }
    if data_type in scalar:
        return scalar[data_type]
    array_helpers = {
        "bool[]": "GetConvertListBool",
        "int32[]": "GetConvertListInt32",
        "int64[]": "GetConvertListInt64",
        "float[]": "GetConvertListFloat",
        "string[]": "GetConvertListString",
    }
    if data_type in array_helpers:
        return ".Append(GameParser::" + array_helpers[data_type] + "(" + member + "))"
    # Unknown types produce no serialization snippet.
    return ""
def is_need_initialization(data_type):
    """True for scalar value types that need an explicit C++ initializer."""
    return data_type in ("bool", "int32", "int64", "float")
def is_need_has_variable(data_type):
    """True for scalar types that get a has_<name>_ presence flag member."""
    return data_type in ("bool", "int32", "int64", "float", "string")
def get_need_initialization(data_type):
    """Default C++ initializer literal for a scalar type.

    Mirrors the original contract: non-scalar types yield the boolean False
    rather than a string.
    """
    defaults = {"bool": "false", "int32": "0", "int64": "0", "float": "0.f"}
    return defaults.get(data_type, False)
def get_data_public_h_function(data_type, data_name):
    """Render the public .h declarations (getter/setter, array helpers, has_)
    for one field named *data_name* of schema type *data_type*.

    NOTE(review): the generated indentation mixes content_prefix (spaces)
    with hard tabs on the scalar set_ lines and the final has_ line —
    presumably unintentional; confirm before normalizing.
    """
    content_prefix = "    "
    content = ""
    # Scalars: value getter plus setter.
    if data_type == "bool":
        content += content_prefix + "bool "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(bool const& " + data_name + ");\n"
    elif data_type == "int32":
        content += content_prefix + "int32 "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(int32 const& " + data_name + ");\n"
    elif data_type == "int64":
        content += content_prefix + "int64 "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(int64 const& " + data_name + ");\n"
    elif data_type == "float":
        content += content_prefix + "float "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(float const& " + data_name + ");\n"
    elif data_type == "string":
        content += content_prefix + "FString const& "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(FString const& " + data_name + ");\n"
    # Arrays: whole-array getter, indexed getter, _size, add_ and set_.
    elif data_type == "bool[]":
        content += content_prefix + "TArray<bool> const& "+data_name+"() const;\n"
        content += content_prefix + "bool "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(bool const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<bool> const& " + data_name + ");\n"
    elif data_type == "int32[]":
        content += content_prefix + "TArray<int32> const& "+data_name+"() const;\n"
        content += content_prefix + "int32 "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(int32 const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<int32> const& " + data_name + ");\n"
    elif data_type == "int64[]":
        content += content_prefix + "TArray<int64> const& "+data_name+"() const;\n"
        content += content_prefix + "int64 "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(int64 const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<int64> const& " + data_name + ");\n"
    elif data_type == "float[]":
        content += content_prefix + "TArray<float> const& "+data_name+"() const;\n"
        content += content_prefix + "float " + \
            data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(float const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<float> const& " + data_name + ");\n"
    elif data_type == "string[]":
        content += content_prefix + "TArray<FString> const& "+data_name+"() const;\n"
        content += content_prefix + "FString const& " + \
            data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(FString const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<FString> const& " + data_name + ");\n"
    # Every field type gets a has_<name>() query.
    content += "\tbool has_" + data_name + "() const;\n"
    return content
def get_data_type_cpp_function(class_name, data_type, data_name):
    """Render the .cpp accessor definitions for one field of *class_name*:
    getter, setter (or add_/set_ for arrays), _size and has_ queries.

    NOTE(review): the emitted bodies mix hard tabs and four-space indents
    (e.g. the bool/float getters use a tab while int32/int64/string use
    spaces) — presumably unintentional; confirm before normalizing, since
    the generated files may be diffed by other tooling.
    """
    # content_prefix is empty here; kept only to mirror the .h renderer.
    content_prefix = ""
    content = "\n"
    # --- bool: value getter/setter with a has_ presence flag ---
    if data_type == "bool":
        content += content_prefix + "bool "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(bool const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    # --- int32: same shape as bool ---
    elif data_type == "int32":
        content += content_prefix + "int32 "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(int32 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    # --- int64 ---
    elif data_type == "int64":
        content += content_prefix + "int64 "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(int64 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    # --- float ---
    elif data_type == "float":
        content += content_prefix + "float "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(float const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    # --- string: const-reference getter ---
    elif data_type == "string":
        content += content_prefix + "FString const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(FString const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    # --- bool[]: array getter, bounds-logged index getter, size, set_/add_,
    #     has_ derived from size ---
    elif data_type == "bool[]":
        content += content_prefix + "TArray<bool> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<bool> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(bool const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    # --- int32[] ---
    elif data_type == "int32[]":
        content += content_prefix + "TArray<int32> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<int32> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(int32 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    # --- int64[] ---
    elif data_type == "int64[]":
        content += content_prefix + "TArray<int64> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int64 " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<int64> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(int64 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    # --- float[] ---
    elif data_type == "float[]":
        content += content_prefix + "TArray<float> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "float " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<float> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(float const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    # --- string[]: const-reference indexed getter ---
    elif data_type == "string[]":
        content += content_prefix + "TArray<FString> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "FString const& " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<FString> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(FString const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    return content
def write_net_packet(net_packet_name):
    """Generate NetRequest<net_packet_name>.h/.cpp and write them to disk.

    Reads module globals: ``packet_config_data`` (parsed YAML for the current
    config file), ``config_yaml_data`` (export paths) and appends every
    generated header name to ``net_header_list`` as a side effect.
    """
    # Register each packet's enum name globally; write_net_header() consumes this.
    for net_packet_key in packet_config_data:
        net_header_list.append(net_packet_name + net_packet_key)
    # ---- .h file: one request class per packet key ----
    write_h_content = "#pragma once\n"
    write_h_content += "\n"
    write_h_content += "//Exported by Tool, please don't edit this file directly.\n"
    write_h_content += "\n"
    write_h_content += "#include \"NetRequestBase.h\"\n"
    for net_packet_key in packet_config_data:
        net_packet_data = packet_config_data[net_packet_key]
        write_h_content += "\n"
        packet_class_name = "NetRequest" + net_packet_name + net_packet_key
        write_h_content += "class "+packet_class_name+" : public NetRequestBase\n"
        write_h_content += "{\n"
        write_h_content += "public:\n"
        write_h_content += "\t" + packet_class_name + "();\n"
        write_h_content += "\n"
        write_h_content += "\tstatic " + packet_class_name + \
            "* Cast(NetRequestBase& Data);\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual NetRequestBase* Create() const override;\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual void Clear() override;\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual FString ParseStr() override;\n"
        # Public accessor declarations for every configured data member.
        if "Data" in net_packet_data:
            write_h_content += "\n"
            write_h_content += "public:\n"
            for net_package_data_key in net_packet_data["Data"]:
                write_h_content += get_data_public_h_function(
                    net_packet_data["Data"][net_package_data_key], net_package_data_key)
        # Private storage: one <name>_ member (plus has_<name>_ flag for scalars).
        if "Data" in net_packet_data:
            write_h_content += "\n"
            write_h_content += "private:\n"
            for net_package_data_key in net_packet_data["Data"]:
                write_h_content += "\t"
                write_h_content += get_data_type(
                    net_packet_data["Data"][net_package_data_key])
                write_h_content += " " + net_package_data_key + "_;\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_h_content += "\tbool has_" + net_package_data_key + "_;\n"
        write_h_content += "};\n"
    # ---- .cpp file: constructor, Cast, Create, Clear, ParseStr, accessors ----
    write_cpp_content = "//Exported by Tool, please don't edit this file directly.\n"
    write_cpp_content += "\n"
    write_cpp_content += "#include \"NetRequest"+net_packet_name+".h\"\n"
    write_cpp_content += "#include \"GameParser.h\"\n"
    for net_packet_key in packet_config_data:
        net_packet_data = packet_config_data[net_packet_key]
        write_cpp_content += "\n"
        packet_class_name = "NetRequest" + net_packet_name + net_packet_key
        write_cpp_content += packet_class_name + "::" + packet_class_name + "()\n"
        # Constructor initializer list: ':' before the first entry, ',' after.
        if "Data" in net_packet_data:
            is_first = True
            for net_package_data_key in net_packet_data["Data"]:
                data_type = get_data_type(
                    net_packet_data["Data"][net_package_data_key])
                if is_need_initialization(data_type):
                    write_cpp_content += "\t"
                    if is_first:
                        is_first = False
                        write_cpp_content += ":"
                    else:
                        write_cpp_content += ","
                    write_cpp_content += " " + net_package_data_key + "_(" + \
                        get_need_initialization(data_type)+")\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_cpp_content += "\t"
                    if is_first:
                        is_first = False
                        write_cpp_content += ":"
                    else:
                        write_cpp_content += ","
                    write_cpp_content += " has_" + \
                        net_package_data_key + "_(false)\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tHeader = ENetHeader_" + \
            net_packet_name + net_packet_key + ";\n"
        write_cpp_content += "\tMethod = ENetMethod_" + \
            net_packet_data["Method"]+";\n"
        # "Server" is optional in the config; default to an empty string.
        if "Server" in net_packet_data:
            write_cpp_content += "\tServer = \"" + \
                net_packet_data["Server"]+"\";\n"
        else:
            write_cpp_content += "\tServer = \"\";\n"
        write_cpp_content += "\tUrl = \""+net_packet_data["Url"]+"\";\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Cast(): checked downcast keyed on the Header enum value.
        write_cpp_content += packet_class_name + "* " + \
            packet_class_name + "::Cast(NetRequestBase& Data)\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tif (Data.Header != ENetHeader_" + \
            net_packet_name + net_packet_key + ")\n"
        write_cpp_content += "\t{\n"
        write_cpp_content += "\t\treturn nullptr;\n"
        write_cpp_content += "\t}\n"
        write_cpp_content += "\treturn static_cast<" + \
            packet_class_name + "*>(&Data);\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Create(): heap-allocated copy of this request.
        write_cpp_content += "NetRequestBase* " + \
            packet_class_name + "::Create() const\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tconst auto Data = new " + packet_class_name + "();\n"
        write_cpp_content += "\t*Data = *this;\n"
        write_cpp_content += "\treturn Data;\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Clear(): reset every member and drop the has_ presence flags.
        write_cpp_content += "void " + packet_class_name + "::Clear()\n"
        write_cpp_content += "{\n"
        if "Data" in net_packet_data:
            for net_package_data_key in net_packet_data["Data"]:
                write_cpp_content += "\t"
                write_cpp_content += net_package_data_key + "_"
                write_cpp_content += get_data_type_empty(
                    net_packet_data["Data"][net_package_data_key])
                write_cpp_content += ";\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_cpp_content += "\thas_" + net_package_data_key + "_ = false;\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # ParseStr(): serialize present fields as key=value pairs joined by '&'.
        write_cpp_content += "FString " + packet_class_name + "::ParseStr()\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tDataString.Empty();\n"
        if "Data" in net_packet_data:
            data_count = len(net_packet_data["Data"])
            if data_count > 1:
                write_cpp_content += "\tbool IsFirst = true;\n"
            index = 0
            for net_package_data_key in net_packet_data["Data"]:
                index += 1
                write_cpp_content += "\tif (has_" + \
                    net_package_data_key + "())\n"
                write_cpp_content += "\t{\n"
                # Only needed when several fields may be joined together.
                if data_count > 1:
                    write_cpp_content += "\t\tif (!IsFirst)\n"
                    write_cpp_content += "\t\t{\n"
                    write_cpp_content += "\t\t\tDataString.Append(\"&\");\n"
                    write_cpp_content += "\t\t}\n"
                    if index < data_count:
                        write_cpp_content += "\t\telse\n"
                        write_cpp_content += "\t\t{\n"
                        write_cpp_content += "\t\t\tIsFirst = false;\n"
                        write_cpp_content += "\t\t}\n"
                write_cpp_content += "\t\tDataString.Append(\"" + \
                    net_package_data_key+"=\");\n"
                write_cpp_content += "\t\tDataString"
                write_cpp_content += get_data_type_parse(
                    net_packet_data["Data"][net_package_data_key], net_package_data_key)
                write_cpp_content += ";\n"
                write_cpp_content += "\t}\n"
        write_cpp_content += "\treturn DataString;\n"
        write_cpp_content += "}\n"
        # Accessor implementations for every data member.
        if "Data" in net_packet_data:
            for net_package_data_key in net_packet_data["Data"]:
                write_cpp_content += get_data_type_cpp_function(
                    packet_class_name, net_packet_data["Data"][net_package_data_key], net_package_data_key)
    # Write both generated files into the configured export directory.
    common.overwrite_file_content(
        config_yaml_data["RequestExportPath"]+"/NetRequest"+net_packet_name+".h", write_h_content)
    common.overwrite_file_content(
        config_yaml_data["RequestExportPath"]+"/NetRequest"+net_packet_name+".cpp", write_cpp_content)
    print("Write NetRequest "+net_packet_name + " Success!")
if __name__ == "__main__":
    # Run from the tool's own directory so relative config paths resolve.
    file_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    os.chdir(file_path)
    cwd = os.getcwd()
    # Load the tool configuration (export paths, marker strings, ...).
    with open('../Config.yaml', 'r', encoding='utf-8') as config_yaml_file:
        config_yaml_data = yaml.load(config_yaml_file, Loader=yaml.FullLoader)
        config_yaml_file.close()
    # print(config_yaml_data)
    net_header_list = []
    net_packet_name_list = []
    # Wipe previous output, then regenerate one request file per config file.
    common.clean_file_path(config_yaml_data["RequestExportPath"])
    config_files = os.listdir(config_yaml_data["RequestConfigPath"])
    for file_name in config_files:
        with open(config_yaml_data["RequestConfigPath"]+"/"+file_name, 'r', encoding='utf-8') as packet_config_file:
            # NOTE: packet_config_data is a module-level global consumed by
            # write_net_packet().
            packet_config_data = yaml.load(
                packet_config_file, Loader=yaml.FullLoader)
            packet_config_file.close()
            packet_file_name = os.path.splitext(file_name)[0]
            net_packet_name_list.append(packet_file_name)
            write_net_packet(packet_file_name)
    write_net_header()
    write_net_packet_name()
import os
import sys
import yaml
from netpacket import common
def write_net_header():
    """Splice the ENetHeader enum into the NetDef file.

    Builds the enum body from the global ``net_header_list`` and writes it
    between the NetDefStart/NetDefEnd markers via ``common.write_file_content``.
    """
    parts = ["\n", "enum ENetHeader\n", "{\n", "    ENetHeader_None = 0,\n"]
    for header_name in net_header_list:
        parts.append("    ENetHeader_" + header_name + ",\n")
    parts.append("    ENetHeader_Max\n")
    parts.append("};\n")
    common.write_file_content(config_yaml_data["NetDefFile"],
                              config_yaml_data["NetDefStart"],
                              config_yaml_data["NetDefEnd"],
                              "".join(parts))
    print("Write NetHeader Success!")
def write_net_packet_name():
    """Regenerate the umbrella header that includes every NetRequest header."""
    header_lines = [
        "#pragma once\n",
        "\n",
        "//Exported by Tool, please don't edit this file directly.\n",
        "\n",
    ]
    for packet_name in net_packet_name_list:
        header_lines.append("#include \"NetRequest" + packet_name + ".h\"\n")
    common.overwrite_file_content(
        config_yaml_data["NetRequestFile"], "".join(header_lines))
    print("Write NetRequest Name Success!")
def get_data_type(data_type):
    """Map a config type name to the corresponding Unreal C++ type.

    Parameters
    ----------
    data_type : str
        Type name from the YAML config ("bool", "string", "int32[]", ...).

    Returns
    -------
    str
        The Unreal C++ type. Unknown names are returned unchanged so
        user-defined C++ types can be used directly in the config.
    """
    cpp_types = {
        "bool": "bool",
        "int32": "int32",
        "int64": "int64",
        "float": "float",
        "string": "FString",
        "bool[]": "TArray<bool>",
        "int32[]": "TArray<int32>",
        "int64[]": "TArray<int64>",
        "float[]": "TArray<float>",
        "string[]": "TArray<FString>",
    }
    # dict lookup replaces the original if/elif chain; same fall-through.
    return cpp_types.get(data_type, data_type)
def get_data_type_empty(data_type):
    """Return the C++ statement suffix that resets a member of `data_type`.

    Scalars are assigned their zero value, FString/TArray members call
    ``.Empty()``, and unknown (user-defined message) types fall back to
    ``.Clear()``.

    Parameters
    ----------
    data_type : str
        Type name from the YAML config.

    Returns
    -------
    str
        Text appended after ``<name>_`` in the generated Clear() body.
    """
    reset_suffixes = {
        "bool": " = false",
        "int32": " = 0",
        "int64": " = 0",
        "float": " = 0.f",
        "string": ".Empty()",
        "bool[]": ".Empty()",
        "int32[]": ".Empty()",
        "int64[]": ".Empty()",
        "float[]": ".Empty()",
        "string[]": ".Empty()",
    }
    return reset_suffixes.get(data_type, ".Clear()")
def get_data_type_parse(data_type, data_name):
    """Return the FString call chain that appends member `<data_name>_`.

    Used when generating ParseStr(): the returned text is appended after
    ``DataString`` in the emitted C++ source.

    Parameters
    ----------
    data_type : str
        Type name from the YAML config.
    data_name : str
        Member base name (without the trailing underscore).

    Returns
    -------
    str
        The serialization call, or "" for unknown types (nothing appended).
    """
    # "{n}" is the member base name; array types go through GameParser helpers.
    templates = {
        "bool": ".AppendInt({n}_?1:0)",
        "int32": ".AppendInt({n}_)",
        "int64": ".AppendInt({n}_)",
        "float": ".Append(FString::SanitizeFloat({n}_))",
        "string": ".Append({n}_)",
        "bool[]": ".Append(GameParser::GetConvertListBool({n}_))",
        "int32[]": ".Append(GameParser::GetConvertListInt32({n}_))",
        "int64[]": ".Append(GameParser::GetConvertListInt64({n}_))",
        "float[]": ".Append(GameParser::GetConvertListFloat({n}_))",
        "string[]": ".Append(GameParser::GetConvertListString({n}_))",
    }
    return templates.get(data_type, "").format(n=data_name)
def is_need_initialization(data_type):
    """Whether a member of `data_type` needs a constructor initializer.

    Only scalar value types need an explicit zero-initializer; FString and
    TArray members default-construct to an empty state.
    """
    return data_type in ("bool", "int32", "int64", "float")
def is_need_has_variable(data_type):
    """Whether `data_type` needs a separate ``has_<name>_`` presence flag.

    Scalars and strings cannot express "unset" by themselves, so they carry
    an explicit flag; array types report presence via their element count
    instead and therefore return False.
    """
    return data_type in ("bool", "int32", "int64", "float", "string")
def get_need_initialization(data_type):
    """Return the C++ zero-value literal for a scalar `data_type`.

    Returns ``False`` (kept for backward compatibility with the original
    implementation) for types that take no initializer; callers are expected
    to gate on :func:`is_need_initialization` first.
    """
    zero_literals = {
        "bool": "false",
        "int32": "0",
        "int64": "0",
        "float": "0.f",
    }
    return zero_literals.get(data_type, False)
def get_data_public_h_function(data_type, data_name):
    """Generate the public .h accessor declarations for one data member.

    Scalars/strings get a getter and a setter; array types additionally get
    an indexed getter, a ``_size`` helper and an ``add_`` appender. Every
    member gets a ``has_`` query. Returns the declarations as C++ source text.
    Note: the mixed "    "/"\t" indentation of the emitted code is preserved
    from the original tool output.
    """
    content_prefix = "    "
    content = ""
    if data_type == "bool":
        content += content_prefix + "bool "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(bool const& " + data_name + ");\n"
    elif data_type == "int32":
        content += content_prefix + "int32 "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(int32 const& " + data_name + ");\n"
    elif data_type == "int64":
        content += content_prefix + "int64 "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(int64 const& " + data_name + ");\n"
    elif data_type == "float":
        content += content_prefix + "float "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(float const& " + data_name + ");\n"
    elif data_type == "string":
        content += content_prefix + "FString const& "+data_name+"() const;\n"
        content += "\tvoid set_" + data_name + \
            "(FString const& " + data_name + ");\n"
    elif data_type == "bool[]":
        content += content_prefix + "TArray<bool> const& "+data_name+"() const;\n"
        content += content_prefix + "bool "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(bool const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<bool> const& " + data_name + ");\n"
    elif data_type == "int32[]":
        content += content_prefix + "TArray<int32> const& "+data_name+"() const;\n"
        content += content_prefix + "int32 "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(int32 const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<int32> const& " + data_name + ");\n"
    elif data_type == "int64[]":
        content += content_prefix + "TArray<int64> const& "+data_name+"() const;\n"
        content += content_prefix + "int64 "+data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(int64 const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<int64> const& " + data_name + ");\n"
    elif data_type == "float[]":
        content += content_prefix + "TArray<float> const& "+data_name+"() const;\n"
        content += content_prefix + "float " + \
            data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(float const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<float> const& " + data_name + ");\n"
    elif data_type == "string[]":
        content += content_prefix + "TArray<FString> const& "+data_name+"() const;\n"
        content += content_prefix + "FString const& " + \
            data_name+"(int32 Index) const;\n"
        content += content_prefix + "int32 "+data_name+"_size() const;\n"
        content += content_prefix + "void add_" + \
            data_name + "(FString const& " + data_name + ");\n"
        content += content_prefix + "void set_" + \
            data_name + "(TArray<FString> const& " + data_name + ");\n"
    # Every member type gets a has_ query declaration.
    content += "\tbool has_" + data_name + "() const;\n"
    return content
def get_data_type_cpp_function(class_name, data_type, data_name):
    """Generate the .cpp accessor implementations for one data member.

    For scalars/strings: getter, a setter that also raises the
    ``has_<name>_`` flag, and a ``has_`` getter. For array types: getter,
    range-checked indexed getter, ``_size``, ``set_``, ``add_`` and a
    ``has_`` query derived from the element count. Returns C++ source text.
    Note: the mixed "\t"/space indentation of the emitted code differs per
    branch and is preserved from the original tool output.
    """
    content_prefix = ""
    content = "\n"
    if data_type == "bool":
        # Scalar bool: getter, presence-tracking setter, has_ flag getter.
        content += content_prefix + "bool "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(bool const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    elif data_type == "int32":
        # Scalar int32: same trio as bool.
        content += content_prefix + "int32 "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(int32 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    elif data_type == "int64":
        # Scalar int64: same trio as bool.
        content += content_prefix + "int64 "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(int64 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    elif data_type == "float":
        # Scalar float: same trio as bool.
        content += content_prefix + "float "+class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\treturn "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(float const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    elif data_type == "string":
        # FString: const-ref getter plus the presence-tracking setter.
        content += content_prefix + "FString const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::set_" + \
            data_name + "(FString const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\thas_"+data_name+"_ = true;\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return has_"+data_name+"_;\n"
        content += content_prefix + "}\n"
    elif data_type == "bool[]":
        # Array of bool: full accessor set; has_ is derived from _size().
        content += content_prefix + "TArray<bool> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<bool> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(bool const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    elif data_type == "int32[]":
        # Array of int32: same accessor set as bool[].
        content += content_prefix + "TArray<int32> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<int32> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(int32 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    elif data_type == "int64[]":
        # Array of int64: same accessor set as bool[].
        content += content_prefix + "TArray<int64> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int64 " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<int64> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(int64 const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    elif data_type == "float[]":
        # Array of float: same accessor set as bool[].
        content += content_prefix + "TArray<float> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "float " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<float> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(float const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    elif data_type == "string[]":
        # Array of FString: indexed getter returns a const-ref.
        content += content_prefix + "TArray<FString> const& " + \
            class_name+"::"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_;\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "FString const& " + \
            class_name+"::"+data_name+"(int32 Index) const\n"
        content += content_prefix + "{\n"
        content += content_prefix + \
            "    if (!(Index >= 0 && Index < "+data_name+"_size()))\n"
        content += content_prefix + "    {\n"
        content += content_prefix + \
            "        UE_LOG(LogNetResponse, Error, TEXT(\"" + \
            class_name+"::"+data_name+" Out of Range!\"));\n"
        content += content_prefix + "    }\n"
        content += content_prefix + "    return "+data_name+"_[Index];\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "int32 "+class_name+"::"+data_name+"_size() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return "+data_name+"_.Num();\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "void " + class_name + "::set_" + \
            data_name + "(TArray<FString> const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name+"_ = " + data_name + ";\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += "void " + class_name + "::add_" + \
            data_name + "(FString const& " + data_name + ")\n"
        content += content_prefix + "{\n"
        content += content_prefix + "\t"+data_name + \
            "_.Emplace(" + data_name + ");\n"
        content += content_prefix + "}\n"
        content += content_prefix + "\n"
        content += content_prefix + "bool "+class_name+"::has_"+data_name+"() const\n"
        content += content_prefix + "{\n"
        content += content_prefix + "    return " + data_name + "_size() > 0;\n"
        content += content_prefix + "}\n"
    return content
def write_net_packet(net_packet_name):
    """Generate NetRequest<net_packet_name>.h/.cpp and write them to disk.

    Reads module globals: ``packet_config_data`` (parsed YAML for the current
    config file), ``config_yaml_data`` (export paths) and appends every
    generated header name to ``net_header_list`` as a side effect.
    """
    # Register each packet's enum name globally; write_net_header() consumes this.
    for net_packet_key in packet_config_data:
        net_header_list.append(net_packet_name + net_packet_key)
    # ---- .h file: one request class per packet key ----
    write_h_content = "#pragma once\n"
    write_h_content += "\n"
    write_h_content += "//Exported by Tool, please don't edit this file directly.\n"
    write_h_content += "\n"
    write_h_content += "#include \"NetRequestBase.h\"\n"
    for net_packet_key in packet_config_data:
        net_packet_data = packet_config_data[net_packet_key]
        write_h_content += "\n"
        packet_class_name = "NetRequest" + net_packet_name + net_packet_key
        write_h_content += "class "+packet_class_name+" : public NetRequestBase\n"
        write_h_content += "{\n"
        write_h_content += "public:\n"
        write_h_content += "\t" + packet_class_name + "();\n"
        write_h_content += "\n"
        write_h_content += "\tstatic " + packet_class_name + \
            "* Cast(NetRequestBase& Data);\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual NetRequestBase* Create() const override;\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual void Clear() override;\n"
        write_h_content += "\n"
        write_h_content += "\tvirtual FString ParseStr() override;\n"
        # Public accessor declarations for every configured data member.
        if "Data" in net_packet_data:
            write_h_content += "\n"
            write_h_content += "public:\n"
            for net_package_data_key in net_packet_data["Data"]:
                write_h_content += get_data_public_h_function(
                    net_packet_data["Data"][net_package_data_key], net_package_data_key)
        # Private storage: one <name>_ member (plus has_<name>_ flag for scalars).
        if "Data" in net_packet_data:
            write_h_content += "\n"
            write_h_content += "private:\n"
            for net_package_data_key in net_packet_data["Data"]:
                write_h_content += "\t"
                write_h_content += get_data_type(
                    net_packet_data["Data"][net_package_data_key])
                write_h_content += " " + net_package_data_key + "_;\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_h_content += "\tbool has_" + net_package_data_key + "_;\n"
        write_h_content += "};\n"
    # ---- .cpp file: constructor, Cast, Create, Clear, ParseStr, accessors ----
    write_cpp_content = "//Exported by Tool, please don't edit this file directly.\n"
    write_cpp_content += "\n"
    write_cpp_content += "#include \"NetRequest"+net_packet_name+".h\"\n"
    write_cpp_content += "#include \"GameParser.h\"\n"
    for net_packet_key in packet_config_data:
        net_packet_data = packet_config_data[net_packet_key]
        write_cpp_content += "\n"
        packet_class_name = "NetRequest" + net_packet_name + net_packet_key
        write_cpp_content += packet_class_name + "::" + packet_class_name + "()\n"
        # Constructor initializer list: ':' before the first entry, ',' after.
        if "Data" in net_packet_data:
            is_first = True
            for net_package_data_key in net_packet_data["Data"]:
                data_type = get_data_type(
                    net_packet_data["Data"][net_package_data_key])
                if is_need_initialization(data_type):
                    write_cpp_content += "\t"
                    if is_first:
                        is_first = False
                        write_cpp_content += ":"
                    else:
                        write_cpp_content += ","
                    write_cpp_content += " " + net_package_data_key + "_(" + \
                        get_need_initialization(data_type)+")\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_cpp_content += "\t"
                    if is_first:
                        is_first = False
                        write_cpp_content += ":"
                    else:
                        write_cpp_content += ","
                    write_cpp_content += " has_" + \
                        net_package_data_key + "_(false)\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tHeader = ENetHeader_" + \
            net_packet_name + net_packet_key + ";\n"
        write_cpp_content += "\tMethod = ENetMethod_" + \
            net_packet_data["Method"]+";\n"
        # "Server" is optional in the config; default to an empty string.
        if "Server" in net_packet_data:
            write_cpp_content += "\tServer = \"" + \
                net_packet_data["Server"]+"\";\n"
        else:
            write_cpp_content += "\tServer = \"\";\n"
        write_cpp_content += "\tUrl = \""+net_packet_data["Url"]+"\";\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Cast(): checked downcast keyed on the Header enum value.
        write_cpp_content += packet_class_name + "* " + \
            packet_class_name + "::Cast(NetRequestBase& Data)\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tif (Data.Header != ENetHeader_" + \
            net_packet_name + net_packet_key + ")\n"
        write_cpp_content += "\t{\n"
        write_cpp_content += "\t\treturn nullptr;\n"
        write_cpp_content += "\t}\n"
        write_cpp_content += "\treturn static_cast<" + \
            packet_class_name + "*>(&Data);\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Create(): heap-allocated copy of this request.
        write_cpp_content += "NetRequestBase* " + \
            packet_class_name + "::Create() const\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tconst auto Data = new " + packet_class_name + "();\n"
        write_cpp_content += "\t*Data = *this;\n"
        write_cpp_content += "\treturn Data;\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # Clear(): reset every member and drop the has_ presence flags.
        write_cpp_content += "void " + packet_class_name + "::Clear()\n"
        write_cpp_content += "{\n"
        if "Data" in net_packet_data:
            for net_package_data_key in net_packet_data["Data"]:
                write_cpp_content += "\t"
                write_cpp_content += net_package_data_key + "_"
                write_cpp_content += get_data_type_empty(
                    net_packet_data["Data"][net_package_data_key])
                write_cpp_content += ";\n"
                if is_need_has_variable(net_packet_data["Data"][net_package_data_key]):
                    write_cpp_content += "\thas_" + net_package_data_key + "_ = false;\n"
        write_cpp_content += "}\n"
        write_cpp_content += "\n"
        # ParseStr(): serialize present fields as key=value pairs joined by '&'.
        write_cpp_content += "FString " + packet_class_name + "::ParseStr()\n"
        write_cpp_content += "{\n"
        write_cpp_content += "\tDataString.Empty();\n"
        if "Data" in net_packet_data:
            data_count = len(net_packet_data["Data"])
            if data_count > 1:
                write_cpp_content += "\tbool IsFirst = true;\n"
            index = 0
            for net_package_data_key in net_packet_data["Data"]:
                index += 1
                write_cpp_content += "\tif (has_" + \
                    net_package_data_key + "())\n"
                write_cpp_content += "\t{\n"
                # Only needed when several fields may be joined together.
                if data_count > 1:
                    write_cpp_content += "\t\tif (!IsFirst)\n"
                    write_cpp_content += "\t\t{\n"
                    write_cpp_content += "\t\t\tDataString.Append(\"&\");\n"
                    write_cpp_content += "\t\t}\n"
                    if index < data_count:
                        write_cpp_content += "\t\telse\n"
                        write_cpp_content += "\t\t{\n"
                        write_cpp_content += "\t\t\tIsFirst = false;\n"
                        write_cpp_content += "\t\t}\n"
                write_cpp_content += "\t\tDataString.Append(\"" + \
                    net_package_data_key+"=\");\n"
                write_cpp_content += "\t\tDataString"
                write_cpp_content += get_data_type_parse(
                    net_packet_data["Data"][net_package_data_key], net_package_data_key)
                write_cpp_content += ";\n"
                write_cpp_content += "\t}\n"
        write_cpp_content += "\treturn DataString;\n"
        write_cpp_content += "}\n"
        # Accessor implementations for every data member.
        if "Data" in net_packet_data:
            for net_package_data_key in net_packet_data["Data"]:
                write_cpp_content += get_data_type_cpp_function(
                    packet_class_name, net_packet_data["Data"][net_package_data_key], net_package_data_key)
    # Write both generated files into the configured export directory.
    common.overwrite_file_content(
        config_yaml_data["RequestExportPath"]+"/NetRequest"+net_packet_name+".h", write_h_content)
    common.overwrite_file_content(
        config_yaml_data["RequestExportPath"]+"/NetRequest"+net_packet_name+".cpp", write_cpp_content)
    print("Write NetRequest "+net_packet_name + " Success!")
if __name__ == "__main__":
    # Run from the tool's own directory so relative config paths resolve.
    file_path = os.path.dirname(os.path.abspath(sys.argv[0]))
    os.chdir(file_path)
    cwd = os.getcwd()
    # Load the tool configuration (export paths, marker strings, ...).
    with open('../Config.yaml', 'r', encoding='utf-8') as config_yaml_file:
        config_yaml_data = yaml.load(config_yaml_file, Loader=yaml.FullLoader)
        config_yaml_file.close()
    # print(config_yaml_data)
    net_header_list = []
    net_packet_name_list = []
    # Wipe previous output, then regenerate one request file per config file.
    common.clean_file_path(config_yaml_data["RequestExportPath"])
    config_files = os.listdir(config_yaml_data["RequestConfigPath"])
    for file_name in config_files:
        with open(config_yaml_data["RequestConfigPath"]+"/"+file_name, 'r', encoding='utf-8') as packet_config_file:
            # NOTE: packet_config_data is a module-level global consumed by
            # write_net_packet().
            packet_config_data = yaml.load(
                packet_config_file, Loader=yaml.FullLoader)
            packet_config_file.close()
            packet_file_name = os.path.splitext(file_name)[0]
            net_packet_name_list.append(packet_file_name)
            write_net_packet(packet_file_name)
    write_net_header()
    write_net_packet_name()
def unilabel(y_true, y_pred):
    """Compute relevance judgements for single-label queries.

    Works only for queries whose ground truth is a single label per
    sample. Provided mainly for a slight speedup over ``multilabel`` on
    large ``n_samples`` and for added expressivity.

    Parameters
    ----------
    y_true : ndarray of shape (n_samples, 1), where ``n_samples >= 1``
        Ground-truth label for each query.
    y_pred : ndarray of shape (n_samples, n_labels), where ``n_samples >= 1``
        Predicted labels sorted by relevance (as returned by an IR system).

    Returns
    -------
    relevance : boolean ndarray of shape (n_samples, n_labels)
        Element-wise relevance judgements for ``y_pred``.

    Raises
    ------
    ValueError
        If ``y_true`` has a last dimension larger than 1 (multilabel case).

    Examples
    --------
    >>> import numpy as np
    >>> from irmetrics.relevance import unilabel
    >>> unilabel(np.array([[1]]), np.array([[0, 1, 4]]))
    array([[False,  True, False]])
    >>> unilabel(np.array([[1], [2]]), np.array([[0, 1, 4], [5, 6, 7]]))
    array([[False,  True, False],
           [False, False, False]])
    """
    single_label = y_true.shape[-1] == 1
    if not single_label:
        msg = "y_true is expected to be of shape (n_samples, 1), got {}"
        raise ValueError(msg.format(y_true.shape))
    # Broadcasting (n, 1) against (n, k) marks every prediction that
    # equals the sample's single true label.
    return y_true == y_pred
def multilabel(y_true, y_pred):
    """Compute relevance judgements for (possibly) multi-label queries.

    Parameters
    ----------
    y_true : ndarray of shape (n_samples, n_true), where ``n_samples >= 1``
        Ground-truth labels for each query.
    y_pred : ndarray of shape (n_samples, n_labels), where ``n_samples >= 1``
        Predicted labels sorted by relevance (as returned by an IR
        system). ``n_labels`` and ``n_true`` may differ.

    Returns
    -------
    relevance : boolean ndarray of shape (n_samples, n_labels)
        True where a prediction matches any of the sample's true labels.

    Examples
    --------
    >>> import numpy as np
    >>> from irmetrics.relevance import multilabel
    >>> multilabel(np.array([[1]]), np.array([[0, 1, 4]]))
    array([[False,  True, False]])
    >>> multilabel(np.array([[1, 4]]), np.array([[0, 1, 4]]))
    array([[False,  True,  True]])
    """
    # Pairwise comparison table of shape (n_samples, n_labels, n_true).
    pairwise_hits = y_pred[:, :, None] == y_true[:, None]
    # A prediction is relevant when it hits at least one true label.
    return pairwise_hits.any(axis=-1)
def relevant_counts(y_pred, y_true):
    """Count, for each predicted label, how many true labels it matches.

    Parameters
    ----------
    y_pred : ndarray of shape (n_samples, n_labels), where ``n_samples >= 1``
        Predicted labels sorted by relevance (as returned by an IR
        system).
    y_true : ndarray of shape (n_samples, n_true), where ``n_samples >= 1``
        Ground-truth labels for each query. ``n_labels`` and ``n_true``
        may differ.

    Returns
    -------
    relevance_counts : ndarray of shape (n_samples, n_labels)
        Per prediction, the number of true labels it equals (0 or 1
        when the true labels within a sample are unique).

    Examples
    --------
    >>> import numpy as np
    >>> from irmetrics.relevance import relevant_counts
    >>> relevant_counts(np.array([[0, 1, 4]]), np.array([[1]]))
    array([[0, 1, 0]])
    >>> relevant_counts(np.array([[0, 1, 4], [5, 6, 7]]), np.array([[1], [2]]))
    array([[0, 1, 0],
           [0, 0, 0]])
    >>> relevant_counts(np.array([[0, 1, 4]]), np.array([[1, 4]]))
    array([[0, 1, 1]])
    """
    # BUG FIX: the original compared y_pred against ITSELF
    # (y_pred[:, :, None] == y_pred[:, None]), making the result
    # independent of y_true. Compare against the ground truth instead,
    # mirroring `multilabel` but summing matches rather than any-reducing.
    # (The original docstring examples also swapped the argument order
    # relative to the signature; they are corrected above.)
    return (y_pred[:, :, None] == y_true[:, None]).sum(axis=-1)
"""Compute relevance(s) of predicted labels.
This version of the relevance function works only for the queries
(problems) with a single groud truth label.
It is provided mainly for two reasons: there is a slight speedup (order of
seconds for the large `n_samples`) and it adds expresivity
if needed.
Parameters
----------
y_true : ndarray of shape (n_samples, 1), where `n_samples >= 1`
Ground true labels for a given query (as returned by an IR system).
y_pred : ndarray of shape (n_samples, n_labels), where `n_samples >= 1`
Target labels sorted by relevance (as returned by an IR system).
Returns
-------
relevance : bolean ndarray
The relevance judgements for `y_pred` of shape (n_samples, 1)
Raises
-------
ValueError
If `y_true` has last dimension larger than 1 (multilabel case).
Examples
--------
>>> import numpy as np
>>> from irmetrics.relevance import unilabel
>>> # ground-truth label of some answers to a query:
>>> y_true = np.array([[1]]) # (1, 1)
and the predicted labels by an IR system:
>>> y_pred = np.array([[0, 1, 4]]) # (1, 3)
>>> unilabel(y_true, y_pred)
array([[False, True, False]])
>>> y_true = np.array([[1], [2]]) # (2, 1)
>>> y_pred = np.array([[0, 1, 4], [5, 6, 7]]) # (2, 3)
>>> unilabel(y_true, y_pred)
array([[False, True, False],
[False, False, False]])
"""
if y_true.shape[-1] != 1:
msg = "y_true is expected to be of shape (n_samples, 1), got {}"
raise ValueError(msg.format(y_true.shape))
return y_true == y_pred
def multilabel(y_true, y_pred):
    """Compute relevance judgements for (possibly) multi-label queries.

    Parameters
    ----------
    y_true : ndarray of shape (n_samples, n_true), where ``n_samples >= 1``
        Ground-truth labels for each query.
    y_pred : ndarray of shape (n_samples, n_labels), where ``n_samples >= 1``
        Predicted labels sorted by relevance (as returned by an IR
        system). ``n_labels`` and ``n_true`` may differ.

    Returns
    -------
    relevance : boolean ndarray of shape (n_samples, n_labels)
        True where a prediction matches any of the sample's true labels.

    Examples
    --------
    >>> import numpy as np
    >>> from irmetrics.relevance import multilabel
    >>> multilabel(np.array([[1]]), np.array([[0, 1, 4]]))
    array([[False,  True, False]])
    >>> multilabel(np.array([[1, 4]]), np.array([[0, 1, 4]]))
    array([[False,  True,  True]])
    """
    # Pairwise comparison table of shape (n_samples, n_labels, n_true).
    pairwise_hits = y_pred[:, :, None] == y_true[:, None]
    # A prediction is relevant when it hits at least one true label.
    return pairwise_hits.any(axis=-1)
def relevant_counts(y_pred, y_true):
    """Count, for each predicted label, how many true labels it matches.

    Parameters
    ----------
    y_pred : ndarray of shape (n_samples, n_labels), where ``n_samples >= 1``
        Predicted labels sorted by relevance (as returned by an IR
        system).
    y_true : ndarray of shape (n_samples, n_true), where ``n_samples >= 1``
        Ground-truth labels for each query. ``n_labels`` and ``n_true``
        may differ.

    Returns
    -------
    relevance_counts : ndarray of shape (n_samples, n_labels)
        Per prediction, the number of true labels it equals (0 or 1
        when the true labels within a sample are unique).

    Examples
    --------
    >>> import numpy as np
    >>> from irmetrics.relevance import relevant_counts
    >>> relevant_counts(np.array([[0, 1, 4]]), np.array([[1]]))
    array([[0, 1, 0]])
    >>> relevant_counts(np.array([[0, 1, 4], [5, 6, 7]]), np.array([[1], [2]]))
    array([[0, 1, 0],
           [0, 0, 0]])
    >>> relevant_counts(np.array([[0, 1, 4]]), np.array([[1, 4]]))
    array([[0, 1, 1]])
    """
    # BUG FIX: the original compared y_pred against ITSELF
    # (y_pred[:, :, None] == y_pred[:, None]), making the result
    # independent of y_true. Compare against the ground truth instead,
    # mirroring `multilabel` but summing matches rather than any-reducing.
    # (The original docstring examples also swapped the argument order
    # relative to the signature; they are corrected above.)
    return (y_pred[:, :, None] == y_true[:, None]).sum(axis=-1)
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public API of this (Pulumi-codegen-generated) module: one output type
# per CRD Spec/Status object plus their nested sub-objects.
__all__ = [
    'AccessGroupSpec',
    'AccessGroupStatus',
    'AccessPolicySpec',
    'AccessPolicySpecRoles',
    'AccessPolicySpecRolesCustomRolesDef',
    'AccessPolicySpecSubject',
    'AccessPolicySpecSubjectAccessGroupDef',
    'AccessPolicySpecTarget',
    'AccessPolicyStatus',
    'AccessPolicyStatusRoles',
    'AccessPolicyStatusRolesCustomRolesDef',
    'AccessPolicyStatusSubject',
    'AccessPolicyStatusSubjectAccessGroupDef',
    'AccessPolicyStatusTarget',
    'AuthorizationPolicySpec',
    'AuthorizationPolicySpecSource',
    'AuthorizationPolicySpecTarget',
    'AuthorizationPolicyStatus',
    'AuthorizationPolicyStatusSource',
    'AuthorizationPolicyStatusTarget',
    'CustomRoleSpec',
    'CustomRoleStatus',
]
# NOTE: generated Pulumi output type — code kept byte-identical; comments only.
@pulumi.output_type
class AccessGroupSpec(dict):
    """
    AccessGroupSpec defines the desired state of AccessGroup
    """
    def __init__(__self__, *,
                 description: str,
                 name: str,
                 service_ids: Optional[Sequence[str]] = None,
                 user_emails: Optional[Sequence[str]] = None):
        """
        AccessGroupSpec defines the desired state of AccessGroup
        """
        # Required fields are always set; optional ones only when provided.
        pulumi.set(__self__, "description", description)
        pulumi.set(__self__, "name", name)
        if service_ids is not None:
            pulumi.set(__self__, "service_ids", service_ids)
        if user_emails is not None:
            pulumi.set(__self__, "user_emails", user_emails)

    @property
    @pulumi.getter
    def description(self) -> str:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def name(self) -> str:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceIDs")
    def service_ids(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "service_ids")

    @property
    @pulumi.getter(name="userEmails")
    def user_emails(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "user_emails")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessGroupStatus(dict):
    """
    AccessGroupStatus defines the observed state of AccessGroup
    """
    def __init__(__self__, *,
                 group_id: Optional[str] = None,
                 description: Optional[str] = None,
                 message: Optional[str] = None,
                 name: Optional[str] = None,
                 service_ids: Optional[Sequence[str]] = None,
                 state: Optional[str] = None,
                 user_emails: Optional[Sequence[str]] = None):
        """
        AccessGroupStatus defines the observed state of AccessGroup
        """
        # All status fields are optional; set only those provided.
        if group_id is not None:
            pulumi.set(__self__, "group_id", group_id)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if service_ids is not None:
            pulumi.set(__self__, "service_ids", service_ids)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if user_emails is not None:
            pulumi.set(__self__, "user_emails", user_emails)

    @property
    # NOTE(review): wire name is capitalized 'GroupID' (unlike the other
    # camelCase names) — presumably matches the CRD schema; confirm there.
    @pulumi.getter(name="GroupID")
    def group_id(self) -> Optional[str]:
        return pulumi.get(self, "group_id")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceIDs")
    def service_ids(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "service_ids")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="userEmails")
    def user_emails(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "user_emails")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpec(dict):
    """
    AccessPolicySpec defines the desired state of AccessPolicy
    """
    def __init__(__self__, *,
                 roles: 'outputs.AccessPolicySpecRoles',
                 subject: 'outputs.AccessPolicySpecSubject',
                 target: 'outputs.AccessPolicySpecTarget'):
        """
        AccessPolicySpec defines the desired state of AccessPolicy
        """
        # All three sub-objects are required.
        pulumi.set(__self__, "roles", roles)
        pulumi.set(__self__, "subject", subject)
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def roles(self) -> 'outputs.AccessPolicySpecRoles':
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def subject(self) -> 'outputs.AccessPolicySpecSubject':
        return pulumi.get(self, "subject")

    @property
    @pulumi.getter
    def target(self) -> 'outputs.AccessPolicySpecTarget':
        return pulumi.get(self, "target")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecRoles(dict):
    """Nested output type for the `roles` field of AccessPolicySpec."""
    def __init__(__self__, *,
                 custom_roles_d_name: Optional[Sequence[str]] = None,
                 custom_roles_def: Optional[Sequence['outputs.AccessPolicySpecRolesCustomRolesDef']] = None,
                 defined_roles: Optional[Sequence[str]] = None):
        if custom_roles_d_name is not None:
            pulumi.set(__self__, "custom_roles_d_name", custom_roles_d_name)
        if custom_roles_def is not None:
            pulumi.set(__self__, "custom_roles_def", custom_roles_def)
        if defined_roles is not None:
            pulumi.set(__self__, "defined_roles", defined_roles)

    @property
    # NOTE(review): wire name 'customRolesDName' looks like a schema typo
    # for 'customRolesDefName' — confirm against the CRD before changing.
    @pulumi.getter(name="customRolesDName")
    def custom_roles_d_name(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "custom_roles_d_name")

    @property
    @pulumi.getter(name="customRolesDef")
    def custom_roles_def(self) -> Optional[Sequence['outputs.AccessPolicySpecRolesCustomRolesDef']]:
        return pulumi.get(self, "custom_roles_def")

    @property
    @pulumi.getter(name="definedRoles")
    def defined_roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "defined_roles")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecRolesCustomRolesDef(dict):
    """Reference to a CustomRole by name and namespace (both required)."""
    def __init__(__self__, *,
                 custom_role_name: str,
                 custom_role_namespace: str):
        pulumi.set(__self__, "custom_role_name", custom_role_name)
        pulumi.set(__self__, "custom_role_namespace", custom_role_namespace)

    @property
    @pulumi.getter(name="customRoleName")
    def custom_role_name(self) -> str:
        return pulumi.get(self, "custom_role_name")

    @property
    @pulumi.getter(name="customRoleNamespace")
    def custom_role_namespace(self) -> str:
        return pulumi.get(self, "custom_role_namespace")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecSubject(dict):
    """Nested output type for the `subject` field of AccessPolicySpec.

    All fields are optional; presumably exactly one identifies the
    subject (group ref, group ID, service ID, or user email) — confirm
    against the CRD validation rules.
    """
    def __init__(__self__, *,
                 access_group_def: Optional['outputs.AccessPolicySpecSubjectAccessGroupDef'] = None,
                 access_group_id: Optional[str] = None,
                 service_id: Optional[str] = None,
                 user_email: Optional[str] = None):
        if access_group_def is not None:
            pulumi.set(__self__, "access_group_def", access_group_def)
        if access_group_id is not None:
            pulumi.set(__self__, "access_group_id", access_group_id)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)
        if user_email is not None:
            pulumi.set(__self__, "user_email", user_email)

    @property
    @pulumi.getter(name="accessGroupDef")
    def access_group_def(self) -> Optional['outputs.AccessPolicySpecSubjectAccessGroupDef']:
        return pulumi.get(self, "access_group_def")

    @property
    @pulumi.getter(name="accessGroupID")
    def access_group_id(self) -> Optional[str]:
        return pulumi.get(self, "access_group_id")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    @property
    @pulumi.getter(name="userEmail")
    def user_email(self) -> Optional[str]:
        return pulumi.get(self, "user_email")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecSubjectAccessGroupDef(dict):
    """Reference to an AccessGroup by name and namespace (both required)."""
    def __init__(__self__, *,
                 access_group_name: str,
                 access_group_namespace: str):
        pulumi.set(__self__, "access_group_name", access_group_name)
        pulumi.set(__self__, "access_group_namespace", access_group_namespace)

    @property
    @pulumi.getter(name="accessGroupName")
    def access_group_name(self) -> str:
        return pulumi.get(self, "access_group_name")

    @property
    @pulumi.getter(name="accessGroupNamespace")
    def access_group_namespace(self) -> str:
        return pulumi.get(self, "access_group_namespace")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecTarget(dict):
    """Nested output type for the `target` field of AccessPolicySpec.

    All fields are optional selectors for the resource the policy applies to.
    """
    def __init__(__self__, *,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_class: Optional[str] = None,
                 service_id: Optional[str] = None):
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_class is not None:
            pulumi.set(__self__, "service_class", service_class)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> Optional[str]:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatus(dict):
    """
    AccessPolicyStatus defines the observed state of AccessPolicy
    """
    def __init__(__self__, *,
                 message: Optional[str] = None,
                 policy_id: Optional[str] = None,
                 roles: Optional['outputs.AccessPolicyStatusRoles'] = None,
                 state: Optional[str] = None,
                 subject: Optional['outputs.AccessPolicyStatusSubject'] = None,
                 target: Optional['outputs.AccessPolicyStatusTarget'] = None):
        """
        AccessPolicyStatus defines the observed state of AccessPolicy
        """
        # All status fields are optional; set only those provided.
        if message is not None:
            pulumi.set(__self__, "message", message)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if roles is not None:
            pulumi.set(__self__, "roles", roles)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if subject is not None:
            pulumi.set(__self__, "subject", subject)
        if target is not None:
            pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter(name="policyID")
    def policy_id(self) -> Optional[str]:
        return pulumi.get(self, "policy_id")

    @property
    @pulumi.getter
    def roles(self) -> Optional['outputs.AccessPolicyStatusRoles']:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def subject(self) -> Optional['outputs.AccessPolicyStatusSubject']:
        return pulumi.get(self, "subject")

    @property
    @pulumi.getter
    def target(self) -> Optional['outputs.AccessPolicyStatusTarget']:
        return pulumi.get(self, "target")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusRoles(dict):
    """Nested output type for the `roles` field of AccessPolicyStatus."""
    def __init__(__self__, *,
                 custom_roles_d_name: Optional[Sequence[str]] = None,
                 custom_roles_def: Optional[Sequence['outputs.AccessPolicyStatusRolesCustomRolesDef']] = None,
                 defined_roles: Optional[Sequence[str]] = None):
        if custom_roles_d_name is not None:
            pulumi.set(__self__, "custom_roles_d_name", custom_roles_d_name)
        if custom_roles_def is not None:
            pulumi.set(__self__, "custom_roles_def", custom_roles_def)
        if defined_roles is not None:
            pulumi.set(__self__, "defined_roles", defined_roles)

    @property
    # NOTE(review): wire name 'customRolesDName' mirrors the spec-side
    # type — presumably a schema typo for 'customRolesDefName'; confirm.
    @pulumi.getter(name="customRolesDName")
    def custom_roles_d_name(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "custom_roles_d_name")

    @property
    @pulumi.getter(name="customRolesDef")
    def custom_roles_def(self) -> Optional[Sequence['outputs.AccessPolicyStatusRolesCustomRolesDef']]:
        return pulumi.get(self, "custom_roles_def")

    @property
    @pulumi.getter(name="definedRoles")
    def defined_roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "defined_roles")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusRolesCustomRolesDef(dict):
    """Reference to a CustomRole by name and namespace (both required)."""
    def __init__(__self__, *,
                 custom_role_name: str,
                 custom_role_namespace: str):
        pulumi.set(__self__, "custom_role_name", custom_role_name)
        pulumi.set(__self__, "custom_role_namespace", custom_role_namespace)

    @property
    @pulumi.getter(name="customRoleName")
    def custom_role_name(self) -> str:
        return pulumi.get(self, "custom_role_name")

    @property
    @pulumi.getter(name="customRoleNamespace")
    def custom_role_namespace(self) -> str:
        return pulumi.get(self, "custom_role_namespace")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusSubject(dict):
    """Nested output type for the `subject` field of AccessPolicyStatus."""
    def __init__(__self__, *,
                 access_group_def: Optional['outputs.AccessPolicyStatusSubjectAccessGroupDef'] = None,
                 access_group_id: Optional[str] = None,
                 service_id: Optional[str] = None,
                 user_email: Optional[str] = None):
        if access_group_def is not None:
            pulumi.set(__self__, "access_group_def", access_group_def)
        if access_group_id is not None:
            pulumi.set(__self__, "access_group_id", access_group_id)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)
        if user_email is not None:
            pulumi.set(__self__, "user_email", user_email)

    @property
    @pulumi.getter(name="accessGroupDef")
    def access_group_def(self) -> Optional['outputs.AccessPolicyStatusSubjectAccessGroupDef']:
        return pulumi.get(self, "access_group_def")

    @property
    @pulumi.getter(name="accessGroupID")
    def access_group_id(self) -> Optional[str]:
        return pulumi.get(self, "access_group_id")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    @property
    @pulumi.getter(name="userEmail")
    def user_email(self) -> Optional[str]:
        return pulumi.get(self, "user_email")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusSubjectAccessGroupDef(dict):
    """Reference to an AccessGroup by name and namespace (both required)."""
    def __init__(__self__, *,
                 access_group_name: str,
                 access_group_namespace: str):
        pulumi.set(__self__, "access_group_name", access_group_name)
        pulumi.set(__self__, "access_group_namespace", access_group_namespace)

    @property
    @pulumi.getter(name="accessGroupName")
    def access_group_name(self) -> str:
        return pulumi.get(self, "access_group_name")

    @property
    @pulumi.getter(name="accessGroupNamespace")
    def access_group_namespace(self) -> str:
        return pulumi.get(self, "access_group_namespace")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusTarget(dict):
    """Nested output type for the `target` field of AccessPolicyStatus.

    All fields are optional selectors for the resource the policy applies to.
    """
    def __init__(__self__, *,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_class: Optional[str] = None,
                 service_id: Optional[str] = None):
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_class is not None:
            pulumi.set(__self__, "service_class", service_class)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> Optional[str]:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpec(dict):
    """
    AuthorizationPolicySpec defines the desired state of AuthorizationPolicy
    """
    def __init__(__self__, *,
                 roles: Sequence[str],
                 source: 'outputs.AuthorizationPolicySpecSource',
                 target: 'outputs.AuthorizationPolicySpecTarget'):
        """
        AuthorizationPolicySpec defines the desired state of AuthorizationPolicy
        """
        # Roles plus source/target sub-objects are all required.
        pulumi.set(__self__, "roles", roles)
        pulumi.set(__self__, "source", source)
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def roles(self) -> Sequence[str]:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def source(self) -> 'outputs.AuthorizationPolicySpecSource':
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def target(self) -> 'outputs.AuthorizationPolicySpecTarget':
        return pulumi.get(self, "target")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpecSource(dict):
    """Nested output type for the `source` field of AuthorizationPolicySpec.

    `service_class` is required; the remaining fields are optional selectors.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpecTarget(dict):
    """Nested output type for the `target` field of AuthorizationPolicySpec.

    `service_class` is required; the remaining fields are optional selectors.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatus(dict):
    """
    AuthorizationPolicyStatus defines the observed state of AuthorizationPolicy
    """
    def __init__(__self__, *,
                 message: Optional[str] = None,
                 policy_id: Optional[str] = None,
                 roles: Optional[Sequence[str]] = None,
                 source: Optional['outputs.AuthorizationPolicyStatusSource'] = None,
                 state: Optional[str] = None,
                 target: Optional['outputs.AuthorizationPolicyStatusTarget'] = None):
        """
        AuthorizationPolicyStatus defines the observed state of AuthorizationPolicy
        """
        # All status fields are optional; set only those provided.
        if message is not None:
            pulumi.set(__self__, "message", message)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if roles is not None:
            pulumi.set(__self__, "roles", roles)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if target is not None:
            pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter(name="policyID")
    def policy_id(self) -> Optional[str]:
        return pulumi.get(self, "policy_id")

    @property
    @pulumi.getter
    def roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def source(self) -> Optional['outputs.AuthorizationPolicyStatusSource']:
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def target(self) -> Optional['outputs.AuthorizationPolicyStatusTarget']:
        return pulumi.get(self, "target")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatusSource(dict):
    """Nested output type for the `source` field of AuthorizationPolicyStatus.

    `service_class` is required; the remaining fields are optional selectors.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    # Map camelCase wire property names to snake_case attribute names.
    def _translate_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatusTarget(dict):
def __init__(__self__, *,
service_class: str,
resource_group: Optional[str] = None,
resource_id: Optional[str] = None,
resource_key: Optional[str] = None,
resource_name: Optional[str] = None,
resource_value: Optional[str] = None,
service_id: Optional[str] = None):
pulumi.set(__self__, "service_class", service_class)
if resource_group is not None:
pulumi.set(__self__, "resource_group", resource_group)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
if resource_key is not None:
pulumi.set(__self__, "resource_key", resource_key)
if resource_name is not None:
pulumi.set(__self__, "resource_name", resource_name)
if resource_value is not None:
pulumi.set(__self__, "resource_value", resource_value)
if service_id is not None:
pulumi.set(__self__, "service_id", service_id)
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> str:
return pulumi.get(self, "service_class")
@property
@pulumi.getter(name="resourceGroup")
def resource_group(self) -> Optional[str]:
return pulumi.get(self, "resource_group")
@property
@pulumi.getter(name="resourceID")
def resource_id(self) -> Optional[str]:
return pulumi.get(self, "resource_id")
@property
@pulumi.getter(name="resourceKey")
def resource_key(self) -> Optional[str]:
return pulumi.get(self, "resource_key")
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> Optional[str]:
return pulumi.get(self, "resource_name")
@property
@pulumi.getter(name="resourceValue")
def resource_value(self) -> Optional[str]:
return pulumi.get(self, "resource_value")
@property
@pulumi.getter(name="serviceID")
def service_id(self) -> Optional[str]:
return pulumi.get(self, "service_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CustomRoleSpec(dict):
"""
CustomRoleSpec defines the desired state of CustomRole
"""
def __init__(__self__, *,
actions: Sequence[str],
description: str,
display_name: str,
role_name: str,
service_class: str):
"""
CustomRoleSpec defines the desired state of CustomRole
"""
pulumi.set(__self__, "actions", actions)
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "role_name", role_name)
pulumi.set(__self__, "service_class", service_class)
@property
@pulumi.getter
def actions(self) -> Sequence[str]:
return pulumi.get(self, "actions")
@property
@pulumi.getter
def description(self) -> str:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> str:
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> str:
return pulumi.get(self, "service_class")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CustomRoleStatus(dict):
"""
CustomRoleStatus defines the observed state of CustomRole
"""
def __init__(__self__, *,
actions: Optional[Sequence[str]] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
message: Optional[str] = None,
role_crn: Optional[str] = None,
role_id: Optional[str] = None,
role_name: Optional[str] = None,
service_class: Optional[str] = None,
state: Optional[str] = None):
"""
CustomRoleStatus defines the observed state of CustomRole
"""
if actions is not None:
pulumi.set(__self__, "actions", actions)
if description is not None:
pulumi.set(__self__, "description", description)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if message is not None:
pulumi.set(__self__, "message", message)
if role_crn is not None:
pulumi.set(__self__, "role_crn", role_crn)
if role_id is not None:
pulumi.set(__self__, "role_id", role_id)
if role_name is not None:
pulumi.set(__self__, "role_name", role_name)
if service_class is not None:
pulumi.set(__self__, "service_class", service_class)
if state is not None:
pulumi.set(__self__, "state", state)
@property
@pulumi.getter
def actions(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "actions")
@property
@pulumi.getter
def description(self) -> Optional[str]:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[str]:
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def message(self) -> Optional[str]:
return pulumi.get(self, "message")
@property
@pulumi.getter(name="roleCRN")
def role_crn(self) -> Optional[str]:
return pulumi.get(self, "role_crn")
@property
@pulumi.getter(name="roleID")
def role_id(self) -> Optional[str]:
return pulumi.get(self, "role_id")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> Optional[str]:
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> Optional[str]:
return pulumi.get(self, "service_class")
@property
@pulumi.getter
def state(self) -> Optional[str]:
return pulumi.get(self, "state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop | operators/ibmcloud-iam-operator/python/pulumi_pulumi_kubernetes_crds_operators_ibmcloud_iam_operator/ibmcloud/v1alpha1/outputs.py |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
# Public API of this generated module: one Pulumi output type per CRD
# object/sub-object (Spec, Status, and their nested fields).
__all__ = [
    'AccessGroupSpec',
    'AccessGroupStatus',
    'AccessPolicySpec',
    'AccessPolicySpecRoles',
    'AccessPolicySpecRolesCustomRolesDef',
    'AccessPolicySpecSubject',
    'AccessPolicySpecSubjectAccessGroupDef',
    'AccessPolicySpecTarget',
    'AccessPolicyStatus',
    'AccessPolicyStatusRoles',
    'AccessPolicyStatusRolesCustomRolesDef',
    'AccessPolicyStatusSubject',
    'AccessPolicyStatusSubjectAccessGroupDef',
    'AccessPolicyStatusTarget',
    'AuthorizationPolicySpec',
    'AuthorizationPolicySpecSource',
    'AuthorizationPolicySpecTarget',
    'AuthorizationPolicyStatus',
    'AuthorizationPolicyStatusSource',
    'AuthorizationPolicyStatusTarget',
    'CustomRoleSpec',
    'CustomRoleStatus',
]
@pulumi.output_type
class AccessGroupSpec(dict):
    """
    AccessGroupSpec defines the desired state of AccessGroup.

    ``description`` and ``name`` are required; member lists are optional and
    stored only when present.  The ``name=`` arguments on the getters give the
    original camelCase keys used on the Kubernetes wire format.
    """
    def __init__(__self__, *,
                 description: str,
                 name: str,
                 service_ids: Optional[Sequence[str]] = None,
                 user_emails: Optional[Sequence[str]] = None):
        """
        AccessGroupSpec defines the desired state of AccessGroup
        """
        # Required fields are always stored; optional fields only when set,
        # so the underlying dict mirrors exactly what the API returned.
        pulumi.set(__self__, "description", description)
        pulumi.set(__self__, "name", name)
        if service_ids is not None:
            pulumi.set(__self__, "service_ids", service_ids)
        if user_emails is not None:
            pulumi.set(__self__, "user_emails", user_emails)

    @property
    @pulumi.getter
    def description(self) -> str:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def name(self) -> str:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceIDs")
    def service_ids(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "service_ids")

    @property
    @pulumi.getter(name="userEmails")
    def user_emails(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "user_emails")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessGroupStatus(dict):
    """
    AccessGroupStatus defines the observed state of AccessGroup.

    Every field is optional: only values actually reported by the cluster are
    stored in the underlying dict.
    """
    def __init__(__self__, *,
                 group_id: Optional[str] = None,
                 description: Optional[str] = None,
                 message: Optional[str] = None,
                 name: Optional[str] = None,
                 service_ids: Optional[Sequence[str]] = None,
                 state: Optional[str] = None,
                 user_emails: Optional[Sequence[str]] = None):
        """
        AccessGroupStatus defines the observed state of AccessGroup
        """
        if group_id is not None:
            pulumi.set(__self__, "group_id", group_id)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if message is not None:
            pulumi.set(__self__, "message", message)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if service_ids is not None:
            pulumi.set(__self__, "service_ids", service_ids)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if user_emails is not None:
            pulumi.set(__self__, "user_emails", user_emails)

    @property
    @pulumi.getter(name="GroupID")
    def group_id(self) -> Optional[str]:
        # NOTE(review): wire key "GroupID" is PascalCase, unlike the camelCase
        # keys used everywhere else in this module.  This presumably mirrors
        # the CRD schema as generated -- confirm against the CRD before
        # "fixing" it, since changing it would break deserialization.
        return pulumi.get(self, "group_id")

    @property
    @pulumi.getter
    def description(self) -> Optional[str]:
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="serviceIDs")
    def service_ids(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "service_ids")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter(name="userEmails")
    def user_emails(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "user_emails")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpec(dict):
    """
    AccessPolicySpec defines the desired state of AccessPolicy.

    All three sub-objects (roles granted, subject they are granted to, and the
    target resource) are required.
    """
    def __init__(__self__, *,
                 roles: 'outputs.AccessPolicySpecRoles',
                 subject: 'outputs.AccessPolicySpecSubject',
                 target: 'outputs.AccessPolicySpecTarget'):
        """
        AccessPolicySpec defines the desired state of AccessPolicy
        """
        pulumi.set(__self__, "roles", roles)
        pulumi.set(__self__, "subject", subject)
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def roles(self) -> 'outputs.AccessPolicySpecRoles':
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def subject(self) -> 'outputs.AccessPolicySpecSubject':
        return pulumi.get(self, "subject")

    @property
    @pulumi.getter
    def target(self) -> 'outputs.AccessPolicySpecTarget':
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecRoles(dict):
    """
    Roles granted by an AccessPolicy: platform-defined role names and/or
    references to custom roles.  All fields are optional.
    """
    def __init__(__self__, *,
                 custom_roles_d_name: Optional[Sequence[str]] = None,
                 custom_roles_def: Optional[Sequence['outputs.AccessPolicySpecRolesCustomRolesDef']] = None,
                 defined_roles: Optional[Sequence[str]] = None):
        if custom_roles_d_name is not None:
            pulumi.set(__self__, "custom_roles_d_name", custom_roles_d_name)
        if custom_roles_def is not None:
            pulumi.set(__self__, "custom_roles_def", custom_roles_def)
        if defined_roles is not None:
            pulumi.set(__self__, "defined_roles", defined_roles)

    @property
    @pulumi.getter(name="customRolesDName")
    def custom_roles_d_name(self) -> Optional[Sequence[str]]:
        # NOTE(review): the wire key "customRolesDName" (and the derived
        # snake_case name) looks like a CRD-schema oddity; it is reproduced
        # as generated -- confirm against the CRD before renaming.
        return pulumi.get(self, "custom_roles_d_name")

    @property
    @pulumi.getter(name="customRolesDef")
    def custom_roles_def(self) -> Optional[Sequence['outputs.AccessPolicySpecRolesCustomRolesDef']]:
        return pulumi.get(self, "custom_roles_def")

    @property
    @pulumi.getter(name="definedRoles")
    def defined_roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "defined_roles")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecRolesCustomRolesDef(dict):
    """
    Reference to a CustomRole resource by name and namespace (both required).
    """
    def __init__(__self__, *,
                 custom_role_name: str,
                 custom_role_namespace: str):
        pulumi.set(__self__, "custom_role_name", custom_role_name)
        pulumi.set(__self__, "custom_role_namespace", custom_role_namespace)

    @property
    @pulumi.getter(name="customRoleName")
    def custom_role_name(self) -> str:
        return pulumi.get(self, "custom_role_name")

    @property
    @pulumi.getter(name="customRoleNamespace")
    def custom_role_namespace(self) -> str:
        return pulumi.get(self, "custom_role_namespace")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecSubject(dict):
    """
    Subject an AccessPolicy applies to: an access-group reference or ID, a
    service ID, or a user e-mail.  All fields are optional; only fields that
    are present are stored.
    """
    def __init__(__self__, *,
                 access_group_def: Optional['outputs.AccessPolicySpecSubjectAccessGroupDef'] = None,
                 access_group_id: Optional[str] = None,
                 service_id: Optional[str] = None,
                 user_email: Optional[str] = None):
        if access_group_def is not None:
            pulumi.set(__self__, "access_group_def", access_group_def)
        if access_group_id is not None:
            pulumi.set(__self__, "access_group_id", access_group_id)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)
        if user_email is not None:
            pulumi.set(__self__, "user_email", user_email)

    @property
    @pulumi.getter(name="accessGroupDef")
    def access_group_def(self) -> Optional['outputs.AccessPolicySpecSubjectAccessGroupDef']:
        return pulumi.get(self, "access_group_def")

    @property
    @pulumi.getter(name="accessGroupID")
    def access_group_id(self) -> Optional[str]:
        return pulumi.get(self, "access_group_id")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    @property
    @pulumi.getter(name="userEmail")
    def user_email(self) -> Optional[str]:
        return pulumi.get(self, "user_email")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecSubjectAccessGroupDef(dict):
    """
    Reference to an AccessGroup resource by name and namespace (both required).
    """
    def __init__(__self__, *,
                 access_group_name: str,
                 access_group_namespace: str):
        pulumi.set(__self__, "access_group_name", access_group_name)
        pulumi.set(__self__, "access_group_namespace", access_group_namespace)

    @property
    @pulumi.getter(name="accessGroupName")
    def access_group_name(self) -> str:
        return pulumi.get(self, "access_group_name")

    @property
    @pulumi.getter(name="accessGroupNamespace")
    def access_group_namespace(self) -> str:
        return pulumi.get(self, "access_group_namespace")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicySpecTarget(dict):
    """
    Target of an AccessPolicy: the service/resource the roles apply to.

    All fields are optional; only fields present in the CRD object are stored.
    The ``name=`` arguments on the getters give the camelCase wire keys.
    """
    def __init__(__self__, *,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_class: Optional[str] = None,
                 service_id: Optional[str] = None):
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_class is not None:
            pulumi.set(__self__, "service_class", service_class)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> Optional[str]:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatus(dict):
    """
    AccessPolicyStatus defines the observed state of AccessPolicy.

    Every field is optional: only values actually reported by the cluster are
    stored in the underlying dict.
    """
    def __init__(__self__, *,
                 message: Optional[str] = None,
                 policy_id: Optional[str] = None,
                 roles: Optional['outputs.AccessPolicyStatusRoles'] = None,
                 state: Optional[str] = None,
                 subject: Optional['outputs.AccessPolicyStatusSubject'] = None,
                 target: Optional['outputs.AccessPolicyStatusTarget'] = None):
        """
        AccessPolicyStatus defines the observed state of AccessPolicy
        """
        if message is not None:
            pulumi.set(__self__, "message", message)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if roles is not None:
            pulumi.set(__self__, "roles", roles)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if subject is not None:
            pulumi.set(__self__, "subject", subject)
        if target is not None:
            pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter(name="policyID")
    def policy_id(self) -> Optional[str]:
        return pulumi.get(self, "policy_id")

    @property
    @pulumi.getter
    def roles(self) -> Optional['outputs.AccessPolicyStatusRoles']:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def subject(self) -> Optional['outputs.AccessPolicyStatusSubject']:
        return pulumi.get(self, "subject")

    @property
    @pulumi.getter
    def target(self) -> Optional['outputs.AccessPolicyStatusTarget']:
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusRoles(dict):
    """
    Observed roles on an AccessPolicy: platform-defined role names and/or
    references to custom roles.  All fields are optional.
    """
    def __init__(__self__, *,
                 custom_roles_d_name: Optional[Sequence[str]] = None,
                 custom_roles_def: Optional[Sequence['outputs.AccessPolicyStatusRolesCustomRolesDef']] = None,
                 defined_roles: Optional[Sequence[str]] = None):
        if custom_roles_d_name is not None:
            pulumi.set(__self__, "custom_roles_d_name", custom_roles_d_name)
        if custom_roles_def is not None:
            pulumi.set(__self__, "custom_roles_def", custom_roles_def)
        if defined_roles is not None:
            pulumi.set(__self__, "defined_roles", defined_roles)

    @property
    @pulumi.getter(name="customRolesDName")
    def custom_roles_d_name(self) -> Optional[Sequence[str]]:
        # NOTE(review): wire key "customRolesDName" reproduced as generated;
        # presumably mirrors the CRD schema -- confirm before renaming.
        return pulumi.get(self, "custom_roles_d_name")

    @property
    @pulumi.getter(name="customRolesDef")
    def custom_roles_def(self) -> Optional[Sequence['outputs.AccessPolicyStatusRolesCustomRolesDef']]:
        return pulumi.get(self, "custom_roles_def")

    @property
    @pulumi.getter(name="definedRoles")
    def defined_roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "defined_roles")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusRolesCustomRolesDef(dict):
    """
    Reference to a CustomRole resource by name and namespace (both required).
    """
    def __init__(__self__, *,
                 custom_role_name: str,
                 custom_role_namespace: str):
        pulumi.set(__self__, "custom_role_name", custom_role_name)
        pulumi.set(__self__, "custom_role_namespace", custom_role_namespace)

    @property
    @pulumi.getter(name="customRoleName")
    def custom_role_name(self) -> str:
        return pulumi.get(self, "custom_role_name")

    @property
    @pulumi.getter(name="customRoleNamespace")
    def custom_role_namespace(self) -> str:
        return pulumi.get(self, "custom_role_namespace")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusSubject(dict):
    """
    Observed subject of an AccessPolicy: an access-group reference or ID, a
    service ID, or a user e-mail.  All fields are optional.
    """
    def __init__(__self__, *,
                 access_group_def: Optional['outputs.AccessPolicyStatusSubjectAccessGroupDef'] = None,
                 access_group_id: Optional[str] = None,
                 service_id: Optional[str] = None,
                 user_email: Optional[str] = None):
        if access_group_def is not None:
            pulumi.set(__self__, "access_group_def", access_group_def)
        if access_group_id is not None:
            pulumi.set(__self__, "access_group_id", access_group_id)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)
        if user_email is not None:
            pulumi.set(__self__, "user_email", user_email)

    @property
    @pulumi.getter(name="accessGroupDef")
    def access_group_def(self) -> Optional['outputs.AccessPolicyStatusSubjectAccessGroupDef']:
        return pulumi.get(self, "access_group_def")

    @property
    @pulumi.getter(name="accessGroupID")
    def access_group_id(self) -> Optional[str]:
        return pulumi.get(self, "access_group_id")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    @property
    @pulumi.getter(name="userEmail")
    def user_email(self) -> Optional[str]:
        return pulumi.get(self, "user_email")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusSubjectAccessGroupDef(dict):
    """
    Reference to an AccessGroup resource by name and namespace (both required).
    """
    def __init__(__self__, *,
                 access_group_name: str,
                 access_group_namespace: str):
        pulumi.set(__self__, "access_group_name", access_group_name)
        pulumi.set(__self__, "access_group_namespace", access_group_namespace)

    @property
    @pulumi.getter(name="accessGroupName")
    def access_group_name(self) -> str:
        return pulumi.get(self, "access_group_name")

    @property
    @pulumi.getter(name="accessGroupNamespace")
    def access_group_namespace(self) -> str:
        return pulumi.get(self, "access_group_namespace")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AccessPolicyStatusTarget(dict):
    """
    Observed target of an AccessPolicy: the service/resource the roles apply
    to.  All fields are optional; only fields present in the CRD object are
    stored.  The ``name=`` arguments on the getters give the camelCase wire
    keys.
    """
    def __init__(__self__, *,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_class: Optional[str] = None,
                 service_id: Optional[str] = None):
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_class is not None:
            pulumi.set(__self__, "service_class", service_class)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> Optional[str]:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpec(dict):
    """
    AuthorizationPolicySpec defines the desired state of AuthorizationPolicy.

    All three fields (role names, source service, target service) are
    required.
    """
    def __init__(__self__, *,
                 roles: Sequence[str],
                 source: 'outputs.AuthorizationPolicySpecSource',
                 target: 'outputs.AuthorizationPolicySpecTarget'):
        """
        AuthorizationPolicySpec defines the desired state of AuthorizationPolicy
        """
        pulumi.set(__self__, "roles", roles)
        pulumi.set(__self__, "source", source)
        pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def roles(self) -> Sequence[str]:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def source(self) -> 'outputs.AuthorizationPolicySpecSource':
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def target(self) -> 'outputs.AuthorizationPolicySpecTarget':
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpecSource(dict):
    """
    Source side of an AuthorizationPolicy: the service being authorized.

    ``service_class`` is required; all other resource qualifiers are optional
    and stored only when present.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        # Required field is always stored; optional fields only when set.
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicySpecTarget(dict):
    """
    Target side of an AuthorizationPolicy: the service access is granted to.

    ``service_class`` is required; all other resource qualifiers are optional
    and stored only when present.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        # Required field is always stored; optional fields only when set.
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatus(dict):
    """
    AuthorizationPolicyStatus defines the observed state of
    AuthorizationPolicy.

    Every field is optional: only values actually reported by the cluster are
    stored in the underlying dict.
    """
    def __init__(__self__, *,
                 message: Optional[str] = None,
                 policy_id: Optional[str] = None,
                 roles: Optional[Sequence[str]] = None,
                 source: Optional['outputs.AuthorizationPolicyStatusSource'] = None,
                 state: Optional[str] = None,
                 target: Optional['outputs.AuthorizationPolicyStatusTarget'] = None):
        """
        AuthorizationPolicyStatus defines the observed state of AuthorizationPolicy
        """
        if message is not None:
            pulumi.set(__self__, "message", message)
        if policy_id is not None:
            pulumi.set(__self__, "policy_id", policy_id)
        if roles is not None:
            pulumi.set(__self__, "roles", roles)
        if source is not None:
            pulumi.set(__self__, "source", source)
        if state is not None:
            pulumi.set(__self__, "state", state)
        if target is not None:
            pulumi.set(__self__, "target", target)

    @property
    @pulumi.getter
    def message(self) -> Optional[str]:
        return pulumi.get(self, "message")

    @property
    @pulumi.getter(name="policyID")
    def policy_id(self) -> Optional[str]:
        return pulumi.get(self, "policy_id")

    @property
    @pulumi.getter
    def roles(self) -> Optional[Sequence[str]]:
        return pulumi.get(self, "roles")

    @property
    @pulumi.getter
    def source(self) -> Optional['outputs.AuthorizationPolicyStatusSource']:
        return pulumi.get(self, "source")

    @property
    @pulumi.getter
    def state(self) -> Optional[str]:
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def target(self) -> Optional['outputs.AuthorizationPolicyStatusTarget']:
        return pulumi.get(self, "target")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatusSource(dict):
    """
    Observed source side of an AuthorizationPolicy.

    ``service_class`` is required; all other resource qualifiers are optional
    and stored only when present.
    """
    def __init__(__self__, *,
                 service_class: str,
                 resource_group: Optional[str] = None,
                 resource_id: Optional[str] = None,
                 resource_key: Optional[str] = None,
                 resource_name: Optional[str] = None,
                 resource_value: Optional[str] = None,
                 service_id: Optional[str] = None):
        # Required field is always stored; optional fields only when set.
        pulumi.set(__self__, "service_class", service_class)
        if resource_group is not None:
            pulumi.set(__self__, "resource_group", resource_group)
        if resource_id is not None:
            pulumi.set(__self__, "resource_id", resource_id)
        if resource_key is not None:
            pulumi.set(__self__, "resource_key", resource_key)
        if resource_name is not None:
            pulumi.set(__self__, "resource_name", resource_name)
        if resource_value is not None:
            pulumi.set(__self__, "resource_value", resource_value)
        if service_id is not None:
            pulumi.set(__self__, "service_id", service_id)

    @property
    @pulumi.getter(name="serviceClass")
    def service_class(self) -> str:
        return pulumi.get(self, "service_class")

    @property
    @pulumi.getter(name="resourceGroup")
    def resource_group(self) -> Optional[str]:
        return pulumi.get(self, "resource_group")

    @property
    @pulumi.getter(name="resourceID")
    def resource_id(self) -> Optional[str]:
        return pulumi.get(self, "resource_id")

    @property
    @pulumi.getter(name="resourceKey")
    def resource_key(self) -> Optional[str]:
        return pulumi.get(self, "resource_key")

    @property
    @pulumi.getter(name="resourceName")
    def resource_name(self) -> Optional[str]:
        return pulumi.get(self, "resource_name")

    @property
    @pulumi.getter(name="resourceValue")
    def resource_value(self) -> Optional[str]:
        return pulumi.get(self, "resource_value")

    @property
    @pulumi.getter(name="serviceID")
    def service_id(self) -> Optional[str]:
        return pulumi.get(self, "service_id")

    def _translate_property(self, prop):
        # Map camelCase wire names back to the snake_case keys stored above.
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class AuthorizationPolicyStatusTarget(dict):
def __init__(__self__, *,
service_class: str,
resource_group: Optional[str] = None,
resource_id: Optional[str] = None,
resource_key: Optional[str] = None,
resource_name: Optional[str] = None,
resource_value: Optional[str] = None,
service_id: Optional[str] = None):
pulumi.set(__self__, "service_class", service_class)
if resource_group is not None:
pulumi.set(__self__, "resource_group", resource_group)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
if resource_key is not None:
pulumi.set(__self__, "resource_key", resource_key)
if resource_name is not None:
pulumi.set(__self__, "resource_name", resource_name)
if resource_value is not None:
pulumi.set(__self__, "resource_value", resource_value)
if service_id is not None:
pulumi.set(__self__, "service_id", service_id)
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> str:
return pulumi.get(self, "service_class")
@property
@pulumi.getter(name="resourceGroup")
def resource_group(self) -> Optional[str]:
return pulumi.get(self, "resource_group")
@property
@pulumi.getter(name="resourceID")
def resource_id(self) -> Optional[str]:
return pulumi.get(self, "resource_id")
@property
@pulumi.getter(name="resourceKey")
def resource_key(self) -> Optional[str]:
return pulumi.get(self, "resource_key")
@property
@pulumi.getter(name="resourceName")
def resource_name(self) -> Optional[str]:
return pulumi.get(self, "resource_name")
@property
@pulumi.getter(name="resourceValue")
def resource_value(self) -> Optional[str]:
return pulumi.get(self, "resource_value")
@property
@pulumi.getter(name="serviceID")
def service_id(self) -> Optional[str]:
return pulumi.get(self, "service_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CustomRoleSpec(dict):
"""
CustomRoleSpec defines the desired state of CustomRole
"""
def __init__(__self__, *,
actions: Sequence[str],
description: str,
display_name: str,
role_name: str,
service_class: str):
"""
CustomRoleSpec defines the desired state of CustomRole
"""
pulumi.set(__self__, "actions", actions)
pulumi.set(__self__, "description", description)
pulumi.set(__self__, "display_name", display_name)
pulumi.set(__self__, "role_name", role_name)
pulumi.set(__self__, "service_class", service_class)
@property
@pulumi.getter
def actions(self) -> Sequence[str]:
return pulumi.get(self, "actions")
@property
@pulumi.getter
def description(self) -> str:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> str:
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> str:
return pulumi.get(self, "service_class")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CustomRoleStatus(dict):
"""
CustomRoleStatus defines the observed state of CustomRole
"""
def __init__(__self__, *,
actions: Optional[Sequence[str]] = None,
description: Optional[str] = None,
display_name: Optional[str] = None,
message: Optional[str] = None,
role_crn: Optional[str] = None,
role_id: Optional[str] = None,
role_name: Optional[str] = None,
service_class: Optional[str] = None,
state: Optional[str] = None):
"""
CustomRoleStatus defines the observed state of CustomRole
"""
if actions is not None:
pulumi.set(__self__, "actions", actions)
if description is not None:
pulumi.set(__self__, "description", description)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
if message is not None:
pulumi.set(__self__, "message", message)
if role_crn is not None:
pulumi.set(__self__, "role_crn", role_crn)
if role_id is not None:
pulumi.set(__self__, "role_id", role_id)
if role_name is not None:
pulumi.set(__self__, "role_name", role_name)
if service_class is not None:
pulumi.set(__self__, "service_class", service_class)
if state is not None:
pulumi.set(__self__, "state", state)
@property
@pulumi.getter
def actions(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "actions")
@property
@pulumi.getter
def description(self) -> Optional[str]:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[str]:
return pulumi.get(self, "display_name")
@property
@pulumi.getter
def message(self) -> Optional[str]:
return pulumi.get(self, "message")
@property
@pulumi.getter(name="roleCRN")
def role_crn(self) -> Optional[str]:
return pulumi.get(self, "role_crn")
@property
@pulumi.getter(name="roleID")
def role_id(self) -> Optional[str]:
return pulumi.get(self, "role_id")
@property
@pulumi.getter(name="roleName")
def role_name(self) -> Optional[str]:
return pulumi.get(self, "role_name")
@property
@pulumi.getter(name="serviceClass")
def service_class(self) -> Optional[str]:
return pulumi.get(self, "service_class")
@property
@pulumi.getter
def state(self) -> Optional[str]:
return pulumi.get(self, "state")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop | 0.874667 | 0.083666 |
"""Sets up repositories for use by rules_webtesting at version 0.3.3."""
load("//web:web.bzl", "platform_archive")
def browser_repositories(firefox = False, chromium = False, sauce = False):
"""Sets up repositories for browsers defined in //browsers/....
Args:
firefox: Configure repositories for //browsers:firefox-native.
chromium: Configure repositories for //browsers:chromium-native.
sauce: Configure repositories for //browser/sauce:chrome-win10-connect.
"""
if chromium:
org_chromium_chromedriver()
org_chromium_chromium()
if firefox:
org_mozilla_firefox()
org_mozilla_geckodriver()
if sauce:
com_saucelabs_sauce_connect()
def com_saucelabs_sauce_connect():
platform_archive(
name = "com_saucelabs_sauce_connect_linux_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 = "6eb18a5a3f77b190fa0bb48bcda4694d26731703ac3ee56499f72f820fe10ef1",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-linux.tar.gz",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-linux/bin/sc",
},
)
platform_archive(
name = "com_saucelabs_sauce_connect_macos_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 = "7dd691a46a57c7c39f527688abd4825531d25a8a1c5b074f684783e397529ba6",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-osx.zip",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-osx/bin/sc",
},
)
platform_archive(
name = "com_saucelabs_sauce_connect_windows_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 =
"4b2baaeb32624aa4e60ea4a2ca51f7c5656d476ba29f650a5dabb0faaf6cb793",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-win32.zip",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-win32/bin/sc.exe",
},
)
# To update Chromium, do the following:
# Step 1: Go to https://omahaproxy.appspot.com/
# Step 2: Look for branch_base_position of current stable releases
# Step 3: Go to https://commondatastorage.googleapis.com/chromium-browser-snapshots/index.html?prefix=Linux_x64/ etc to verify presence of that branch release for that platform.
# If no results, delete the last digit to broaden your search til you find a result.
# Step 4: Verify both Chromium and ChromeDriver are released at that version.
# Step 5: Update the URL to the new release.
def org_chromium_chromedriver():
platform_archive(
name = "org_chromium_chromedriver_linux_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "1d2e73a19632031f5de876916e12b497d5b0e3dc83d1ce2fbe8665061adfd114",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/902390/chromedriver_linux64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_linux64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_linux64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_macos_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "36cc50c5194767b043913534f6ec16a7d7a85636b319729a67ffff486b30a5f6",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac/902390/chromedriver_mac64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_mac_x64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_mac64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_macos_arm64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "1f100aacf4bab4b3ac4218ecf654b17d66f2e07dd455f887bb3d9aa8d21862e1",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac_Arm/902390/chromedriver_mac64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_mac_arm64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_mac64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_windows_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "48392698f2ba338a0b9192f7c2154058a0b0b926aef0a5ef22aa6706b2bbc7b6",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Win/902390/chromedriver_win32.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_win32.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_win32/chromedriver.exe",
},
)
def org_chromium_chromium():
platform_archive(
name = "org_chromium_chromium_linux_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
# 94.0.4578.0
sha256 = "673ee08b4cfaff128ef0b4f7517acb6b6b25c9315fc6494ec328ab38aaf952d1",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/902390/chrome-linux.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-linux.zip",
],
named_files = {
"CHROMIUM": "chrome-linux/chrome",
},
)
platform_archive(
name = "org_chromium_chromium_macos_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "75f6bd26744368cd0fcbbec035766dea82e34def60e938fb48630be6799d46c7",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac/902390/chrome-mac.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-mac_x64.zip",
],
named_files = {
"CHROMIUM": "chrome-mac/Chromium.app/Contents/MacOS/Chromium",
},
)
platform_archive(
name = "org_chromium_chromium_macos_arm64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "4845ce895d030aeb8bfd877a599f1f07d8c7a77d1e08513e80e60bb0093fca24",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac_Arm/902390/chrome-mac.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-mac_arm64.zip",
],
named_files = {
"CHROMIUM": "chrome-mac/Chromium.app/Contents/MacOS/Chromium",
},
)
platform_archive(
name = "org_chromium_chromium_windows_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "8919cd2f8a4676af4acc50d022b6a946a5b21a5fec4e078b0ebb0c8e18f1ce90",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Win/902390/chrome-win.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-win.zip",
],
named_files = {
"CHROMIUM": "chrome-win/chrome.exe",
},
)
def org_mozilla_firefox():
platform_archive(
name = "org_mozilla_firefox_linux_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "998607f028043b3780f296eee03027279ef059acab5b50f9754df2bd69ca42b3",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/linux-x86_64/en-US/firefox-90.0.1.tar.bz2",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/firefox-90.0.1.tar.bz2",
],
named_files = {
"FIREFOX": "firefox/firefox",
},
)
platform_archive(
# Firefox has a launcher that conditionally starts x64/arm64
name = "org_mozilla_firefox_macos_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "76c1b9c42b52c7e5be4c112a98b7d3762a18841367f778a179679ac0de751f05",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/mac/en-US/Firefox%2090.0.1.dmg",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/Firefox%2090.0.1.dmg",
],
named_files = {
"FIREFOX": "Firefox.app/Contents/MacOS/firefox",
},
)
platform_archive(
# Firefox has a launcher that conditionally starts x64/arm64. This means that the
# x64 and arm64 repositories download the same binaries. We preserve separate
# repositories to allow for dedicated ARM/x64 binaries if needed in the future.
name = "org_mozilla_firefox_macos_arm64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "76c1b9c42b52c7e5be4c112a98b7d3762a18841367f778a179679ac0de751f05",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/mac/en-US/Firefox%2090.0.1.dmg",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/Firefox%2090.0.1.dmg",
],
named_files = {
"FIREFOX": "Firefox.app/Contents/MacOS/firefox",
},
)
def org_mozilla_geckodriver():
platform_archive(
name = "org_mozilla_geckodriver_linux_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "ec164910a3de7eec71e596bd2a1814ae27ba4c9d112b611680a6470dbe2ce27b",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-linux64.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-linux64.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
)
platform_archive(
name = "org_mozilla_geckodriver_macos_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "9929c804ad0157ca13fdafca808866c88815b658e7059280a9f08f7e70364963",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-macos.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-macos.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
)
platform_archive(
name = "org_mozilla_geckodriver_macos_arm64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "a1ec058b930fbfb684e30071ea47eec61bc18acb489914a9e0d095ede6088eea",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-macos-aarch64.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-macos-aarch64.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
) | web/versioned/browsers-0.3.3.bzl | """Sets up repositories for use by rules_webtesting at version 0.3.3."""
load("//web:web.bzl", "platform_archive")
def browser_repositories(firefox = False, chromium = False, sauce = False):
"""Sets up repositories for browsers defined in //browsers/....
Args:
firefox: Configure repositories for //browsers:firefox-native.
chromium: Configure repositories for //browsers:chromium-native.
sauce: Configure repositories for //browser/sauce:chrome-win10-connect.
"""
if chromium:
org_chromium_chromedriver()
org_chromium_chromium()
if firefox:
org_mozilla_firefox()
org_mozilla_geckodriver()
if sauce:
com_saucelabs_sauce_connect()
def com_saucelabs_sauce_connect():
platform_archive(
name = "com_saucelabs_sauce_connect_linux_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 = "6eb18a5a3f77b190fa0bb48bcda4694d26731703ac3ee56499f72f820fe10ef1",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-linux.tar.gz",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-linux/bin/sc",
},
)
platform_archive(
name = "com_saucelabs_sauce_connect_macos_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 = "7dd691a46a57c7c39f527688abd4825531d25a8a1c5b074f684783e397529ba6",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-osx.zip",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-osx/bin/sc",
},
)
platform_archive(
name = "com_saucelabs_sauce_connect_windows_x64",
licenses = ["by_exception_only"], # SauceLabs EULA
sha256 =
"4b2baaeb32624aa4e60ea4a2ca51f7c5656d476ba29f650a5dabb0faaf6cb793",
urls = [
"https://saucelabs.com/downloads/sc-4.5.4-win32.zip",
],
named_files = {
"SAUCE_CONNECT": "sc-4.5.4-win32/bin/sc.exe",
},
)
# To update Chromium, do the following:
# Step 1: Go to https://omahaproxy.appspot.com/
# Step 2: Look for branch_base_position of current stable releases
# Step 3: Go to https://commondatastorage.googleapis.com/chromium-browser-snapshots/index.html?prefix=Linux_x64/ etc to verify presence of that branch release for that platform.
# If no results, delete the last digit to broaden your search til you find a result.
# Step 4: Verify both Chromium and ChromeDriver are released at that version.
# Step 5: Update the URL to the new release.
def org_chromium_chromedriver():
platform_archive(
name = "org_chromium_chromedriver_linux_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "1d2e73a19632031f5de876916e12b497d5b0e3dc83d1ce2fbe8665061adfd114",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/902390/chromedriver_linux64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_linux64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_linux64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_macos_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "36cc50c5194767b043913534f6ec16a7d7a85636b319729a67ffff486b30a5f6",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac/902390/chromedriver_mac64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_mac_x64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_mac64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_macos_arm64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "1f100aacf4bab4b3ac4218ecf654b17d66f2e07dd455f887bb3d9aa8d21862e1",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac_Arm/902390/chromedriver_mac64.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_mac_arm64.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_mac64/chromedriver",
},
)
platform_archive(
name = "org_chromium_chromedriver_windows_x64",
licenses = ["reciprocal"], # BSD 3-clause, ICU, MPL 1.1, libpng (BSD/MIT-like), Academic Free License v. 2.0, BSD 2-clause, MIT
sha256 = "48392698f2ba338a0b9192f7c2154058a0b0b926aef0a5ef22aa6706b2bbc7b6",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Win/902390/chromedriver_win32.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chromedriver_win32.zip",
],
named_files = {
"CHROMEDRIVER": "chromedriver_win32/chromedriver.exe",
},
)
def org_chromium_chromium():
platform_archive(
name = "org_chromium_chromium_linux_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
# 94.0.4578.0
sha256 = "673ee08b4cfaff128ef0b4f7517acb6b6b25c9315fc6494ec328ab38aaf952d1",
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Linux_x64/902390/chrome-linux.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-linux.zip",
],
named_files = {
"CHROMIUM": "chrome-linux/chrome",
},
)
platform_archive(
name = "org_chromium_chromium_macos_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "75f6bd26744368cd0fcbbec035766dea82e34def60e938fb48630be6799d46c7",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac/902390/chrome-mac.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-mac_x64.zip",
],
named_files = {
"CHROMIUM": "chrome-mac/Chromium.app/Contents/MacOS/Chromium",
},
)
platform_archive(
name = "org_chromium_chromium_macos_arm64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "4845ce895d030aeb8bfd877a599f1f07d8c7a77d1e08513e80e60bb0093fca24",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Mac_Arm/902390/chrome-mac.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-mac_arm64.zip",
],
named_files = {
"CHROMIUM": "chrome-mac/Chromium.app/Contents/MacOS/Chromium",
},
)
platform_archive(
name = "org_chromium_chromium_windows_x64",
licenses = ["notice"], # BSD 3-clause (maybe more?)
sha256 = "8919cd2f8a4676af4acc50d022b6a946a5b21a5fec4e078b0ebb0c8e18f1ce90",
# 94.0.4578.0
urls = [
"https://storage.googleapis.com/chromium-browser-snapshots/Win/902390/chrome-win.zip",
"https://storage.googleapis.com/dev-infra-mirror/chromium/902390/chrome-win.zip",
],
named_files = {
"CHROMIUM": "chrome-win/chrome.exe",
},
)
def org_mozilla_firefox():
platform_archive(
name = "org_mozilla_firefox_linux_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "998607f028043b3780f296eee03027279ef059acab5b50f9754df2bd69ca42b3",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/linux-x86_64/en-US/firefox-90.0.1.tar.bz2",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/firefox-90.0.1.tar.bz2",
],
named_files = {
"FIREFOX": "firefox/firefox",
},
)
platform_archive(
# Firefox has a launcher that conditionally starts x64/arm64
name = "org_mozilla_firefox_macos_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "76c1b9c42b52c7e5be4c112a98b7d3762a18841367f778a179679ac0de751f05",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/mac/en-US/Firefox%2090.0.1.dmg",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/Firefox%2090.0.1.dmg",
],
named_files = {
"FIREFOX": "Firefox.app/Contents/MacOS/firefox",
},
)
platform_archive(
# Firefox has a launcher that conditionally starts x64/arm64. This means that the
# x64 and arm64 repositories download the same binaries. We preserve separate
# repositories to allow for dedicated ARM/x64 binaries if needed in the future.
name = "org_mozilla_firefox_macos_arm64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "76c1b9c42b52c7e5be4c112a98b7d3762a18841367f778a179679ac0de751f05",
# Firefox v90.0.1
urls = [
"https://ftp.mozilla.org/pub/firefox/releases/90.0.1/mac/en-US/Firefox%2090.0.1.dmg",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/firefox/Firefox%2090.0.1.dmg",
],
named_files = {
"FIREFOX": "Firefox.app/Contents/MacOS/firefox",
},
)
def org_mozilla_geckodriver():
platform_archive(
name = "org_mozilla_geckodriver_linux_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "ec164910a3de7eec71e596bd2a1814ae27ba4c9d112b611680a6470dbe2ce27b",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-linux64.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-linux64.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
)
platform_archive(
name = "org_mozilla_geckodriver_macos_x64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "9929c804ad0157ca13fdafca808866c88815b658e7059280a9f08f7e70364963",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-macos.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-macos.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
)
platform_archive(
name = "org_mozilla_geckodriver_macos_arm64",
licenses = ["reciprocal"], # MPL 2.0
sha256 = "a1ec058b930fbfb684e30071ea47eec61bc18acb489914a9e0d095ede6088eea",
# Geckodriver v0.29.1
urls = [
"https://github.com/mozilla/geckodriver/releases/download/v0.29.1/geckodriver-v0.29.1-macos-aarch64.tar.gz",
"https://storage.googleapis.com/dev-infra-mirror/mozilla/geckodriver/0.29.1/geckodriver-v0.29.1-macos-aarch64.tar.gz",
],
named_files = {
"GECKODRIVER": "geckodriver",
},
) | 0.698741 | 0.306157 |
# Utilities for plotting
import matplotlib.pyplot as plt
import numpy as np
from functools import reduce
from skimage.color import gray2rgb, rgb2hsv, hsv2rgb
from skimage.exposure import rescale_intensity
def colourise(img, hue):
"""Colourise a grayscale images according to a hue.
Inspired by http://nbviewer.jupyter.org/gist/jeanpat/9665267
"""
hsv = rgb2hsv(gray2rgb(img))
hsv[:, :, 0] = hue
hsv[:, :, 1] = 1 # Full saturation
return hsv2rgb(hsv)
def rgbstack(img, hues, rescale=False):
if img.shape[0] != len(hues):
raise ValueError("Number of images in stack must be "
"equal to number of hues")
out = (colourise(i, h) for i, h in zip(img, hues))
out = np.array(reduce(lambda x, y: x + y/3, out,
np.zeros(img[0].shape + (3, ),
dtype=img.dtype)))
if rescale:
out = rescale_intensity(out,
in_range=(out.min(),
out.max()-0.15),
out_range=(0, 255))
return np.uint8(out)
def output_r90_img(img, df, cal, output_path, hues=[0.6, 0, 0.4, 1]):
colour_img = rgbstack(img, hues, rescale=True)
fig, ax0 = plt.subplots()
ax0.imshow(colour_img)
for n, g in df.groupby("Infected"):
ax0.scatter(g["xc"].tolist(), g["yc"].tolist(), s=50,
c="r" if n else "b",
marker="*" if n else "o")
for r in g.itertuples():
circ = plt.Circle((r.xc, r.yc), r._5/cal,
facecolor="none",
edgecolor="red")
ax0.add_artist(circ)
ax0.axis("off")
fig.savefig(output_path, dpi=150)
plt.close(fig)
def vertical_line(x, **kwargs):
plt.axvline(0, **kwargs)
# Polar histogram
def polar_hist(a, bins=10, hist_kws=None, density=False, axlabel=None,
color=None, label=None, ax=None):
if ax is None:
ax = plt.gca()
# Intelligently label the support axis (from seaborn distplot)
label_ax = bool(axlabel)
if axlabel is None and hasattr(a, "name"):
axlabel = a.name
if axlabel is not None:
label_ax = True
a = np.rad2deg(np.asarray(a).squeeze())
# Handle dictionary defaults
if hist_kws is None:
hist_kws = dict()
# Get the color from the current color cycle
if color is None:
line, = ax.plot(a.mean(), 0)
color = line.get_color()
line.remove()
# Plug the label into the right kwarg dictionary
if label is not None:
if hist_kws:
hist_kws["label"] = label
h, b = np.histogram(a, bins=bins, density=density)
centre = (b[:-1] + b[1:]) / 2
width = np.pi*2/bins
hist_kws.setdefault("alpha", 0.4)
hist_color = hist_kws.pop("color", color)
ax.bar(np.deg2rad(centre), h, width=width, bottom=0.0,
color=hist_color, **hist_kws)
if hist_color != color:
hist_kws["color"] = hist_color
if label_ax:
ax.set_xlabel(axlabel)
return ax | mito/plot.py |
# Utilities for plotting
import matplotlib.pyplot as plt
import numpy as np
from functools import reduce
from skimage.color import gray2rgb, rgb2hsv, hsv2rgb
from skimage.exposure import rescale_intensity
def colourise(img, hue):
"""Colourise a grayscale images according to a hue.
Inspired by http://nbviewer.jupyter.org/gist/jeanpat/9665267
"""
hsv = rgb2hsv(gray2rgb(img))
hsv[:, :, 0] = hue
hsv[:, :, 1] = 1 # Full saturation
return hsv2rgb(hsv)
def rgbstack(img, hues, rescale=False):
if img.shape[0] != len(hues):
raise ValueError("Number of images in stack must be "
"equal to number of hues")
out = (colourise(i, h) for i, h in zip(img, hues))
out = np.array(reduce(lambda x, y: x + y/3, out,
np.zeros(img[0].shape + (3, ),
dtype=img.dtype)))
if rescale:
out = rescale_intensity(out,
in_range=(out.min(),
out.max()-0.15),
out_range=(0, 255))
return np.uint8(out)
def output_r90_img(img, df, cal, output_path, hues=[0.6, 0, 0.4, 1]):
colour_img = rgbstack(img, hues, rescale=True)
fig, ax0 = plt.subplots()
ax0.imshow(colour_img)
for n, g in df.groupby("Infected"):
ax0.scatter(g["xc"].tolist(), g["yc"].tolist(), s=50,
c="r" if n else "b",
marker="*" if n else "o")
for r in g.itertuples():
circ = plt.Circle((r.xc, r.yc), r._5/cal,
facecolor="none",
edgecolor="red")
ax0.add_artist(circ)
ax0.axis("off")
fig.savefig(output_path, dpi=150)
plt.close(fig)
def vertical_line(x, **kwargs):
plt.axvline(0, **kwargs)
# Polar histogram
def polar_hist(a, bins=10, hist_kws=None, density=False, axlabel=None,
color=None, label=None, ax=None):
if ax is None:
ax = plt.gca()
# Intelligently label the support axis (from seaborn distplot)
label_ax = bool(axlabel)
if axlabel is None and hasattr(a, "name"):
axlabel = a.name
if axlabel is not None:
label_ax = True
a = np.rad2deg(np.asarray(a).squeeze())
# Handle dictionary defaults
if hist_kws is None:
hist_kws = dict()
# Get the color from the current color cycle
if color is None:
line, = ax.plot(a.mean(), 0)
color = line.get_color()
line.remove()
# Plug the label into the right kwarg dictionary
if label is not None:
if hist_kws:
hist_kws["label"] = label
h, b = np.histogram(a, bins=bins, density=density)
centre = (b[:-1] + b[1:]) / 2
width = np.pi*2/bins
hist_kws.setdefault("alpha", 0.4)
hist_color = hist_kws.pop("color", color)
ax.bar(np.deg2rad(centre), h, width=width, bottom=0.0,
color=hist_color, **hist_kws)
if hist_color != color:
hist_kws["color"] = hist_color
if label_ax:
ax.set_xlabel(axlabel)
return ax | 0.827201 | 0.49469 |
import numpy as np
def get_features():
    """Return the registry of available feature extractors.

    Each key maps to a descriptor with the feature's display ``name``,
    implementing ``class``, model dimensionality (``"1D"``/``"2D"``) and
    the sensor ``data_type`` it consumes (``"envelope"`` or ``"sparse"``).
    """
    def entry(name, cls, model, data_type):
        # Keep each registry row compact and uniformly shaped.
        return {
            "name": name,
            "class": cls,
            "model": model,
            "data_type": data_type,
        }

    return {
        "sweep": entry("Range segment", FeatureSweep, "2D", "envelope"),
        "peak": entry("Peak", FeaturePeak, "2D", "envelope"),
        "averages_1d": entry("Averages 1D", FeatureAverages1D, "1D", "envelope"),
        "averages_2d": entry("Averages 2D", FeatureAverages2D, "2D", "envelope"),
        "amplitude_ratios_1d": entry(
            "Amplitude Ratios 1D", FeatureAmplitudeRatios1D, "1D", "envelope"),
        "sparse_fft": entry("Sparse FFT", FeatureSparseFFT, "2D", "sparse"),
    }
def m2idx(value, array, scale=124 / 60):
    """Convert a distance in meters to a non-negative sweep-bin index.

    Args:
        value: Distance in meters.
        array: Range axis in millimeters; only ``array[0]`` (the start of
            the axis) is read, as the offset to subtract.
        scale: Bins per millimeter of range. Defaults to ``124 / 60`` —
            presumably 124 data points spanning 60 mm; TODO confirm
            against the sensor configuration.

    Returns:
        int: Bin index clamped at 0 on the low end. The high end is NOT
        clamped here; callers ``min()`` against their data length.
    """
    # Fix: the original wrapped an already-int value in int() twice.
    idx = int(scale * (value * 1000 - array[0]))
    return max(0, idx)
class FeaturePeak:
    """Per-sweep peak distance and amplitude within a range segment."""

    def __init__(self):
        # Output series produced by extract_feature: key -> display label.
        self.data = {
            "peak": "Distance",
            "amplitude": "Amplitude",
        }
        # GUI options: (text, default value, [min, max], type).
        # NOTE(review): "LP filter" is declared but never applied in
        # extract_feature — kept for interface compatibility.
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("LP filter", 0.1, [0, 1], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Find, for each sweep in the window, the envelope peak inside
        the [Start, Stop] range segment.

        Args:
            win_data: Dict holding "env_data" shaped
                (sensors, range bins, sweeps) — assumed; TODO confirm
                against the caller.
            sensor_idx: Index of the sensor to process.
            options: Dict with "Start" and "Stop" distances in meters.
            dist_vec: Range axis in mm, indexable by bin.

        Returns:
            Dict with per-sweep "peak" distances and "amplitude" values,
            or None when envelope data is missing or the segment is empty.
        """
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Fix: original allocated 4 columns but only ever used 2.
        peak = np.zeros((win_len, 2))
        for i in range(win_len):
            idx = int(np.argmax(arr[start:stop, i])) + start
            peak[i, 0] = dist_vec[idx]
            peak[i, 1] = arr[idx, i]
        data = {
            "peak": peak[:, 0],
            "amplitude": peak[:, 1],
        }
        return data

    def get_options(self):
        """Return (output labels, option descriptors) for the GUI."""
        return self.data, self.options

    def get_size(self, options=None):
        """Feature height in the 2-D feature map (one row)."""
        return 1
class FeatureAverages1D():
    """Scalar (1D-model) averages of per-sweep peak statistics."""

    def __init__(self):
        # Output scalars: key -> display label.
        self.data = {
            "avg_dist": "Avg. dist.",
            "avg_std": "Avg. std.",
            "avg_ampl": "Avg. ampl.",
            "avg_total": "Avg. total",
        }
        # GUI options: (text, default value, [min, max], type).
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Average the per-sweep peak distance, amplitude and total
        envelope energy over the window; also report the spread of the
        peak distance.

        Returns None when envelope data is missing or the configured
        range segment is empty.
        """
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        dists = np.zeros(win_len)
        ampls = np.zeros(win_len)
        totals = np.zeros(win_len)
        for col in range(win_len):
            segment = arr[start:stop, col]
            best = int(np.argmax(segment)) + start
            dists[col] = dist_vec[best]
            ampls[col] = arr[best, col]
            totals[col] = np.sum(segment)
        return {
            "avg_dist": np.mean(dists),
            "avg_std": np.std(dists),
            "avg_ampl": np.mean(ampls),
            "avg_total": np.mean(totals),
        }

    def get_options(self):
        """Return (output labels, option descriptors) for the GUI."""
        return self.data, self.options

    def get_size(self, options=None):
        """Feature height in the 2-D feature map (one row)."""
        return 1
class FeatureAverages2D():
    """Envelope feature: peak statistics repeated across the window (2D)."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "avg_dist": "Avg. dist.",
            "avg_std": "Avg. std.",
            "avg_ampl": "Avg. ampl.",
            "avg_total": "Avg. signal",
        }
        # option rows: (text, default value, [min, max], type)
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Return window-averaged peak stats broadcast to one value per sweep."""
        try:
            env = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        num_points, num_sweeps = env.shape
        lo = m2idx(options["Start"], dist_vec)
        hi = min(m2idx(options["Stop"], dist_vec), num_points)
        if lo >= hi:
            return None
        dists, ampls, totals = [], [], []
        for sweep in range(num_sweeps):
            column = env[lo:hi, sweep]
            best = int(np.argmax(column) + lo)
            dists.append(dist_vec[best])
            ampls.append(env[best, sweep])
            totals.append(np.sum(column))
        return {
            "avg_dist": np.full(num_sweeps, np.mean(dists)),
            "avg_std": np.full(num_sweeps, np.std(dists)),
            "avg_ampl": np.full(num_sweeps, np.mean(ampls)),
            "avg_total": np.full(num_sweeps, np.mean(totals)),
        }

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        # One row per output key; values are constant across the window.
        return 1
class FeatureAmplitudeRatios1D():
    """Envelope feature: ratio of mean peak amplitude between two sensors."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "avg_ratio": "Avg. Amp. ratio",
        }
        # option rows: (text, default value, [min, max], type)
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Return mean(peak amp, sensor 0) / mean(peak amp, sensor 1).

        Only computed once per frame (when called with sensor_idx == 1);
        assumes env_data holds at least two sensors -- TODO confirm.
        """
        try:
            if sensor_idx == 1:
                arr = win_data["env_data"]
            else:
                # The ratio is produced for the second sensor slot only.
                return None
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        nr_sensors, data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Peak amplitude per sensor and sweep. The original also stored
        # peak distance and an energy sum that were never used in the
        # output, in an over-allocated (2, win_len, 4) array.
        amp = np.zeros((2, win_len))
        for s in range(2):
            for i in range(win_len):
                idx = np.argmax(arr[s, start:stop, i]) + start
                amp[s, i] = arr[s, int(idx), i]
        data = {
            "avg_ratio": np.mean(amp[0, :]) / np.mean(amp[1, :]),
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        return 1
class FeatureSweep:
    """Envelope feature: raw (down-sampled) range segment per sweep."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "segment": "Segment",
        }
        # option rows: (text, default value, [min, max], type)
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("Down sample", 8, [1, 124], int),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Return the envelope segment between Start and Stop, down-sampled."""
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        # Clamp so a zero/negative setting cannot break the slice step.
        downsampling = int(max(1, options["Down sample"]))
        if start >= stop:
            return None
        data = {
            "segment": arr[start:stop:downsampling, :],
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        """Estimate the number of output rows for the current options."""
        if options is None:
            return 1
        try:
            start = float(options["Start"])
            stop = float(options["Stop"])
            downsample = int(options["Down sample"])
            # (stop - start) is in meters; * 100 * 124 / 6 is the sample
            # count at the fixed 124-points-per-60-mm resolution.
            size = (stop - start) * 100 * 124 / downsample / 6 + 1
        except Exception as e:
            # Fixed typo in the original message ("hight" -> "height").
            print("Failed to calculate feature height!\n ", e)
            return 1
        return int(size)
class FeatureSparseFFT:
    """Sparse-service feature: Doppler FFT power spectral density."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "fft": "FFT PSD",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("High pass", 1, [0, 1], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """FFT over the subsweep axis of the selected range span.

        Returns {"fft": psd} limited to the lowest fraction of frequency
        bins given by "High pass", or None when sparse data or a valid
        range is unavailable.
        """
        try:
            arr = win_data["sparse_data"][sensor_idx, :, :, :]
        except Exception:
            print("sparse_data not available!")
            return None
        point_repeats, data_len, win_len = arr.shape
        data_start = dist_vec[0]
        data_stop = dist_vec[-1]
        # dist_vec is in m
        start = max(data_start, options["Start"])
        stop = min(data_stop, options["Stop"])
        high_pass = options["High pass"]
        if start >= data_stop:
            return None
        # Nearest range indices to the requested start/stop distances;
        # ensure the slice covers at least one range point.
        start_idx = np.argmin((dist_vec - start)**2)
        stop_idx = np.argmin((dist_vec - stop)**2)
        stop_idx = max(start_idx + 1, stop_idx)
        arr = arr[:, start_idx:stop_idx, :]
        # Hanning window over the subsweep axis; the mean subtraction
        # removes the DC component before the FFT.
        hanning = np.hanning(point_repeats)[:, np.newaxis, np.newaxis]
        doppler = abs(np.fft.rfft(hanning * (arr - np.mean(arr, axis=0, keepdims=True)), axis=0))
        # Sum magnitudes over the range axis: one spectrum per sweep.
        fft_psd = np.sum(doppler, axis=1)
        freq_bins = fft_psd.shape[0]
        # NOTE(review): despite its name, "High pass" acts as a low-side
        # cutoff *ratio* here (keeps bins [0, ratio*freq_bins)) -- confirm.
        freq_cutoff = int(high_pass * freq_bins)
        data = {
            "fft": fft_psd[0:freq_cutoff, :],
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        """Estimate output rows from the subsweep count and cutoff ratio."""
        if options is None or "subsweeps" not in options:
            return 1
        try:
            size = int(np.ceil(options["subsweeps"] * options["High pass"] / 2))
        except Exception as e:
            print("Failed to calculate feature hight!\n ", e)
            return 1
        return int(size) | gui/ml/feature_definitions.py | import numpy as np
def get_features():
    """Return the registry of selectable features.

    Maps a feature key to its display name, implementing class, model
    dimensionality ("1D"/"2D") and required service data type.
    """
    entries = [
        ("sweep", "Range segment", FeatureSweep, "2D", "envelope"),
        ("peak", "Peak", FeaturePeak, "2D", "envelope"),
        ("averages_1d", "Averages 1D", FeatureAverages1D, "1D", "envelope"),
        ("averages_2d", "Averages 2D", FeatureAverages2D, "2D", "envelope"),
        ("amplitude_ratios_1d", "Amplitude Ratios 1D",
         FeatureAmplitudeRatios1D, "1D", "envelope"),
        ("sparse_fft", "Sparse FFT", FeatureSparseFFT, "2D", "sparse"),
    ]
    features = {}
    for key, name, cls, model, data_type in entries:
        features[key] = {
            "name": name,
            "class": cls,
            "model": model,
            "data_type": data_type,
        }
    return features
def m2idx(value, array):
    """Map a distance in meters onto an index of *array* (which is in mm).

    The 124/60 factor is the sensor's fixed points-per-mm resolution;
    positions before array[0] clamp to index 0.
    """
    raw = int(124 / 60 * (value * 1000 - array[0]))
    return int(raw if raw > 0 else 0)
class FeaturePeak:
    """Envelope feature: per-sweep peak distance and amplitude."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "peak": "Distance",
            "amplitude": "Amplitude",
        }
        # text, value, limits, type
        # NOTE(review): "LP filter" is declared but not used in
        # extract_feature below -- confirm whether it should be applied.
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("LP filter", 0.1, [0, 1], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Locate the envelope peak per sweep within [Start, Stop).

        Returns {"peak": distances, "amplitude": amplitudes}, or None when
        env_data is missing or the requested span is empty.
        """
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Columns [0]/[1] hold distance/amplitude; [2]/[3] are never written.
        peak = np.zeros((win_len, 4))
        for i in range(win_len):
            idx = np.argmax(arr[start:stop, i]) + start
            peak[i, 0] = dist_vec[int(idx)]
            peak[i, 1] = arr[int(idx), i]
        data = {
            "peak": peak[:, 0],
            "amplitude": peak[:, 1],
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        # Always a single row in the feature map.
        return 1
class FeatureAverages1D():
    """Envelope feature: window-averaged peak statistics (scalar outputs)."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "avg_dist": "Avg. dist.",
            "avg_std": "Avg. std.",
            "avg_ampl": "Avg. ampl.",
            "avg_total": "Avg. total",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Average the per-sweep peak distance/amplitude/energy.

        Returns scalar statistics over the window, or None when env_data
        is missing or the [Start, Stop) span is empty.
        """
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Per sweep: [0] peak distance, [1] peak amplitude, [2] energy sum.
        peak = np.zeros((win_len, 3))
        for i in range(win_len):
            idx = np.argmax(arr[start:stop, i]) + start
            peak[i, 0] = dist_vec[int(idx)]
            peak[i, 1] = arr[int(idx), i]
            peak[i, 2] = np.sum(arr[start:stop, i])
        data = {
            "avg_dist": np.mean(peak[:, 0]),
            "avg_std": np.std(peak[:, 0]),
            "avg_ampl": np.mean(peak[:, 1]),
            "avg_total": np.mean(peak[:, 2]),
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        # Scalar outputs -> a single feature row.
        return 1
class FeatureAverages2D():
    """Envelope feature: window-averaged peak statistics, broadcast per sweep."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "avg_dist": "Avg. dist.",
            "avg_std": "Avg. std.",
            "avg_ampl": "Avg. ampl.",
            "avg_total": "Avg. signal",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Same statistics as FeatureAverages1D, but each value is
        repeated win_len times so the output forms a 2D feature map.
        """
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Per sweep: [0] peak distance, [1] peak amplitude, [2] energy sum.
        peak = np.zeros((win_len, 3))
        for i in range(win_len):
            idx = np.argmax(arr[start:stop, i]) + start
            peak[i, 0] = dist_vec[int(idx)]
            peak[i, 1] = arr[int(idx), i]
            peak[i, 2] = np.sum(arr[start:stop, i])
        data = {
            "avg_dist": np.full(win_len, np.mean(peak[:, 0])),
            "avg_std": np.full(win_len, np.std(peak[:, 0])),
            "avg_ampl": np.full(win_len, np.mean(peak[:, 1])),
            "avg_total": np.full(win_len, np.mean(peak[:, 2])),
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        # One row per output key; values are constant across the window.
        return 1
class FeatureAmplitudeRatios1D():
    """Envelope feature: ratio of mean peak amplitudes between two sensors."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "avg_ratio": "Avg. Amp. ratio",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Compute mean(peak amp, sensor 0) / mean(peak amp, sensor 1).

        Only runs when called with sensor_idx == 1; the loop below assumes
        env_data holds at least two sensors -- TODO confirm with caller.
        """
        try:
            if sensor_idx == 1:
                arr = win_data["env_data"]
            else:
                # The ratio is produced once, for the second sensor slot only.
                return None
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        nr_sensors, data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        if start >= stop:
            return None
        # Per sensor/sweep: [0] distance, [1] amplitude, [2] energy sum.
        # Only column [1] is used in the output below; [3] is never written.
        peak = np.zeros((2, win_len, 4))
        for s in range(2):
            for i in range(win_len):
                idx = np.argmax(arr[s, start:stop, i]) + start
                peak[s, i, 0] = dist_vec[int(idx)]
                peak[s, i, 1] = arr[s, int(idx), i]
                peak[s, i, 2] = np.sum(arr[s, start:stop, i])
        data = {
            "avg_ratio": np.mean(peak[0, :, 1]) / np.mean(peak[1, :, 1]),
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        return 1
class FeatureSweep:
    """Envelope feature: raw down-sampled range segment per sweep."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "segment": "Segment",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("Down sample", 8, [1, 124], int),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Slice the envelope between Start and Stop with down-sampling."""
        try:
            arr = win_data["env_data"][sensor_idx, :, :]
        except Exception:
            print("env_data not available!")
            return None
        # dist_vec is in mm
        data_len, win_len = arr.shape
        start = m2idx(options["Start"], dist_vec)
        stop = min(m2idx(options["Stop"], dist_vec), data_len)
        # Clamp so a zero/negative setting cannot break the slice step.
        downsampling = int(max(1, options["Down sample"]))
        if start >= stop:
            return None
        data = {
            "segment": arr[start:stop:downsampling, :],
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        """Predict the number of output rows for the given options."""
        if options is None:
            return 1
        try:
            start = float(options["Start"])
            stop = float(options["Stop"])
            downsample = int(options["Down sample"])
            # (stop - start) is in meters; * 100 * 124 / 6 is the sample
            # count at the fixed 124-points-per-60-mm resolution.
            size = (stop - start) * 100 * 124 / downsample / 6 + 1
        except Exception as e:
            print("Failed to calculate feature hight!\n ", e)
            return 1
        return int(size)
class FeatureSparseFFT:
    """Sparse-service feature: Doppler FFT power spectral density."""

    def __init__(self):
        # output data: key -> display label
        self.data = {
            "fft": "FFT PSD",
        }
        # text, value, limits
        self.options = [
            ("Start", 0.2, [0.06, 7], float),
            ("Stop", 0.4, [0.06, 7], float),
            ("High pass", 1, [0, 1], float),
        ]

    def extract_feature(self, win_data, sensor_idx, options=None, dist_vec=None):
        """Windowed FFT over the subsweep axis of the selected range span.

        Returns {"fft": psd} limited to the lowest "High pass" fraction of
        frequency bins, or None when data or a valid range is unavailable.
        """
        try:
            arr = win_data["sparse_data"][sensor_idx, :, :, :]
        except Exception:
            print("sparse_data not available!")
            return None
        point_repeats, data_len, win_len = arr.shape
        data_start = dist_vec[0]
        data_stop = dist_vec[-1]
        # dist_vec is in m
        start = max(data_start, options["Start"])
        stop = min(data_stop, options["Stop"])
        high_pass = options["High pass"]
        if start >= data_stop:
            return None
        # Nearest range indices to start/stop; keep at least one point.
        start_idx = np.argmin((dist_vec - start)**2)
        stop_idx = np.argmin((dist_vec - stop)**2)
        stop_idx = max(start_idx + 1, stop_idx)
        arr = arr[:, start_idx:stop_idx, :]
        # Hanning window along the subsweep axis; subtracting the mean
        # removes the DC component before the FFT.
        hanning = np.hanning(point_repeats)[:, np.newaxis, np.newaxis]
        doppler = abs(np.fft.rfft(hanning * (arr - np.mean(arr, axis=0, keepdims=True)), axis=0))
        # Sum magnitudes over the range axis: one spectrum per sweep.
        fft_psd = np.sum(doppler, axis=1)
        freq_bins = fft_psd.shape[0]
        # NOTE(review): "High pass" acts as a keep-ratio (bins kept are
        # [0, ratio*freq_bins)), i.e. a low-side cutoff -- confirm naming.
        freq_cutoff = int(high_pass * freq_bins)
        data = {
            "fft": fft_psd[0:freq_cutoff, :],
        }
        return data

    def get_options(self):
        return self.data, self.options

    def get_size(self, options=None):
        """Estimate output rows from the subsweep count and cutoff ratio."""
        if options is None or "subsweeps" not in options:
            return 1
        try:
            size = int(np.ceil(options["subsweeps"] * options["High pass"] / 2))
        except Exception as e:
            print("Failed to calculate feature hight!\n ", e)
            return 1
        return int(size) | 0.567577 | 0.630543 |
import astropy.units as u
from astropy.coordinates import ICRS
from astropy.coordinates import Galactic
import astropy.coordinates as coord
import numpy as np
def calc_vb(mean_list):
    """
    Calculate latitudinal velocity.

    Args:
        mean_list (list): list of arrays of,
            ra: Right Ascension in degrees.
            dec: Declination in degrees.
            parallax: parallax in milliarcseconds.
            pmra: RA proper motion in milliarcseconds per year.
            pmdec: Dec proper motion in milliarcseconds per year.

    Returns:
        The array of latitudinal velocities (astropy Quantity, km/s).
    """
    ra, dec, parallax, pmra, pmdec = mean_list
    # Distance from parallax (mas); dimensionless_angles() lets the
    # mas/yr * pc product convert directly to km/s.
    d = coord.Distance(parallax=parallax*u.mas)
    c = coord.SkyCoord(ra=ra*u.deg, dec=dec*u.deg, distance=d,
                       pm_ra_cosdec=pmra*u.mas/u.yr,
                       pm_dec=pmdec*u.mas/u.yr)
    gal = c.galactic
    # Tangential velocity along Galactic latitude b. (The original also
    # computed unused vra/vdec equatorial velocities; removed.)
    v_b = (gal.pm_b * gal.distance).to(u.km/u.s, u.dimensionless_angles())
    return v_b
def vb_with_err(mean_list, cov_list, Nsamples):
    """
    Calculate latitudinal velocities with uncertainties.

    Args:
        mean_list (list): A list of arrays of astrometric data.
            [ra, dec, plx, pmra, pmdec]
        cov_list (array): A list of all the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Nsamples: (int): The number of samples.
    """
    # NOTE(review): no statements follow the docstring, so this function
    # implicitly returns None. It looks like sample_from_cov + calc_vb
    # were meant to be wired up here -- confirm against project history.
def sample_from_cov(mean_list, cov_list, Nsamples):
    """
    Sample from the multivariate Gaussian of Gaia astrometric data.

    Args:
        mean_list (list): A list of arrays of astrometric data.
            [ra, dec, plx, pmra, pmdec]
        cov_list (array): A list of all the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Nsamples: (int): The number of samples.

    Returns:
        samples (array): Nsamples x Ndim x Nstars array of draws.
    """
    Ndim = len(mean_list)  # 5 dimensions: ra, dec, plx, pmra, pmdec
    Nstars = len(mean_list[0])
    # Construct the mean and covariance matrices.
    # (np.vstack takes the sequence directly; the original wrapped it
    # in a redundant list comprehension.)
    mean = np.vstack(mean_list)
    cov = construct_cov(cov_list, Ndim)
    # Sample from the multivariate Gaussian, one star at a time.
    samples = np.zeros((Nsamples, Ndim, Nstars))
    for i in range(Nstars):
        samples[:, :, i] = np.random.multivariate_normal(
            mean[:, i], cov[:, :, i], Nsamples)
    return samples
def construct_cov(element_list, Ndim):
    """
    Construct the covariance matrix between ra, dec, pmra, pmdec and parallax.

    Args:
        element_list (list): All the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Ndim (int): The number of dimensions. 5 for Gaia (ra, dec, plx, pmra,
            pmdec)

    Returns:
        cov (array): The 5 x 5 x Nstar covariance matrix.
            cov = [[ra**2 rad_c raplx_c rapmra_c rapmd_c]
                   [rad_c d**2 dplx_c dpmra_c dpmdec_c]
                   [raplx_c dplx_c plx**2 plxpmra_c plxpmdec_c]
                   [rapmra_c dpmra_c plxpmra_c pmra**2 pmrapmdec_c]
                   [rapmd_c dpmdec_c plxpmdec_c pmrapmdec_c pmdec**2]]
    """
    # NOTE(review): off-diagonal entries are the raw *_corr values. If
    # these are Gaia correlation *coefficients* (as the names suggest),
    # a true covariance needs corr * err_i * err_j -- confirm whether the
    # caller pre-scales them before passing.
    ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr, ra_plx_corr, \
        ra_pmra_corr, ra_pmdec_corr, dec_plx_corr, dec_pmra_corr, \
        dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr, pmra_pmdec_corr \
        = element_list
    # One symmetric Ndim x Ndim matrix per star, stacked on the last axis.
    cov = np.zeros((Ndim, Ndim, len(ra_err)))
    cov[0, 0, :] = ra_err**2
    cov[0, 1, :] = ra_dec_corr
    cov[0, 2, :] = ra_plx_corr
    cov[0, 3, :] = ra_pmra_corr
    cov[0, 4, :] = ra_pmdec_corr
    cov[1, 0, :] = ra_dec_corr
    cov[1, 1, :] = dec_err**2
    cov[1, 2, :] = dec_plx_corr
    cov[1, 3, :] = dec_pmra_corr
    cov[1, 4, :] = dec_pmdec_corr
    cov[2, 0, :] = ra_plx_corr
    cov[2, 1, :] = dec_plx_corr
    cov[2, 2, :] = plx_err**2
    cov[2, 3, :] = plx_pmra_corr
    cov[2, 4, :] = plx_pmdec_corr
    cov[3, 0, :] = ra_pmra_corr
    cov[3, 1, :] = dec_pmra_corr
    cov[3, 2, :] = plx_pmra_corr
    cov[3, 3, :] = pmra_err**2
    cov[3, 4, :] = pmra_pmdec_corr
    cov[4, 0, :] = ra_pmdec_corr
    cov[4, 1, :] = dec_pmdec_corr
    cov[4, 2, :] = plx_pmdec_corr
    cov[4, 3, :] = pmra_pmdec_corr
    cov[4, 4, :] = pmdec_err**2
    return cov | kinematics_and_rotation/kinematics.py | import astropy.units as u
from astropy.coordinates import ICRS
from astropy.coordinates import Galactic
import astropy.coordinates as coord
import numpy as np
def calc_vb(mean_list):
    """
    Calculate latitudinal velocity.

    Args:
        mean_list (list): list of arrays of,
            ra: Right Ascension in degrees.
            dec: Declination in degrees.
            parallax: parallax in milliarcseconds.
            pmra: RA proper motion in milliarcseconds per year.
            pmdec: Dec proper motion in milliarcseconds per year.

    Returns:
        The array of latitudinal velocities (astropy Quantity, km/s).
    """
    ra, dec, parallax, pmra, pmdec = mean_list
    # icrs = ICRS(ra=ra*u.degree,
    #             dec=dec*u.degree,
    #             distance=distance*u.pc,
    #             pm_ra_cosdec=pmra*u.mas/u.yr,
    #             pm_dec=pmdec*u.mas/u.yr)
    # vels = icrs.transform_to(Galactic)
    # Distance from parallax (mas); dimensionless_angles() lets the
    # mas/yr * pc product convert directly to km/s.
    d = coord.Distance(parallax=parallax*u.mas)
    # NOTE(review): vra/vdec are computed here but never used below.
    vra = (pmra*u.mas/u.yr * d).to(u.km/u.s, u.dimensionless_angles())
    vdec = (pmdec*u.mas/u.yr * d).to(u.km/u.s, u.dimensionless_angles())
    c = coord.SkyCoord(ra=ra*u.deg, dec=dec*u.deg, distance=d,
                       pm_ra_cosdec=pmra*u.mas/u.yr,
                       pm_dec=pmdec*u.mas/u.yr)
    gal = c.galactic
    # Tangential velocity along Galactic latitude b.
    v_b = (gal.pm_b * gal.distance).to(u.km/u.s, u.dimensionless_angles())
    return v_b
def vb_with_err(mean_list, cov_list, Nsamples):
    """
    Calculate latitudinal velocities with uncertainties.

    Args:
        mean_list (list): A list of arrays of astrometric data.
            [ra, dec, plx, pmra, pmdec]
        cov_list (array): A list of all the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Nsamples: (int): The number of samples.
    """
    # NOTE(review): docstring-only body; the function implicitly returns
    # None. Sampling (sample_from_cov) and calc_vb appear never to have
    # been wired up here -- confirm against project history.
def sample_from_cov(mean_list, cov_list, Nsamples):
    """
    Draw correlated samples of Gaia astrometry per star.

    Args:
        mean_list (list): A list of arrays of astrometric data.
            [ra, dec, plx, pmra, pmdec]
        cov_list (array): A list of all the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Nsamples: (int): The number of samples.

    Returns:
        Array of shape (Nsamples, ndim, nstars) with the Gaussian draws.
    """
    ndim = len(mean_list)          # 5: ra, dec, plx, pmra, pmdec
    nstars = len(mean_list[0])
    # Stack the means and build the per-star covariance matrices.
    means = np.vstack(mean_list)
    covs = construct_cov(cov_list, ndim)
    # One multivariate-normal draw per star.
    draws = np.zeros((Nsamples, ndim, nstars))
    for star in range(nstars):
        draws[:, :, star] = np.random.multivariate_normal(
            means[:, star], covs[:, :, star], Nsamples)
    return draws
def construct_cov(element_list, Ndim):
    """
    Construct the covariance matrix between ra, dec, pmra, pmdec and parallax.

    Args:
        element_list (list): All the uncertainties and covariances:
            [ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr,
            ra_plx_corr, ra_pmra_corr, ra_pmdec_corr, dec_plx_corr,
            dec_pmra_corr, dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr,
            pmra_pmdec_corr]
        Ndim (int): The number of dimensions. 5 for Gaia (ra, dec, plx, pmra,
            pmdec)

    Returns:
        cov (array): The 5 x 5 x Nstar covariance matrix.
            cov = [[ra**2 rad_c raplx_c rapmra_c rapmd_c]
                   [rad_c d**2 dplx_c dpmra_c dpmdec_c]
                   [raplx_c dplx_c plx**2 plxpmra_c plxpmdec_c]
                   [rapmra_c dpmra_c plxpmra_c pmra**2 pmrapmdec_c]
                   [rapmd_c dpmdec_c plxpmdec_c pmrapmdec_c pmdec**2]]
    """
    # NOTE(review): off-diagonals are the raw *_corr values; if they are
    # correlation coefficients a covariance needs corr * err_i * err_j --
    # verify the caller's convention.
    ra_err, dec_err, plx_err, pmra_err, pmdec_err, ra_dec_corr, ra_plx_corr, \
        ra_pmra_corr, ra_pmdec_corr, dec_plx_corr, dec_pmra_corr, \
        dec_pmdec_corr, plx_pmra_corr, plx_pmdec_corr, pmra_pmdec_corr \
        = element_list
    # Symmetric Ndim x Ndim matrix per star, stacked along the last axis.
    cov = np.zeros((Ndim, Ndim, len(ra_err)))
    cov[0, 0, :] = ra_err**2
    cov[0, 1, :] = ra_dec_corr
    cov[0, 2, :] = ra_plx_corr
    cov[0, 3, :] = ra_pmra_corr
    cov[0, 4, :] = ra_pmdec_corr
    cov[1, 0, :] = ra_dec_corr
    cov[1, 1, :] = dec_err**2
    cov[1, 2, :] = dec_plx_corr
    cov[1, 3, :] = dec_pmra_corr
    cov[1, 4, :] = dec_pmdec_corr
    cov[2, 0, :] = ra_plx_corr
    cov[2, 1, :] = dec_plx_corr
    cov[2, 2, :] = plx_err**2
    cov[2, 3, :] = plx_pmra_corr
    cov[2, 4, :] = plx_pmdec_corr
    cov[3, 0, :] = ra_pmra_corr
    cov[3, 1, :] = dec_pmra_corr
    cov[3, 2, :] = plx_pmra_corr
    cov[3, 3, :] = pmra_err**2
    cov[3, 4, :] = pmra_pmdec_corr
    cov[4, 0, :] = ra_pmdec_corr
    cov[4, 1, :] = dec_pmdec_corr
    cov[4, 2, :] = plx_pmdec_corr
    cov[4, 3, :] = pmra_pmdec_corr
    cov[4, 4, :] = pmdec_err**2
    return cov | 0.884389 | 0.574604 |
import typing
from abc import abstractmethod
from ...lang.x_event_listener import XEventListener as XEventListener_c7230c4a
if typing.TYPE_CHECKING:
from .tree_expansion_event import TreeExpansionEvent as TreeExpansionEvent_378b0f79
class XTreeExpansionListener(XEventListener_c7230c4a):
    """
    An instance of this interface can get notifications from a TreeControl when nodes are expanded or collapsed.

    See Also:
        `API XTreeExpansionListener <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1awt_1_1tree_1_1XTreeExpansionListener.html>`_
    """
    # UNO type metadata consumed by the ooobuild/pyuno machinery; values
    # mirror the IDL type's namespace and fully qualified name.
    __ooo_ns__: str = 'com.sun.star.awt.tree'
    __ooo_full_ns__: str = 'com.sun.star.awt.tree.XTreeExpansionListener'
    __ooo_type_name__: str = 'interface'
    __pyunointerface__: str = 'com.sun.star.awt.tree.XTreeExpansionListener'

    @abstractmethod
    def requestChildNodes(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked when a node with children on demand is about to be expanded.

        This event is invoked before the treeExpanding() event.
        """

    @abstractmethod
    def treeCollapsed(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Called whenever a node in the tree has been successfully collapsed.
        """

    @abstractmethod
    def treeCollapsing(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked whenever a node in the tree is about to be collapsed.

        Raises:
            ExpandVetoException: ``ExpandVetoException``
        """

    @abstractmethod
    def treeExpanded(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Called whenever a node in the tree has been successfully expanded.
        """

    @abstractmethod
    def treeExpanding(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked whenever a node in the tree is about to be expanded.

        Raises:
            ExpandVetoException: ``ExpandVetoException``
        """
__all__ = ['XTreeExpansionListener'] | ooobuild/lo/awt/tree/x_tree_expansion_listener.py | import typing
from abc import abstractmethod
from ...lang.x_event_listener import XEventListener as XEventListener_c7230c4a
if typing.TYPE_CHECKING:
from .tree_expansion_event import TreeExpansionEvent as TreeExpansionEvent_378b0f79
class XTreeExpansionListener(XEventListener_c7230c4a):
    """
    An instance of this interface can get notifications from a TreeControl when nodes are expanded or collapsed.

    See Also:
        `API XTreeExpansionListener <https://api.libreoffice.org/docs/idl/ref/interfacecom_1_1sun_1_1star_1_1awt_1_1tree_1_1XTreeExpansionListener.html>`_
    """
    # Generated UNO interface wrapper; the dunder attributes below carry
    # the IDL type metadata used by the pyuno/ooobuild tooling.
    __ooo_ns__: str = 'com.sun.star.awt.tree'
    __ooo_full_ns__: str = 'com.sun.star.awt.tree.XTreeExpansionListener'
    __ooo_type_name__: str = 'interface'
    __pyunointerface__: str = 'com.sun.star.awt.tree.XTreeExpansionListener'

    @abstractmethod
    def requestChildNodes(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked when a node with children on demand is about to be expanded.

        This event is invoked before the treeExpanding() event.
        """

    @abstractmethod
    def treeCollapsed(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Called whenever a node in the tree has been successfully collapsed.
        """

    @abstractmethod
    def treeCollapsing(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked whenever a node in the tree is about to be collapsed.

        Raises:
            ExpandVetoException: ``ExpandVetoException``
        """

    @abstractmethod
    def treeExpanded(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Called whenever a node in the tree has been successfully expanded.
        """

    @abstractmethod
    def treeExpanding(self, Event: 'TreeExpansionEvent_378b0f79') -> None:
        """
        Invoked whenever a node in the tree is about to be expanded.

        Raises:
            ExpandVetoException: ``ExpandVetoException``
        """
__all__ = ['XTreeExpansionListener'] | 0.681621 | 0.226752 |