text
stringlengths 3
1.05M
|
|---|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import io
import os
# Module API

# Package version, read from the adjacent VERSION file at import time.
# A context manager is used so the file handle is closed deterministically
# (the original `io.open(...).read()` leaked the handle until GC).
with io.open(os.path.join(os.path.dirname(__file__), 'VERSION')) as _version_fh:
    VERSION = _version_fh.read().strip()

# URL schemes treated as remote data sources.
REMOTE_SCHEMES = ['http', 'https', 'ftp', 'ftps', 's3']

# File formats that can be parsed as tabular data.
TABULAR_FORMATS = ['csv', 'tsv', 'xls', 'xlsx']

# Default profile identifiers for packages and resources.
DEFAULT_DATA_PACKAGE_PROFILE = 'data-package'
DEFAULT_RESOURCE_PROFILE = 'data-resource'

# Table-schema field defaults.
DEFAULT_FIELD_TYPE = 'string'
DEFAULT_FIELD_FORMAT = 'default'
DEFAULT_MISSING_VALUES = ['']

# Default CSV dialect used when a resource does not declare one.
DEFAULT_DIALECT = {
    'delimiter': ',',
    'doubleQuote': True,
    'lineTerminator': '\r\n',
    'quoteChar': '"',
    'skipInitialSpace': True,
    'header': True,
    'caseSensitiveHeader': False,
}

# Browser-like User-Agent for HTTP requests; some servers reject requests
# carrying library-default agents.
HTTP_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) ' +
                  'AppleWebKit/537.36 (KHTML, like Gecko) ' +
                  'Chrome/54.0.2840.87 Safari/537.36'
}
|
"""Symbolic primitives + unicode/ASCII abstraction for pretty.py"""
import sys
warnings = ''
# first, setup unicodedate environment
try:
import unicodedata
def U(name):
"""unicode character by name or None if not found"""
try:
u = unicodedata.lookup(name)
except KeyError:
u = None
global warnings
warnings += 'W: no \'%s\' in unocodedata\n' % name
return u
except ImportError:
warnings += 'W: no unicodedata available\n'
U = lambda name: None
from sympy.printing.conventions import split_super_sub

# prefix conventions when constructing the lookup tables below:
#   L - LATIN     i
#   G - GREEK     beta
#   D - DIGIT     0
#   S - SYMBOL    +

# Public API of this module.
__all__ = ['greek', 'sub', 'sup', 'xsym', 'vobj', 'hobj', 'pretty_symbol',
           'annotated']
# Global flag: whether the pretty-printer emits unicode by default.
_use_unicode = False

def pretty_use_unicode(flag=None):
    """Set whether pretty-printer should use unicode by default.

    With ``flag is None`` the current setting is returned unchanged;
    otherwise the setting is replaced and the previous value returned.
    """
    global _use_unicode
    global warnings
    if flag is None:
        return _use_unicode

    if flag and warnings:
        # print warnings (if any) on first unicode usage
        # fix: print() calls are valid in both Python 2 and 3, unlike the
        # original Python-2-only print statements
        print("I: pprint -- we are going to use unicode, but there are following problems:")
        print(warnings)
        warnings = ''

    use_unicode_prev = _use_unicode
    _use_unicode = flag
    return use_unicode_prev
def pretty_try_use_unicode():
    """See if unicode output is available and leverage it if possible"""

    try:
        symbols = []

        # see, if we can represent greek alphabet
        # fix: dict.itervalues() is Python-2-only; .values() works in 2 and 3
        for g, G in greek.values():
            symbols.append(g)
            symbols.append(G)

        # and atoms
        symbols += list(atoms_table.values())

        for s in symbols:
            if s is None:
                return  # common symbols not present!

            encoding = getattr(sys.stdout, 'encoding', None)

            # this happens when e.g. stdout is redirected through a pipe, or is
            # e.g. a cStringIO.StringO
            if encoding is None:
                return  # sys.stdout has no encoding

            # try to encode each candidate symbol
            s.encode(encoding)

    except UnicodeEncodeError:
        pass
    else:
        pretty_use_unicode(True)
def xstr(*args):
    """Construct a text object with ``unicode`` when unicode mode is on,
    otherwise with ``str`` (same arguments in either case)."""
    if not _use_unicode:
        return str(*args)
    return unicode(*args)
# GREEK

# Lookup helpers: lowercase / uppercase greek character by latin name.
g = lambda l: U('GREEK SMALL LETTER %s' % l.upper())
G = lambda l: U('GREEK CAPITAL LETTER %s' % l.upper())

# NOTE: 'lamda' (sic) is the official unicode character-name spelling.
greek_letters = [
    'alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta', 'eta', 'theta',
    'iota', 'kappa', 'lamda', 'mu', 'nu', 'xi', 'omicron', 'pi', 'rho',
    'sigma', 'tau', 'upsilon', 'phi', 'chi', 'psi', 'omega']

# {} greek letter name -> (lowercase char, uppercase char); entries may be
# None when unicodedata is unavailable.
greek = dict([(l, (g(l), G(l))) for l in greek_letters])

# aliases
greek['lambda'] = greek['lamda']
# digit -> unicode-name fragment, used to compose names such as
# 'SUBSCRIPT ZERO' for U() lookups.
digit_2txt = {
    '0': 'ZERO',
    '1': 'ONE',
    '2': 'TWO',
    '3': 'THREE',
    '4': 'FOUR',
    '5': 'FIVE',
    '6': 'SIX',
    '7': 'SEVEN',
    '8': 'EIGHT',
    '9': 'NINE',
}

# symbol -> unicode-name fragment (same purpose as digit_2txt).
symb_2txt = {
    '+': 'PLUS SIGN',
    '-': 'MINUS',
    '=': 'EQUALS SIGN',
    '(': 'LEFT PARENTHESIS',
    ')': 'RIGHT PARENTHESIS',
    '[': 'LEFT SQUARE BRACKET',
    ']': 'RIGHT SQUARE BRACKET',
    '{': 'LEFT CURLY BRACKET',
    '}': 'RIGHT CURLY BRACKET',

    # non-std
    '{}': 'CURLY BRACKET',
    'sum': 'SUMMATION',
    'int': 'INTEGRAL',
}
# SUBSCRIPT & SUPERSCRIPT

# Helpers returning the sub/superscript variant of a latin letter, greek
# letter, digit or symbol (None when absent from unicodedata).
LSUB = lambda letter: U('LATIN SUBSCRIPT SMALL LETTER %s' % letter.upper())
GSUB = lambda letter: U('GREEK SUBSCRIPT SMALL LETTER %s' % letter.upper())
DSUB = lambda digit: U('SUBSCRIPT %s' % digit_2txt[digit])
SSUB = lambda symb: U('SUBSCRIPT %s' % symb_2txt[symb])

LSUP = lambda letter: U('SUPERSCRIPT LATIN SMALL LETTER %s' % letter.upper())
DSUP = lambda digit: U('SUPERSCRIPT %s' % digit_2txt[digit])
SSUP = lambda symb: U('SUPERSCRIPT %s' % symb_2txt[symb])

sub = {}    # symb -> subscript symbol
sup = {}    # symb -> superscript symbol

# latin subscripts
for l in 'aeioruvx':
    sub[l] = LSUB(l)

# latin superscripts (only the letters listed here are looked up)
for l in 'in':
    sup[l] = LSUP(l)

# greek subscripts
for gl in ['beta', 'gamma', 'rho', 'phi', 'chi']:
    sub[gl] = GSUB(gl)

# digit sub/superscripts
for d in [str(i) for i in range(10)]:
    sub[d] = DSUB(d)
    sup[d] = DSUP(d)

# sign and parenthesis sub/superscripts
for s in '+-=()':
    sub[s] = SSUB(s)
    sup[s] = SSUP(s)
# VERTICAL OBJECTS

# Helpers naming the pieces a bracket-like glyph is assembled from:
# hook/corner top and bottom, middle piece, and the repeated extension.
HUP = lambda symb: U('%s UPPER HOOK' % symb_2txt[symb])
CUP = lambda symb: U('%s UPPER CORNER' % symb_2txt[symb])
MID = lambda symb: U('%s MIDDLE PIECE' % symb_2txt[symb])
EXT = lambda symb: U('%s EXTENSION' % symb_2txt[symb])
HLO = lambda symb: U('%s LOWER HOOK' % symb_2txt[symb])
CLO = lambda symb: U('%s LOWER CORNER' % symb_2txt[symb])
TOP = lambda symb: U('%s TOP' % symb_2txt[symb])
BOT = lambda symb: U('%s BOTTOM' % symb_2txt[symb])

# {} '(' -> (extension, start, end, middle) 1-character
# Entry shapes (consumed by xobj below):
#   ((ext, top, bot[, mid]), c1)  -- stretchable glyph plus 1-char form
#   single object                 -- used for every size
_xobj_unicode = {
    # vertical symbols
    #       ((  ext,       top,      bot,      mid   ),  c1)
    '(': ((EXT('('), HUP('('), HLO('(')), '('),
    ')': ((EXT(')'), HUP(')'), HLO(')')), ')'),
    '[': ((EXT('['), CUP('['), CLO('[')), '['),
    ']': ((EXT(']'), CUP(']'), CLO(']')), ']'),
    '{': ((EXT('{}'), HUP('{'), HLO('{'), MID('{')), '{'),
    '}': ((EXT('{}'), HUP('}'), HLO('}'), MID('}')), '}'),
    '|': U('BOX DRAWINGS LIGHT VERTICAL'),

    'lfloor': ((EXT('['), EXT('['), CLO('[')), U('LEFT FLOOR')),
    'rfloor': ((EXT(']'), EXT(']'), CLO(']')), U('RIGHT FLOOR')),
    'lceil': ((EXT('['), CUP('['), EXT('[')), U('LEFT CEILING')),
    'rceil': ((EXT(']'), CUP(']'), EXT(']')), U('RIGHT CEILING')),

    'int': ((EXT('int'), U('TOP HALF INTEGRAL'), U('BOTTOM HALF INTEGRAL')), U('INTEGRAL')),
    'sum': ((U('BOX DRAWINGS LIGHT DIAGONAL UPPER LEFT TO LOWER RIGHT'), '_', U('OVERLINE'), U('BOX DRAWINGS LIGHT DIAGONAL UPPER RIGHT TO LOWER LEFT')), U('N-ARY SUMMATION')),

    # horizontal objects
    # '-' : '-',
    '-': U('BOX DRAWINGS LIGHT HORIZONTAL'),
    '_': U('LOW LINE'),
    # We used to use this, but LOW LINE looks better for roots, as it's a
    # little lower (i.e., it lines up with the / perfectly.  But perhaps this
    # one would still be wanted for some cases?
    # '_' : U('HORIZONTAL SCAN LINE-9'),

    # diagonal objects '\' & '/' ?
    '/': U('BOX DRAWINGS LIGHT DIAGONAL UPPER RIGHT TO LOWER LEFT'),
    '\\': U('BOX DRAWINGS LIGHT DIAGONAL UPPER LEFT TO LOWER RIGHT'),
}
# ASCII fallback table with the same entry shapes as _xobj_unicode.
_xobj_ascii = {
    # vertical symbols
    #       (( ext, top, bot, mid ), c1)
    '(': (('|', '/', '\\'), '('),
    ')': (('|', '\\', '/'), ')'),

    # XXX this looks ugly
    #   '[' : (( '|', '-', '-' ), '['),
    #   ']' : (( '|', '-', '-' ), ']'),
    # XXX not so ugly :(
    '[': (('[', '[', '['), '['),
    ']': ((']', ']', ']'), ']'),

    '{': (('|', '/', '\\', '<'), '{'),
    '}': (('|', '\\', '/', '>'), '}'),
    '|': '|',

    # 3-tuple form: (ext, top, bot); xobj derives c1 from ext.
    'int': (' | ', ' /', '/ '),

    # horizontal objects
    '-': '-',
    '_': '_',

    # diagonal objects '\' & '/' ?
    '/': '/',
    '\\': '\\',
}
def xobj(symb, length):
    """Construct spatial object of given length.

    return: [] of equal-length strings (or the single 1-char form when
    ``length == 1``).
    """
    assert length > 0

    # TODO robustify when no unicodedata available

    if _use_unicode:
        _xobj = _xobj_unicode
    else:
        _xobj = _xobj_ascii

    vinfo = _xobj[symb]

    c1 = top = bot = mid = None

    if not isinstance(vinfo, tuple):        # 1 entry: same piece everywhere
        ext = vinfo
    else:
        if isinstance(vinfo[0], tuple):     # ((ext, top, bot[, mid]), c1)
            vlong = vinfo[0]
            c1 = vinfo[1]
        else:                               # (ext, top, bot[, mid])
            vlong = vinfo

        ext = vlong[0]

        try:
            top = vlong[1]
            bot = vlong[2]
            mid = vlong[3]
        except IndexError:
            pass

    # missing pieces default to the extension piece
    if c1 is None:
        c1 = ext
    if top is None:
        top = ext
    if bot is None:
        bot = ext

    if mid is not None:
        if (length % 2) == 0:
            # even height, but we have to print it somehow anyway...
            # XXX is it ok?
            length += 1
    else:
        mid = ext

    if length == 1:
        return c1

    res = []
    # fix: original local was named `next`, shadowing the builtin
    n_ext = (length - 2) // 2
    n_mid = (length - 2) - n_ext * 2

    res += [top]
    res += [ext] * n_ext
    res += [mid] * n_mid
    res += [ext] * n_ext
    res += [bot]

    return res
def vobj(symb, height):
    """Construct vertical object of a given height

       see: xobj
    """
    pieces = xobj(symb, height)
    return '\n'.join(pieces)
def hobj(symb, width):
    """Construct horizontal object of a given width

       see: xobj
    """
    pieces = xobj(symb, width)
    return ''.join(pieces)
# RADICAL

# n -> dedicated root symbol (only these three exist as single characters).
root = {
    2: U('SQUARE ROOT'),   # U('RADICAL SYMBOL BOTTOM')
    3: U('CUBE ROOT'),
    4: U('FOURTH ROOT'),
}

# RATIONAL

# vulgar-fraction character by its unicode-name fragment
VF = lambda txt: U('VULGAR FRACTION %s' % txt)

# (p,q) -> single-character fraction symbol
frac = {
    (1, 2): VF('ONE HALF'),
    (1, 3): VF('ONE THIRD'),
    (2, 3): VF('TWO THIRDS'),
    (1, 4): VF('ONE QUARTER'),
    (3, 4): VF('THREE QUARTERS'),
    (1, 5): VF('ONE FIFTH'),
    (2, 5): VF('TWO FIFTHS'),
    (3, 5): VF('THREE FIFTHS'),
    (4, 5): VF('FOUR FIFTHS'),
    (1, 6): VF('ONE SIXTH'),
    (5, 6): VF('FIVE SIXTHS'),
    (1, 8): VF('ONE EIGHTH'),
    (3, 8): VF('THREE EIGHTHS'),
    (5, 8): VF('FIVE EIGHTHS'),
    (7, 8): VF('SEVEN EIGHTHS'),
}

# atom symbols: operator -> (ascii form, unicode form); consumed by xsym().
_xsym = {
    '==': ('=', '='),
    '<': ('<', '<'),
    '>': ('>', '>'),
    '<=': ('<=', U('LESS-THAN OR EQUAL TO')),
    '>=': ('>=', U('GREATER-THAN OR EQUAL TO')),
    '!=': ('!=', U('NOT EQUAL TO')),
    '*': ('*', U('DOT OPERATOR')),
}
def xsym(sym):
    """get symbology for a 'character'"""
    ascii_form, unicode_form = _xsym[sym]
    return unicode_form if _use_unicode else ascii_form
# SYMBOLS

# sympy class name -> how to display it in unicode mode.  Values may be
# None when unicodedata lacks the character.
atoms_table = {
    # class               how-to-display
    'Exp1': U('SCRIPT SMALL E'),
    'Pi': U('GREEK SMALL LETTER PI'),
    'Infinity': U('INFINITY'),
    # guard against U() returning None before concatenating the minus sign
    'NegativeInfinity': U('INFINITY') and ('-' + U('INFINITY')),  # XXX what to do here
    # 'ImaginaryUnit'   : U('GREEK SMALL LETTER IOTA'),
    # 'ImaginaryUnit'   : U('MATHEMATICAL ITALIC SMALL I'),
    'ImaginaryUnit': U('DOUBLE-STRUCK ITALIC SMALL I'),
    'EmptySet': U('EMPTY SET'),
    'Naturals': U('DOUBLE-STRUCK CAPITAL N'),
    'Integers': U('DOUBLE-STRUCK CAPITAL Z'),
    'Reals': U('DOUBLE-STRUCK CAPITAL R'),
    'Union': U('UNION'),
    'Intersection': U('INTERSECTION')
}
def pretty_atom(atom_name, default=None):
    """return pretty representation of an atom"""
    if not _use_unicode:
        # in plain mode fall back to the caller-supplied default, if any
        if default is not None:
            return default
        raise KeyError('only unicode')  # send it default printer
    return atoms_table[atom_name]
def pretty_symbol(symb_name):
    """return pretty representation of a symbol"""
    # let's split symb_name into symbol + index
    # UC: beta1
    # UC: f_beta
    if not _use_unicode:
        return symb_name

    name, sups, subs = split_super_sub(symb_name)

    # let's prettify name
    gG = greek.get(name.lower())
    if gG is not None:
        # fix: reuse the (lower, upper) tuple already fetched instead of
        # performing two more redundant dict lookups
        if name.islower():
            greek_name = gG[0]
        else:
            greek_name = gG[1]
        # some letters may not be available
        if greek_name is not None:
            name = greek_name

    # Let's prettify sups/subs. If it fails at one of them, pretty sups/subs are
    # not used at all.
    def pretty_list(l, mapping):
        """Map every token of *l* through *mapping*; None when any token
        (or any of its characters) has no pretty form."""
        result = []
        for s in l:
            pretty = mapping.get(s)
            if pretty is None:
                try:  # match by separate characters
                    pretty = ''.join([mapping[c] for c in s])
                except KeyError:
                    return None
            result.append(pretty)
        return result

    pretty_sups = pretty_list(sups, sup)
    if pretty_sups is not None:
        pretty_subs = pretty_list(subs, sub)
    else:
        pretty_subs = None

    # glue the results into one string
    if pretty_subs is None:  # nice formatting of sups/subs did not work
        return symb_name
    else:
        sups_result = ' '.join(pretty_sups)
        subs_result = ' '.join(pretty_subs)
        return ''.join([name, sups_result, subs_result])
def annotated(letter):
    """
    Return a stylised drawing of the letter ``letter``, together with
    information on how to put annotations (super- and subscripts to the
    left and to the right) on it.

    See pretty.py functions _print_meijerg, _print_hyper on how to use this
    information.
    """
    # (unicode picture, ascii picture) per supported letter
    pics = {
        'F': ((2, 0, 2, 0, u'\u250c\u2500\n\u251c\u2500\n\u2575'),
              (3, 0, 3, 0, ' _\n|_\n|\n')),
        'G': ((3, 0, 3, 1,
               u'\u256d\u2500\u256e\n\u2502\u2576\u2510\n\u2570\u2500\u256f'),
              (3, 0, 3, 1, ' __\n/__\n\_|')),
    }
    unicode_pic, ascii_pic = pics[letter]
    return unicode_pic if _use_unicode else ascii_pic
|
import torch
import torch.nn.functional as F
from torch.optim import Adam
from sac.utils import soft_update, hard_update
from sac.model import TransferQNetwork, TransferGaussianPolicy, Encoder, QNetwork
class HARDSAC(object):
    """Soft Actor-Critic agent with an optional transfer-learning path.

    Wraps a state encoder, a Gaussian policy and twin Q-networks.  When
    ``args.is_transfer`` is set, pretrained dynamics/reward models (plus
    their action encoders) are frozen and their prediction errors are used
    as auxiliary losses for the policy/encoder.
    """

    def __init__(self, num_inputs, action_space, dynamics_model, dynamics_action_encode, reward_model, reward_action_encode, args):
        """Build networks, the target critic and all optimizers.

        num_inputs             -- observation dimensionality
        action_space           -- gym-style action space (shape + bounds)
        dynamics_model         -- pretrained latent-dynamics predictor
        dynamics_action_encode -- action encoder paired with dynamics_model
        reward_model           -- pretrained reward predictor
        reward_action_encode   -- action encoder paired with reward_model
        args                   -- hyper-parameter namespace
        """
        # NOTE(review): anomaly detection is expensive; presumably left on
        # for debugging -- consider disabling for production training.
        torch.autograd.set_detect_anomaly(True)
        self.gamma = args.gamma
        self.tau = args.tau
        self.alpha = args.alpha
        self.args = args
        self.dynamics_model = dynamics_model
        self.dynamics_action_encode = dynamics_action_encode
        self.reward_model = reward_model
        self.reward_action_encode = reward_action_encode
        self.target_update_interval = self.args.target_update_interval
        self.automatic_entropy_tuning = self.args.automatic_entropy_tuning
        # NOTE(review): hard-coded CUDA device; fails on CPU-only hosts.
        self.device = torch.device("cuda")
        self.encoder = Encoder(num_inputs, self.args.hidden_size, self.args.feature_size).to(self.device)
        self.policy = TransferGaussianPolicy(self.args.feature_size, action_space.shape[0], self.args.hidden_size, action_space).to(self.device)
        self.critic = TransferQNetwork(num_inputs, action_space.shape[0], self.args.hidden_size).to(device=self.device)
        self.critic_target = TransferQNetwork(num_inputs, action_space.shape[0], self.args.hidden_size).to(self.device)
        # Start the target network as an exact copy of the critic.
        hard_update(self.critic_target, self.critic)
        if self.automatic_entropy_tuning == True:
            # Standard SAC heuristic: target entropy = -dim(action space);
            # alpha is optimized through its log for positivity.
            self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item()
            self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device)
            self.alpha_optim = Adam([self.log_alpha], lr=args.lr)
        self.critic_optim = Adam(self.critic.parameters(), lr=args.lr)
        self.policy_optim = Adam(self.policy.parameters(), lr=args.lr)
        # Joint optimizer used when the encoder is trained via the policy loss.
        self.policy_encode_optim = Adam([{'params': self.encoder.parameters()},
                                         {'params': self.policy.parameters()}], lr=args.lr)
        if args.is_transfer:
            # The pretrained models join with lr=0 so they are tracked by the
            # optimizer but never updated themselves.
            self.encoder_optim = Adam([{'params': self.encoder.parameters()},
                                       {'params': filter(lambda p: p.requires_grad, self.dynamics_model.parameters()),
                                        'lr': 0},
                                       {'params': filter(lambda p: p.requires_grad,
                                                         self.dynamics_action_encode.parameters()), 'lr': 0},
                                       {'params': filter(lambda p: p.requires_grad, self.reward_model.parameters()),
                                        'lr': 0},
                                       {'params': filter(lambda p: p.requires_grad,
                                                         self.reward_action_encode.parameters()), 'lr': 0}], lr=args.lr)
            # Freeze the pretrained models (done AFTER the optimizer above has
            # materialised its param groups).
            for param in self.dynamics_model.parameters():
                param.requires_grad = False
            for param in self.dynamics_action_encode.parameters():
                param.requires_grad = False
            for param in self.reward_model.parameters():
                param.requires_grad = False
            for param in self.reward_action_encode.parameters():
                param.requires_grad = False

    def select_action(self, state, eval=False):
        """Return one action (numpy array) for *state*.

        eval=False samples stochastically; eval=True returns the
        deterministic (third) output of policy.sample.
        """
        state = torch.FloatTensor(state).to(self.device).unsqueeze(0)
        feature = self.encoder(state)
        if eval == False:
            action, _, _ = self.policy.sample(feature)
        else:
            _, _, action = self.policy.sample(feature)
        return action.detach().cpu().numpy()[0]

    def update_parameters(self, memory, batch_size, updates):
        """One SAC gradient step on critic, policy and (optionally) alpha.

        *memory* is an already-sampled batch tuple of
        (state, action, reward, next_state, mask); *batch_size* is unused
        here.  Returns scalar losses for logging.
        """
        state_batch, action_batch, reward_batch, next_state_batch, mask_batch = memory
        state_batch = torch.FloatTensor(state_batch).to(self.device)
        next_state_batch = torch.FloatTensor(next_state_batch).to(self.device)
        action_batch = torch.FloatTensor(action_batch).to(self.device)
        reward_batch = torch.FloatTensor(reward_batch).to(self.device).unsqueeze(1)
        mask_batch = torch.FloatTensor(mask_batch).to(self.device).unsqueeze(1)
        # Features WITH grad: auxiliary model losses flow into the encoder.
        feature_batch = self.encoder(state_batch)
        with torch.no_grad():
            # Grad-free features for the policy loss and the TD target.
            feature_policy_batch = self.encoder(state_batch)
            next_feature_batch = self.encoder(next_state_batch)
            next_state_action, next_state_log_pi, _ = self.policy.sample(next_feature_batch)
            qf1_next_target, qf2_next_target = self.critic_target(next_state_batch, next_state_action)
            # Clipped double-Q target with entropy bonus.
            min_qf_next_target = torch.min(qf1_next_target, qf2_next_target) - self.alpha * next_state_log_pi
            next_q_value = reward_batch + mask_batch * self.gamma * (min_qf_next_target)
            dynamics_model_label = next_feature_batch
            reward_model_label = reward_batch
        qf1, qf2 = self.critic(state_batch, action_batch)
        qf1_loss = F.mse_loss(qf1, next_q_value)
        qf2_loss = F.mse_loss(qf2, next_q_value)
        if self.args.is_transfer:
            with torch.no_grad():
                dynamics_action_feature = self.dynamics_action_encode(action_batch)
                reward_action_feature = self.reward_action_encode(action_batch)
            # Auxiliary predictions from the frozen transfer models.
            dynamics_model_pred = self.dynamics_model(feature_batch, dynamics_action_feature)
            reward_model_pred = self.reward_model(feature_batch, reward_action_feature)
            dynamics_model_loss = F.mse_loss(dynamics_model_pred, dynamics_model_label)
            reward_model_loss = F.mse_loss(reward_model_pred, reward_model_label)
            pi, log_pi, _ = self.policy.sample(feature_policy_batch)
            qf1_pi, qf2_pi = self.critic(state_batch, pi)
            min_qf_pi = torch.min(qf1_pi, qf2_pi)
            policy_loss = ((self.alpha * log_pi) - min_qf_pi).mean()
            # Add the auxiliary model-consistency terms to the policy loss.
            policy_loss += (dynamics_model_loss + reward_model_loss)
            self.policy_optim.zero_grad()
            policy_loss.backward(retain_graph=True)
            self.policy_optim.step()
        else:
            pi, log_pi, _ = self.policy.sample(feature_batch)
            qf1_pi, qf2_pi = self.critic(state_batch, pi)
            min_qf_pi = torch.min(qf1_pi, qf2_pi)
            policy_loss = ((self.alpha * log_pi) - min_qf_pi).mean()
            # Zero placeholders so the return signature stays uniform.
            dynamics_model_loss = torch.tensor(0.).to(self.device)
            reward_model_loss = torch.tensor(0.).to(self.device)
            self.policy_optim.zero_grad()
            policy_loss.backward(retain_graph=True)
            self.policy_optim.step()
        self.critic_optim.zero_grad()
        (qf1_loss + qf2_loss).backward()
        self.critic_optim.step()
        if self.automatic_entropy_tuning:
            # Temperature update towards the target entropy.
            alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean()
            self.alpha_optim.zero_grad()
            alpha_loss.backward()
            self.alpha_optim.step()
            self.alpha = self.log_alpha.exp()
            alpha_tlogs = self.alpha.clone()
        else:
            alpha_loss = torch.tensor(0.).to(self.device)
            alpha_tlogs = torch.tensor(self.alpha)
        if updates % self.target_update_interval == 0:
            # Polyak-average the critic into the target network.
            soft_update(self.critic_target, self.critic, self.tau)
        return qf1_loss.item(), qf2_loss.item(), policy_loss.item(), alpha_loss.item(), alpha_tlogs.item(), dynamics_model_loss.item(), reward_model_loss.item()

    def update_policy(self, memory, batch_size, updates):
        """SAC step WITHOUT the auxiliary model losses.

        In transfer mode the encoder is held fixed (grad-free features);
        otherwise the encoder is trained jointly via policy_encode_optim.
        """
        state_batch, action_batch, reward_batch, next_state_batch, mask_batch = memory
        state_batch = torch.FloatTensor(state_batch).to(self.device)
        next_state_batch = torch.FloatTensor(next_state_batch).to(self.device)
        action_batch = torch.FloatTensor(action_batch).to(self.device)
        reward_batch = torch.FloatTensor(reward_batch).to(self.device).unsqueeze(1)
        mask_batch = torch.FloatTensor(mask_batch).to(self.device).unsqueeze(1)
        with torch.no_grad():
            feature_policy_batch = self.encoder(state_batch)
            next_feature_batch = self.encoder(next_state_batch)
            next_state_action, next_state_log_pi, _ = self.policy.sample(next_feature_batch)
            qf1_next_target, qf2_next_target = self.critic_target(next_state_batch, next_state_action)
            min_qf_next_target = torch.min(qf1_next_target, qf2_next_target) - self.alpha * next_state_log_pi
            next_q_value = reward_batch + mask_batch * self.gamma * (min_qf_next_target)
        qf1, qf2 = self.critic(state_batch, action_batch)
        qf1_loss = F.mse_loss(qf1, next_q_value)
        qf2_loss = F.mse_loss(qf2, next_q_value)
        if self.args.is_transfer:
            # Encoder frozen: policy only.
            pi, log_pi, _ = self.policy.sample(feature_policy_batch)
            qf1_pi, qf2_pi = self.critic(state_batch, pi)
            min_qf_pi = torch.min(qf1_pi, qf2_pi)
            policy_loss = ((self.alpha * log_pi) - min_qf_pi).mean()
            self.policy_optim.zero_grad()
            policy_loss.backward(retain_graph=True)
            self.policy_optim.step()
        else:
            # Encoder trained end-to-end through the policy loss.
            feature_batch = self.encoder(state_batch)
            pi, log_pi, _ = self.policy.sample(feature_batch)
            qf1_pi, qf2_pi = self.critic(state_batch, pi)
            min_qf_pi = torch.min(qf1_pi, qf2_pi)
            policy_loss = ((self.alpha * log_pi) - min_qf_pi).mean()
            self.policy_encode_optim.zero_grad()
            policy_loss.backward(retain_graph=True)
            self.policy_encode_optim.step()
        self.critic_optim.zero_grad()
        (qf1_loss + qf2_loss).backward()
        self.critic_optim.step()
        if self.automatic_entropy_tuning:
            alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean()
            self.alpha_optim.zero_grad()
            alpha_loss.backward()
            self.alpha_optim.step()
            self.alpha = self.log_alpha.exp()
            alpha_tlogs = self.alpha.clone()
        else:
            alpha_loss = torch.tensor(0.).to(self.device)
            alpha_tlogs = torch.tensor(self.alpha)
        if updates % self.target_update_interval == 0:
            soft_update(self.critic_target, self.critic, self.tau)
        return qf1_loss.item(), qf2_loss.item(), policy_loss.item(), alpha_loss.item(), alpha_tlogs.item()

    def update_encoder(self, memory):
        """Train the encoder alone against the frozen transfer models.

        The dynamics model must predict next-state features and the reward
        model the observed reward, from the current encoded features.
        """
        state_batch, action_batch, reward_batch, next_state_batch, mask_batch = memory
        state_batch = torch.FloatTensor(state_batch).to(self.device)
        next_state_batch = torch.FloatTensor(next_state_batch).to(self.device)
        action_batch = torch.FloatTensor(action_batch).to(self.device)
        reward_batch = torch.FloatTensor(reward_batch).to(self.device).unsqueeze(1)
        feature_batch = self.encoder(state_batch)
        with torch.no_grad():
            # Targets are grad-free so only the current-state path learns.
            next_feature_batch = self.encoder(next_state_batch)
            next_state_action, next_state_log_pi, _ = self.policy.sample(next_feature_batch)
            dynamics_model_label = next_feature_batch
            reward_model_label = reward_batch
        dynamics_action_feature = self.dynamics_action_encode(action_batch)
        reward_action_feature = self.reward_action_encode(action_batch)
        dynamics_model_pred = self.dynamics_model(feature_batch, dynamics_action_feature)
        reward_model_pred = self.reward_model(feature_batch, reward_action_feature)
        dynamics_model_loss = F.mse_loss(dynamics_model_pred, dynamics_model_label)
        reward_model_loss = F.mse_loss(reward_model_pred, reward_model_label)
        # NOTE(review): debug print left in; consider a logger.
        print(dynamics_model_loss.item(), reward_model_loss.item())
        self.encoder_optim.zero_grad()
        (dynamics_model_loss + reward_model_loss).backward()
        self.encoder_optim.step()
        return dynamics_model_loss.item(), reward_model_loss.item()
|
/**
* Copyright 2019 Progress Software Corporation and/or one of its subsidiaries or affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* Registers the "ti-ET" (Tigrinya, Ethiopia) culture with Kendo UI.
   Loads as an AMD module when a loader is present, else runs directly. */
(function(f){
    if (typeof define === 'function' && define.amd) {
        define(["kendo.core"], f);
    } else {
        f();
    }
}(function(){
(function( window, undefined ) {
    kendo.cultures["ti-ET"] = {
        name: "ti-ET",
        // Number, percent and currency formatting rules.
        numberFormat: {
            pattern: ["-n"],
            decimals: 2,
            ",": ",",
            ".": ".",
            groupSize: [3],
            percent: {
                pattern: ["-n%","n%"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "%"
            },
            currency: {
                name: "Ethiopian Birr",
                abbr: "ETB",
                pattern: ["-$n","$n"],
                decimals: 2,
                ",": ",",
                ".": ".",
                groupSize: [3],
                symbol: "Br"
            }
        },
        calendars: {
            standard: {
                // Day and month names in the Ge'ez script.
                days: {
                    names: ["ሰንበት","ሰኑይ","ሠሉስ","ረቡዕ","ኃሙስ","ዓርቢ","ቀዳም"],
                    namesAbbr: ["ሰንበት","ሰኑይ","ሠሉስ","ረቡዕ","ኃሙስ","ዓርቢ","ቀዳም"],
                    namesShort: ["ሰንበት","ሰኑይ","ሠሉስ","ረቡዕ","ኃሙስ","ዓርቢ","ቀዳም"]
                },
                months: {
                    names: ["ጃንዩወሪ","ፌብሩወሪ","ማርች","ኤፕረል","ሜይ","ጁን","ጁላይ","ኦገስት","ሴፕቴምበር","ኦክተውበር","ኖቬምበር","ዲሴምበር"],
                    namesAbbr: ["ጃንዩ","ፌብሩ","ማርች","ኤፕረ","ሜይ","ጁን","ጁላይ","ኦገስ","ሴፕቴ","ኦክተ","ኖቬም","ዲሴም"]
                },
                AM: ["AM","am","AM"],
                PM: ["PM","pm","PM"],
                // Standard .NET-style date/time format patterns.
                patterns: {
                    d: "dd/MM/yyyy",
                    D: "dddd፣ dd MMMM መዓልቲ yyyy gg",
                    F: "dddd፣ dd MMMM መዓልቲ yyyy gg h:mm:ss tt",
                    g: "dd/MM/yyyy h:mm tt",
                    G: "dd/MM/yyyy h:mm:ss tt",
                    m: "MMMM d",
                    M: "MMMM d",
                    s: "yyyy'-'MM'-'dd'T'HH':'mm':'ss",
                    t: "h:mm tt",
                    T: "h:mm:ss tt",
                    u: "yyyy'-'MM'-'dd HH':'mm':'ss'Z'",
                    y: "MMMM yyyy",
                    Y: "MMMM yyyy"
                },
                "/": "/",
                ":": ":",
                firstDay: 0
            }
        }
    }
})(this);
}));
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from localflavor.pl.forms import PLPostalCodeField
from postal.forms import PostalAddressForm
class PLPostalAddressForm(PostalAddressForm):
    """Postal address form specialised for Poland.

    Overrides the street/city fields, validates the zip code with the
    localflavor Polish postal-code field, drops fields that do not apply
    (``line2``, ``state``) and pre-selects Poland as the country.
    """

    line1 = forms.CharField(label=_(u"Street"), max_length=100)
    city = forms.CharField(label=_(u"City"), max_length=100)
    code = PLPostalCodeField(label=_(u"Zip code"), max_length=6)

    def __init__(self, *args, **kwargs):
        super(PLPostalAddressForm, self).__init__(*args, **kwargs)
        # Remove base-class fields not used in Polish addresses.
        self.fields.pop('line2')
        self.fields.pop('state')
        # Default the country selector to Poland.
        self.fields['country'].initial = "PL"
|
# Generated by Django 2.1.15 on 2022-03-02 17:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``Tag`` model (auto-generated; avoid manual edits)."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                # Tags are owned by a user and deleted together with the user.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
"""
A collection of modules for collecting, analyzing and plotting
financial data. User contributions welcome!
"""
#from __future__ import division
import os, time, warnings, md5
from urllib import urlopen
try: import datetime
except ImportError:
raise SystemExit('The finance module requires datetime support (python2.3)')
from matplotlib import verbose, get_configdir
from artist import Artist
from dates import date2num, num2date
from matplotlib.cbook import Bunch
from matplotlib.collections import LineCollection, PolyCollection
from matplotlib.colors import colorConverter
from lines import Line2D, TICKLEFT, TICKRIGHT
from patches import Rectangle
import matplotlib.numerix as nx
from matplotlib.transforms import scale_transform, Value, zero, one, \
scale_sep_transform, blend_xy_sep_transform
from pylab import gca
# Directory used to cache downloaded quote data (see quotes_historical_yahoo).
configdir = get_configdir()
cachedir = os.path.join(configdir, 'finance.cache')
def parse_yahoo_historical(fh, asobject=False, adjusted=True):
    """
    Parse the historical data in file handle fh from yahoo finance and return
    results as a list of

      d, open, close, high, low, volume

    where d is a floating point representation of date, as returned by date2num.

    If adjusted is True, use adjusted prices.  If asobject is True, return a
    Bunch with equal-length arrays date, open, close, high, low, volume.
    """
    results = []

    lines = fh.readlines()
    # first line is the CSV header -- skip it
    for line in lines[1:]:
        vals = line.split(',')
        if len(vals) != 7:
            continue  # skip malformed rows
        datestr = vals[0]
        dt = datetime.date(*time.strptime(datestr, '%d-%b-%y')[:3])
        d = date2num(dt)
        # fix: local renamed so it no longer shadows the builtin open()
        open_, high, low, close = [float(val) for val in vals[1:5]]
        volume = int(vals[5])
        if adjusted:
            aclose = float(vals[6])
            # scale open/high/low by the adjusted-close ratio
            m = aclose / close
            open_ *= m
            high *= m
            low *= m
            close = aclose
        results.append((d, open_, close, high, low, volume))

    results.reverse()  # yahoo returns newest first; emit oldest first
    if asobject:
        date, open_, close, high, low, volume = map(nx.asarray, zip(*results))
        return Bunch(date=date, open=open_, close=close, high=high, low=low,
                     volume=volume)
    else:
        return results
def quotes_historical_yahoo(ticker, date1, date2, asobject=False, adjusted=True):
    """
    Get historical data for ticker between date1 and date2.  date1 and
    date2 are datetime instances.

    Results are a list of tuples

      (d, open, close, high, low, volume)

    where d is a floating point representation of date, as returned by date2num.

    If asobject is True, the return val is an object with attrs date,
    open, close, high, low, volume, which are equal length arrays.

    If adjusted is True, use adjusted prices.

    Ex:
    sp = f.quotes_historical_yahoo('^GSPC', d1, d2, asobject=True, adjusted=True)
    returns = (sp.open[1:] - sp.open[:-1])/sp.open[1:]
    [n,bins,patches] = hist(returns, 100)
    mu = mean(returns)
    sigma = std(returns)
    x = normpdf(bins, mu, sigma)
    plot(bins, x, color='red', lw=2)

    NOTE(review): this is legacy Python 2 code (print statement,
    ``except IOError, exc``, ``file()``, the md5 module).
    """
    # Yahoo's CSV API uses zero-based months.
    d1 = (date1.month-1, date1.day, date1.year)
    d2 = (date2.month-1, date2.day, date2.year)

    ## urlFmt = 'http://table.finance.yahoo.com/table.csv?a=%d&b=%d&c=%d&d=%d&e=%d&f=%d&s=%s&y=0&g=d&ignore=.csv'
    ##
    ##
    ## url = urlFmt % (d1[0], d1[1], d1[2],
    ##                 d2[0], d2[1], d2[2], ticker)

    urlFmt = 'http://ichart.finance.yahoo.com/table.csv?s=%s&d=%d&e=%d&f=%d&g=d&a=%d&b=%d&c=%d&ignore=.csv'

    url = urlFmt % (ticker, d2[0], d2[1], d2[2], d1[0], d1[1], d1[2] )
    print url
    # Downloads are cached on disk, keyed by the md5 of the URL.
    cachename = os.path.join(cachedir, md5.md5(url).hexdigest())
    if os.path.exists(cachename):
        fh = file(cachename)
        verbose.report('Using cachefile %s for %s'%(cachename, ticker))
    else:
        if not os.path.isdir(cachedir): os.mkdir(cachedir)
        # write the download to the cache, then reopen it for reading
        fh = file(cachename, 'w')
        fh.write(urlopen(url).read())
        fh.close()
        verbose.report('Saved %s data to cache file %s'%(ticker, cachename))
        fh = file(cachename, 'r')

    ticker = ticker.upper()

    try: ret = parse_yahoo_historical(fh, asobject, adjusted)
    except IOError, exc:
        warnings.warn('urlopen() failure\n' + url + '\n' + exc.strerror[1])
        return None

    return ret
def plot_day_summary(ax, quotes, ticksize=3,
                     colorup='k', colordown='r',
                     ):
    """
    quotes is a list of (time, open, close, high, low, ...) tuples

    Represent the time, open, close, high, low as a vertical line
    ranging from low to high.  The left tick is the open and the right
    tick is the close.

    time must be in float date format - see date2num

    ax          : an Axes instance to plot to
    ticksize    : open/close tick marker in points
    colorup     : the color of the lines where close >= open
    colordown   : the color of the lines where close < open

    return value is a list of lines added
    """
    lines = []
    for q in quotes:
        # NOTE(review): 'open' shadows the builtin here (legacy code).
        t, open, close, high, low = q[:5]

        # color encodes the direction of the day's move
        if close>=open : color = colorup
        else           : color = colordown

        # low-to-high range line
        vline = Line2D(
            xdata=(t, t), ydata=(low, high),
            color=color,
            antialiased=False,   # no need to antialias vert lines
            )

        # open marker (left tick)
        oline = Line2D(
            xdata=(t, t), ydata=(open, open),
            color=color,
            antialiased=False,
            marker=TICKLEFT,
            markersize=ticksize,
            )

        # close marker (right tick)
        cline = Line2D(
            xdata=(t, t), ydata=(close, close),
            color=color,
            antialiased=False,
            markersize=ticksize,
            marker=TICKRIGHT)

        lines.extend((vline, oline, cline))
        ax.add_line(vline)
        ax.add_line(oline)
        ax.add_line(cline)

    ax.autoscale_view()

    return lines
def candlestick(ax, quotes, width=0.2, colorup='k', colordown='r',
                alpha=1.0):
    """
    quotes is a list of (time, open, close, high, low, ...) tuples.
    As long as the first 5 elements of the tuples are these values,
    the tuple can be as long as you want (eg it may store volume).

    time must be in float days format - see date2num

    Plot the time, open, close, high, low as a vertical line ranging
    from low to high.  Use a rectangular bar to represent the
    open-close span.  If close >= open, use colorup to color the bar,
    otherwise use colordown.

    ax          : an Axes instance to plot to
    width       : fraction of a day for the rectangle width
    colorup     : the color of the rectangle where close >= open
    colordown   : the color of the rectangle where close < open
    alpha       : the rectangle alpha level

    return value is lines, patches where lines is a list of lines
    added and patches is a list of the rectangle patches added
    """
    # center the rectangle horizontally on t
    OFFSET = width/2.0

    lines = []
    patches = []
    for q in quotes:
        # NOTE(review): 'open' shadows the builtin here (legacy code).
        t, open, close, high, low = q[:5]

        # rectangle spans open..close; color encodes direction
        if close>=open :
            color = colorup
            lower = open
            height = close-open
        else           :
            color = colordown
            lower = close
            height = open-close

        # low-to-high wick
        vline = Line2D(
            xdata=(t, t), ydata=(low, high),
            color='k',
            linewidth=0.5,
            antialiased=True,
            )

        rect = Rectangle(
            xy = (t-OFFSET, lower),
            width = width,
            height = height,
            facecolor = color,
            edgecolor = color,
            )
        rect.set_alpha(alpha)

        lines.append(vline)
        patches.append(rect)
        ax.add_line(vline)
        ax.add_patch(rect)
    ax.autoscale_view()

    return lines, patches
def plot_day_summary2(ax, opens, closes, highs, lows, ticksize=4,
                      colorup='k', colordown='r',
                      ):
    """
    Represent the time, open, close, high, low as a vertical line
    ranging from low to high.  The left tick is the open and the right
    tick is the close.

    ax : an Axes instance to plot to
    ticksize : size of open and close ticks in points
    colorup : the color of the lines where close >= open
    colordown : the color of the lines where close < open

    return value is a list of lines added
    """
    # note this code assumes if any value open, close, low, high is
    # missing they all are missing

    # One vertical segment per valid bar, from low to high at x=i.
    # Throughout this module -1 is the sentinel for missing data.
    rangeSegments = [ ((i, low), (i, high)) for i, low, high in zip(xrange(len(lows)), lows, highs) if low != -1 ]

    # the ticks will be from ticksize to 0 in points at the origin and
    # we'll translate these to the i, close location
    openSegments = [ ((-ticksize, 0), (0, 0)) ]

    # the ticks will be from 0 to ticksize in points at the origin and
    # we'll translate these to the i, close location
    closeSegments = [ ((0, 0), (ticksize, 0)) ]

    # Offsets stamp one copy of the single tick segment at each bar.
    offsetsOpen = [ (i, open) for i, open in zip(xrange(len(opens)), opens) if open != -1 ]
    offsetsClose = [ (i, close) for i, close in zip(xrange(len(closes)), closes) if close != -1 ]

    # points -> pixels scale (dpi / 72); ticks are sized in points.
    # NOTE(review): Value/scale_transform/zero belong to the pre-1.0
    # matplotlib transforms API -- this module predates its rewrite.
    scale = ax.figure.dpi * Value(1/72.0)

    tickTransform = scale_transform( scale, zero())

    # Force full opacity (alpha = 1) on both colors.
    r,g,b = colorConverter.to_rgb(colorup)
    colorup = r,g,b,1
    r,g,b = colorConverter.to_rgb(colordown)
    colordown = r,g,b,1
    colord = { True : colorup,
               False : colordown,
               }
    # NOTE(review): close == open falls into the colordown bucket, which
    # contradicts the "close >= open" wording above -- confirm whether
    # open<=close was intended.
    colors = [colord[open<close] for open, close in zip(opens, closes) if open!=-1 and close !=-1]

    # The per-bar collections must stay aligned one-to-one.
    assert(len(rangeSegments)==len(offsetsOpen))
    assert(len(offsetsOpen)==len(offsetsClose))
    assert(len(offsetsClose)==len(colors))

    useAA = 0,  # use tuple here
    lw = 1,     # and here
    rangeCollection = LineCollection(rangeSegments,
                                     colors       = colors,
                                     linewidths   = lw,
                                     antialiaseds = useAA,
                                     )

    openCollection = LineCollection(openSegments,
                                    colors       = colors,
                                    antialiaseds = useAA,
                                    linewidths   = lw,
                                    offsets      = offsetsOpen,
                                    transOffset  = ax.transData,
                                    )
    openCollection.set_transform(tickTransform)

    closeCollection = LineCollection(closeSegments,
                                     colors       = colors,
                                     antialiaseds = useAA,
                                     linewidths   = lw,
                                     offsets      = offsetsClose,
                                     transOffset  = ax.transData,
                                     )
    closeCollection.set_transform(tickTransform)

    # Grow the data limits to cover every bar before autoscaling.
    minx, maxx = (0, len(rangeSegments))
    miny = min([low for low in lows if low !=-1])
    maxy = max([high for high in highs if high != -1])
    corners = (minx, miny), (maxx, maxy)
    ax.update_datalim(corners)
    ax.autoscale_view()

    # add these last
    ax.add_collection(rangeCollection)
    ax.add_collection(openCollection)
    ax.add_collection(closeCollection)
    return rangeCollection, openCollection, closeCollection
def candlestick2(ax, opens, closes, highs, lows, width=4,
                 colorup='k', colordown='r',
                 alpha=0.75,
                 ):
    """
    Represent the open, close as a bar line and high low range as a
    vertical line.

    ax : an Axes instance to plot to
    width : the bar width in points
    colorup : the color of the lines where close >= open
    colordown : the color of the lines where close < open
    alpha : bar transparency

    return value is lineCollection, barCollection
    """
    # note this code assumes if any value open, close, low, high is
    # missing they all are missing

    # Bar outline expressed in *points*, centered at x = 0; the offsets
    # below stamp one copy per valid bar and barTransform converts the
    # point-sized widths to pixels.
    right = width/2.0
    left = -width/2.0

    barVerts = [ ( (left, 0), (left, close-open), (right, close-open), (right, 0) ) for open, close in zip(opens, closes) if open != -1 and close!=-1 ]

    rangeSegments = [ ((i, low), (i, high)) for i, low, high in zip(xrange(len(lows)), lows, highs) if low != -1 ]

    offsetsBars = [ (i, open) for i,open in zip(xrange(len(opens)), opens) if open != -1 ]

    sx = ax.figure.dpi * Value(1/72.0)  # scale for points
    # y scale converts data units to pixels so bar heights (close-open)
    # stay in data coordinates while widths stay in points.
    sy = (ax.bbox.ur().y() - ax.bbox.ll().y()) / (ax.viewLim.ur().y() - ax.viewLim.ll().y())
    barTransform = scale_sep_transform(sx,sy)

    # Apply the requested alpha to both colors.
    r,g,b = colorConverter.to_rgb(colorup)
    colorup = r,g,b,alpha
    r,g,b = colorConverter.to_rgb(colordown)
    colordown = r,g,b,alpha
    colord = { True : colorup,
               False : colordown,
               }
    # NOTE(review): close == open selects colordown, contradicting the
    # "close >= open" wording above -- confirm whether open<=close was meant.
    colors = [colord[open<close] for open, close in zip(opens, closes) if open!=-1 and close !=-1]

    # The per-bar collections must stay aligned one-to-one.
    assert(len(barVerts)==len(rangeSegments))
    assert(len(rangeSegments)==len(offsetsBars))
    assert(len(offsetsBars)==len(colors))

    useAA = 0,  # use tuple here
    lw = 0.5,   # and here
    rangeCollection = LineCollection(rangeSegments,
                                     colors       = ( (0,0,0,1), ),
                                     linewidths   = lw,
                                     antialiaseds = useAA,
                                     )

    barCollection = PolyCollection(barVerts,
                                   facecolors   = colors,
                                   edgecolors   = ( (0,0,0,1), ),
                                   antialiaseds = useAA,
                                   linewidths   = lw,
                                   offsets      = offsetsBars,
                                   transOffset  = ax.transData,
                                   )
    barCollection.set_transform(barTransform)

    # Grow the data limits to cover every bar before autoscaling.
    minx, maxx = (0, len(rangeSegments))
    miny = min([low for low in lows if low !=-1])
    maxy = max([high for high in highs if high != -1])
    corners = (minx, miny), (maxx, maxy)
    ax.update_datalim(corners)
    ax.autoscale_view()

    # add these last
    ax.add_collection(barCollection)
    ax.add_collection(rangeCollection)
    return rangeCollection, barCollection
def volume_overlay(ax, opens, closes, volumes,
                   colorup='k', colordown='r',
                   width=4, alpha=1.0):
    """
    Add a volume overlay to the current axes.  The opens and closes
    are used to determine the color of the bar.  -1 is missing.  If a
    value is missing on one it must be missing on all

    ax : an Axes instance to plot to
    width : the bar width in points
    colorup : the color of the lines where close >= open
    colordown : the color of the lines where close < open
    alpha : bar transparency

    return value is the volume PolyCollection added to *ax*
    """
    # Apply the requested alpha to both colors.
    r,g,b = colorConverter.to_rgb(colorup)
    colorup = r,g,b,alpha
    r,g,b = colorConverter.to_rgb(colordown)
    colordown = r,g,b,alpha
    colord = { True : colorup,
               False : colordown,
               }
    colors = [colord[open<close] for open, close in zip(opens, closes) if open!=-1 and close !=-1]

    # Bar outline in points, centered at x = 0; offsets place one copy
    # per valid volume and barTransform scales points to pixels.
    right = width/2.0
    left = -width/2.0
    bars = [ ( (left, 0), (left, v), (right, v), (right, 0)) for v in volumes if v != -1 ]

    sx = ax.figure.dpi * Value(1/72.0)  # scale for points
    sy = (ax.bbox.ur().y() - ax.bbox.ll().y()) / (ax.viewLim.ur().y() - ax.viewLim.ll().y())
    barTransform = scale_sep_transform(sx,sy)

    offsetsBars = [ (i, 0) for i,v in enumerate(volumes) if v != -1 ]

    barCollection = PolyCollection(bars,
                                   facecolors   = colors,
                                   edgecolors   = ( (0,0,0,1), ),
                                   antialiaseds = (0,),
                                   linewidths   = (0.5,),
                                   offsets      = offsetsBars,
                                   transOffset  = ax.transData,
                                   )
    barCollection.set_transform(barTransform)

    minx, maxx = (0, len(offsetsBars))
    miny = 0
    maxy = max([v for v in volumes if v!=-1])
    corners = (minx, miny), (maxx, maxy)
    ax.update_datalim(corners)
    ax.autoscale_view()

    # add these last
    # BUG FIX: the collection was built but never attached to the axes,
    # so the overlay silently never appeared.  Attach it before returning
    # (mirrors index_bar below).
    ax.add_collection(barCollection)
    return barCollection
def index_bar(ax, vals,
              facecolor='b', edgecolor='k',
              width=4, alpha=1.0, ):
    """
    Add a bar collection graph with height vals (-1 is missing).

    ax : an Axes instance to plot to
    facecolor : fill color of the bars
    edgecolor : outline color of the bars
              (BUG FIX: default was 'l', which is not a valid matplotlib
              color abbreviation and made colorConverter raise whenever
              the default was used; 'k' (black) matches the other
              functions in this module)
    width : the bar width in points
    alpha : bar transparency

    return value is the bar PolyCollection added to *ax*
    """
    facecolors = (colorConverter.to_rgba(facecolor, alpha),)
    edgecolors = (colorConverter.to_rgba(edgecolor, alpha),)

    # Bar outline in points, centered at x = 0; offsets place one copy
    # per valid value and barTransform scales points to pixels.
    right = width/2.0
    left = -width/2.0
    bars = [ ( (left, 0), (left, v), (right, v), (right, 0)) for v in vals if v != -1 ]

    sx = ax.figure.dpi * Value(1/72.0)  # scale for points
    sy = (ax.bbox.ur().y() - ax.bbox.ll().y()) / (ax.viewLim.ur().y() - ax.viewLim.ll().y())
    barTransform = scale_sep_transform(sx,sy)

    offsetsBars = [ (i, 0) for i,v in enumerate(vals) if v != -1 ]

    barCollection = PolyCollection(bars,
                                   facecolors   = facecolors,
                                   edgecolors   = edgecolors,
                                   antialiaseds = (0,),
                                   linewidths   = (0.5,),
                                   offsets      = offsetsBars,
                                   transOffset  = ax.transData,
                                   )
    barCollection.set_transform(barTransform)

    minx, maxx = (0, len(offsetsBars))
    miny = 0
    maxy = max([v for v in vals if v!=-1])
    corners = (minx, miny), (maxx, maxy)
    ax.update_datalim(corners)
    ax.autoscale_view()

    # add these last
    ax.add_collection(barCollection)
    return barCollection
|
const mongoose = require('mongoose')
const { validationResult } = require('express-validator/check');
const Product = require('../models/product');
exports.getAddProduct = (req, res, next) => {
res.render('admin/edit-product', {
pageTitle: 'Add Product',
path: '/admin/add-product',
editing: false,
hasError: false,
errorMessage: null,
validationErrors: []
});
};
exports.postAddProduct = (req, res, next) => {
const title = req.body.title;
const imageUrl = req.body.imageUrl;
const price = req.body.price;
const description = req.body.description;
const errors = validationResult(req);
if (!errors.isEmpty()) {
console.log(errors.array());
return res.status(422).render('admin/edit-product', {
pageTitle: 'Add Product',
path: '/admin/edit-product',
editing: false,
hasError: true,
product: {
title: title,
imageUrl: imageUrl,
price: price,
description: description
},
errorMessage: errors.array()[0].msg,
validationErrors: errors.array()
});
}
const product = new Product({
_id: mongoose.Types.ObjectId();
title: title,
price: price,
description: description,
imageUrl: imageUrl,
userId: req.user
});
product
.save()
.then(result => {
// console.log(result);
console.log('Created Product');
res.redirect('/admin/products');
})
.catch(err => {
console.log(err);
});
};
exports.getEditProduct = (req, res, next) => {
const editMode = req.query.edit;
if (!editMode) {
return res.redirect('/');
}
const prodId = req.params.productId;
Product.findById(prodId)
.then(product => {
if (!product) {
return res.redirect('/');
}
res.render('admin/edit-product', {
pageTitle: 'Edit Product',
path: '/admin/edit-product',
editing: editMode,
product: product,
hasError: false,
errorMessage: null,
validationErrors: []
});
})
.catch(err => console.log(err));
};
exports.postEditProduct = (req, res, next) => {
const prodId = req.body.productId;
const updatedTitle = req.body.title;
const updatedPrice = req.body.price;
const updatedImageUrl = req.body.imageUrl;
const updatedDesc = req.body.description;
const errors = validationResult(req);
if (!errors.isEmpty()) {
return res.status(422).render('admin/edit-product', {
pageTitle: 'Edit Product',
path: '/admin/edit-product',
editing: true,
hasError: true,
product: {
title: updatedTitle,
imageUrl: updatedImageUrl,
price: updatedPrice,
description: updatedDesc,
_id: prodId
},
errorMessage: errors.array()[0].msg,
validationErrors: errors.array()
});
}
Product.findById(prodId)
.then(product => {
if (product.userId.toString() !== req.user._id.toString()) {
return res.redirect('/');
}
product.title = updatedTitle;
product.price = updatedPrice;
product.description = updatedDesc;
product.imageUrl = updatedImageUrl;
return product.save().then(result => {
console.log('UPDATED PRODUCT!');
res.redirect('/admin/products');
});
})
.catch(err => console.log(err));
};
exports.getProducts = (req, res, next) => {
Product.find({ userId: req.user._id })
// .select('title price -_id')
// .populate('userId', 'name')
.then(products => {
console.log(products);
res.render('admin/products', {
prods: products,
pageTitle: 'Admin Products',
path: '/admin/products'
});
})
.catch(err => console.log(err));
};
exports.postDeleteProduct = (req, res, next) => {
const prodId = req.body.productId;
Product.deleteOne({ _id: prodId, userId: req.user._id })
.then(() => {
console.log('DESTROYED PRODUCT');
res.redirect('/admin/products');
})
.catch(err => console.log(err));
};
|
import os
import sys
import math
import time
import bintrees
import blist
import BTrees.OOBTree
import _src
sys.path.extend(['..', '../..'])
import banyan
def _run_test(fn, type_, num_items, num_its):
    """
    Build a container with *fn* over random keys and time lookups.

    fn        : callable taking an iterable of keys, returning a mapping
                in which every key maps to 1
    type_     : key type, int or str (anything else is a programming error)
    num_items : number of random keys to generate
    num_its   : number of passes to average the timing over

    Returns the average wall-clock time in seconds for one full pass of
    lookups over all keys.
    """
    if type_ == int:
        es = _src.random_ints(num_items)
    elif type_ == str:
        es = _src.random_strings(num_items, 8)
    else:
        assert False
    t = fn(es)
    start = time.time()
    # sum_ consumes the lookup results so the loop cannot be optimized
    # away and so a failed lookup surfaces as an exception/odd total.
    sum_ = 1
    for _ in range(num_its):
        for e in es:
            sum_ += t[e]
    end = time.time()
    diff = (end - start) / num_its
    if sum_ == 0:
        # BUG FIX: this previously raised RuntimeError(not_in) with
        # 'not_in' undefined, masking the real failure with a NameError.
        raise RuntimeError('lookup sanity check failed: sum of values is 0')
    return diff
def run_tests(names, num_items, num_its, type_ = int):
    """
    Benchmark key lookup across the named mapping implementations.

    names     : iterable of implementation names -- keys of the *fns*
                table below (e.g. 'dict', 'bintrees', 'banyan_splay_tree')
    num_items : number of random keys per container
    num_its   : iterations to average over
    type_     : key type, int or str

    Returns a dict mapping each requested name to its average lookup
    time in seconds as measured by _run_test.
    """
    # Each factory builds a mapping of the random keys, all with value 1,
    # so the timed loop in _run_test measures lookup cost only.  The
    # *_gen variants use key_type=None, i.e. banyan's generic comparison.
    fns = dict([
        ('btrees', lambda es: BTrees.OOBTree.OOBTree([(e, 1) for e in es])),
        ('blist', lambda es: blist.sorteddict.fromkeys(es, 1)),
        ('bintrees', lambda es: bintrees.FastRBTree([(e, 1) for e in es])),
        ('dict', lambda es: dict.fromkeys(es, 1)),
        ('banyan_red_black_tree',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = type_, alg = banyan.RED_BLACK_TREE)),
        ('banyan_splay_tree',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = type_, alg = banyan.SPLAY_TREE)),
        ('banyan_sorted_list',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = type_, alg = banyan.SORTED_LIST)),
        ('banyan_red_black_tree_gen',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = None, alg = banyan.RED_BLACK_TREE)),
        ('banyan_splay_tree_gen',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = None, alg = banyan.SPLAY_TREE)),
        ('banyan_sorted_list_gen',
            lambda es: banyan.SortedDict.fromkeys(es, 1, key_type = None, alg = banyan.SORTED_LIST))])
    t = dict([])
    for name in names:
        t[name] = _run_test(fns[name], type_, num_items, num_its)
    return t
|
/**
* Copyright (c) 2013-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails oncall+relay
*/
'use strict';
require('configureForRelayOSS');
jest
.dontMock('GraphQLRange')
.dontMock('GraphQLSegment')
.dontMock('GraphQLStoreQueryResolver');
const GraphQLFragmentPointer = require('GraphQLFragmentPointer');
const GraphQLStoreQueryResolver = require('GraphQLStoreQueryResolver');
const Relay = require('Relay');
const RelayStoreData = require('RelayStoreData');
const RelayTestUtils = require('RelayTestUtils');
const readRelayQueryData = require('readRelayQueryData');
const transformRelayQueryPayload = require('transformRelayQueryPayload');
describe('GraphQLStoreQueryResolver', () => {
var changeEmitter;
var storeData;
var mockCallback;
var mockQueryFragment;
var mockPluralQueryFragment;
var {getNode} = RelayTestUtils;
function mockReader(mockResult) {
readRelayQueryData.mockImplementation((_, __, dataID) => {
return {
dataIDs: {[dataID]: true},
data: mockResult[dataID],
};
});
}
beforeEach(() => {
jest.resetModuleRegistry();
storeData = new RelayStoreData();
changeEmitter = storeData.getChangeEmitter();
mockCallback = jest.genMockFunction();
mockQueryFragment = getNode(Relay.QL`fragment on Node{id,name}`);
mockPluralQueryFragment = getNode(Relay.QL`
fragment on Node @relay(plural:true) {
id,
name,
}
`);
jasmine.addMatchers(RelayTestUtils.matchers);
});
it('should resolve a pointer', () => {
var fragmentPointer = new GraphQLFragmentPointer(
'1038750002',
mockQueryFragment
);
var mockResult = {__dataID__: '1038750002', id: '1038750002', name: 'Tim'};
readRelayQueryData.mockReturnValue({data: mockResult});
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
var resolved = resolver.resolve(fragmentPointer);
expect(resolved).toBe(mockResult);
expect(readRelayQueryData).toBeCalled();
expect(readRelayQueryData.mock.calls[0][1]).toBe(mockQueryFragment);
expect(readRelayQueryData.mock.calls[0][2]).toEqual(
fragmentPointer.getDataID()
);
});
it('should subscribe to IDs in resolved pointer', () => {
var fragmentPointer = new GraphQLFragmentPointer(
'1038750002',
mockQueryFragment
);
var mockResult = {
'1038750002': {__dataID__: '1038750002', id: '1038750002', name: 'Tim'},
};
mockReader(mockResult);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
resolver.resolve(fragmentPointer);
var addListenersForIDs = changeEmitter.addListenerForIDs;
expect(addListenersForIDs).toBeCalled();
expect(addListenersForIDs.mock.calls[0][0]).toEqual(['1038750002']);
});
it('should not re-resolve pointers without change events', () => {
var fragmentPointer = new GraphQLFragmentPointer(
'1038750002',
mockQueryFragment
);
var mockResultA = {__dataID__: '1038750002', id: '1038750002', name: 'Tim'};
var mockResultB = {__dataID__: '1038750002', id: '1038750002', name: 'Tim'};
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
readRelayQueryData.mockReturnValue({data: mockResultA});
var resolvedA = resolver.resolve(fragmentPointer);
readRelayQueryData.mockReturnValue({data: mockResultB});
var resolvedB = resolver.resolve(fragmentPointer);
expect(readRelayQueryData.mock.calls.length).toBe(1);
expect(resolvedA).toBe(resolvedB);
});
it('should re-resolve pointers with change events', () => {
var fragmentPointer = new GraphQLFragmentPointer(
'1038750002',
mockQueryFragment
);
var mockResultA = {__dataID__: '1038750002', id: '1038750002', name: 'Tim'};
var mockResultB = {__dataID__: '1038750002', id: '1038750002', name: 'Tee'};
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
mockReader({
[mockResultA.id]: mockResultA,
});
var resolvedA = resolver.resolve(fragmentPointer);
var callback = changeEmitter.addListenerForIDs.mock.calls[0][1];
callback(['1038750002']);
mockReader({
[mockResultB.id]: mockResultB,
});
var resolvedB = resolver.resolve(fragmentPointer);
expect(readRelayQueryData.mock.calls.length).toBe(2);
expect(resolvedA).toBe(mockResultA);
expect(resolvedB).toBe(mockResultB);
});
it('should re-resolve pointers whose calls differ', () => {
var fragmentPointerA = new GraphQLFragmentPointer(
'client:123_first(10)',
mockQueryFragment
);
var fragmentPointerB = new GraphQLFragmentPointer(
'client:123_first(20)',
mockQueryFragment
);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointerA,
mockCallback
);
require('GraphQLStoreRangeUtils').getCanonicalClientID =
// The canonical ID of a range customarily excludes the calls
jest.genMockFunction().mockReturnValue('client:123');
resolver.resolve(fragmentPointerA);
resolver.resolve(fragmentPointerB);
expect(readRelayQueryData.mock.calls.length).toBe(2);
});
it('should invoke the callback when change events fire', () => {
var fragmentPointer = new GraphQLFragmentPointer(
'1038750002',
mockQueryFragment
);
var mockResult = {
'1038750002': {__dataID__: '1038750002', id: '1038750002', name: 'Tim'},
};
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
mockReader(mockResult);
resolver.resolve(fragmentPointer);
var callback = changeEmitter.addListenerForIDs.mock.calls[0][1];
callback(['1038750002']);
expect(mockCallback).toBeCalled();
});
it('should resolve an array of pointers', () => {
var fragmentPointer = new GraphQLFragmentPointer(
['1', '2'],
mockPluralQueryFragment
);
var mockResults = {
'1': {__dataID__: '1', name: 'One'},
'2': {__dataID__: '2', name: 'Two'},
};
mockReader(mockResults);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
var resolved = resolver.resolve(fragmentPointer);
expect(resolved.length).toBe(2);
expect(resolved[0]).toBe(mockResults['1']);
expect(resolved[1]).toBe(mockResults['2']);
expect(readRelayQueryData.mock.calls[0][2]).toEqual(
fragmentPointer.getDataIDs()[0]
);
expect(readRelayQueryData.mock.calls[1][2]).toEqual(
fragmentPointer.getDataIDs()[1]
);
});
it('should not re-resolve if the pointer array has no changes', () => {
var fragmentPointer = new GraphQLFragmentPointer(
['1', '2'],
mockPluralQueryFragment
);
var mockResults = {
'1': {__dataID__: '1', name: 'One'},
'2': {__dataID__: '2', name: 'Two'},
};
mockReader(mockResults);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
var resolvedA = resolver.resolve(fragmentPointer);
var resolvedB = resolver.resolve(fragmentPointer);
expect(resolvedA).toBe(resolvedB);
});
it('should only re-resolve pointers with changes in an array', () => {
var fragmentPointer = new GraphQLFragmentPointer(
['1', '2'],
mockPluralQueryFragment
);
var mockResults = {
'1': {__dataID__: '1', name: 'One'},
'2': {__dataID__: '2', name: 'Two'},
};
mockReader(mockResults);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
var resolvedA = resolver.resolve(fragmentPointer);
mockResults['1'] = {__dataID__: '1', name: 'Won'};
var callback = changeEmitter.addListenerForIDs.mock.calls[0][1];
callback(['1']);
var resolvedB = resolver.resolve(fragmentPointer);
expect(resolvedA).not.toBe(resolvedB);
expect(resolvedB.length).toBe(2);
expect(resolvedB[0]).toBe(mockResults['1']);
expect(resolvedB[1]).toBe(mockResults['2']);
expect(readRelayQueryData.mock.calls.length).toBe(3);
expect(readRelayQueryData.mock.calls[2][2]).toEqual('1');
});
it('should create a new array if the pointer array shortens', () => {
var fragmentPointer = new GraphQLFragmentPointer(
['1', '2'],
mockPluralQueryFragment
);
var fragmentPointerB = new GraphQLFragmentPointer(
['1'],
mockPluralQueryFragment
);
var mockResults = {
'1': {__dataID__: '1', name: 'One'},
'2': {__dataID__: '2', name: 'Two'},
};
mockReader(mockResults);
var resolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
mockCallback
);
var resolvedA = resolver.resolve(fragmentPointer);
var resolvedB = resolver.resolve(fragmentPointerB);
expect(resolvedA).not.toBe(resolvedB);
expect(resolvedA.length).toBe(2);
expect(resolvedB.length).toBe(1);
});
describe('garbage collection', () => {
let fragment;
beforeEach(() => {
storeData.initializeGarbageCollector(run => {
while (run()) {}
});
const containerFragment = RelayTestUtils.createContainerFragment(Relay.QL`
fragment on NewsFeedConnection {
edges {
node {
id
}
}
}
`);
fragment = Relay.QL`
fragment on Viewer {
actor {
id
}
newsFeed(first: "1") {
${containerFragment}
}
}
`;
const query = getNode(Relay.QL`
query {
viewer {
${fragment}
}
}
`);
const payload = {
viewer: {
actor: {
id: '123',
},
newsFeed: {
edges: [
{
node: {
id: '456',
},
},
],
},
},
};
storeData.handleQueryPayload(
query,
transformRelayQueryPayload(query, payload),
1
);
});
it('increments references to read data', () => {
const fragmentPointer = new GraphQLFragmentPointer(
'client:1',
getNode(fragment)
);
const queryResolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
jest.genMockFunction()
);
// read data and set up subscriptions
queryResolver.resolve(fragmentPointer);
// evict unreferenced nodes
storeData.getGarbageCollector().collect();
// nodes referenced by the fragment should not be evicted
expect(Object.keys(storeData.getNodeData())).toEqual([
'123', // viewer.actor
'client:1', // viewer
'client:2', // viewer.newsFeed
]);
});
it('decrements references to previously read fields', () => {
const fragmentPointer = new GraphQLFragmentPointer(
'client:1',
getNode(fragment)
);
const queryResolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
jest.genMockFunction()
);
// read data and increment GC ref counts
queryResolver.resolve(fragmentPointer);
const callback =
storeData.getChangeEmitter().addListenerForIDs.mock.calls[0][1];
// Remove the link to viewer.actor and broadcast an update
storeData.getRecordWriter().putField('client:1', 'actor', null);
storeData.getRecordWriter().putField('client:1', 'newsFeed', null);
callback(['client:1']);
// re-read and increment/decrement GC ref counts
queryResolver.resolve(fragmentPointer);
// evict unreferenced nodes
storeData.getGarbageCollector().collect();
// nodes referenced by the fragment should not be evicted
expect(Object.keys(storeData.getNodeData())).toEqual([
// '123' (actor) is unreferenced and collected
// 'client:2' (viewer.newsFeed) is unreferenced and collected
'client:1', // viewer
]);
});
it('decrements references when reset', () => {
const fragmentPointer = new GraphQLFragmentPointer(
'client:1',
getNode(fragment)
);
const queryResolver = new GraphQLStoreQueryResolver(
storeData,
fragmentPointer,
jest.genMockFunction()
);
// read data and increment GC ref counts
queryResolver.resolve(fragmentPointer);
// reset the resolver; should unreference all nodes
queryResolver.reset();
// evict unreferenced nodes
storeData.getGarbageCollector().collect();
// all nodes are unreferenced and should be removed
expect(storeData.getNodeData()).toEqual({});
});
});
});
|
// Tailwind CSS configuration for a Next.js-style layout
// (pages/components/layout source trees, .tsx only).
module.exports = {
  mode: 'jit',
  content: ['./pages/**/*.tsx', './components/**/*.tsx', './layout/**/*.tsx'],
  // Dark mode is toggled via a class on the root element, not media query.
  darkMode: 'class',
  theme: {
    extend: {
      colors: {
        transparent: 'transparent',
        current: 'currentColor',
        white: '#fff',
        black: '#000',
        // Custom gray ramp replacing Tailwind's default grays.
        gray: {
          50: '#f8f9fa',
          100: '#f1f3f5',
          200: '#e9ecef',
          300: '#dee2e6',
          400: '#ced4da',
          500: '#adb5bd',
          600: '#868e96',
          700: '#495057',
          800: '#343a40',
          900: '#212529'
        }
      },
      // [font-size, line-height] pairs: px sizes, rem (or unitless) leading.
      fontSize: {
        xs: ['12px', '1rem'],
        sm: ['14px', '1.25rem'],
        base: ['16px', '1.5rem'],
        lg: ['18px', '1.75rem'],
        xl: ['20px', '1.75rem'],
        '2xl': ['24px', '2rem'],
        '3xl': ['28px', '2.25rem'],
        '4xl': ['32px', '1'],
        '5xl': ['36px', '1']
      },
      // CSS variables consumed by the @tailwindcss/typography 'prose'
      // classes; the -invert-* set styles prose in dark mode.
      typography: ({ theme }) => ({
        gray: {
          css: {
            '--tw-prose-body': theme('colors.gray[800]'),
            '--tw-prose-headings': theme('colors.gray[900]'),
            '--tw-prose-lead': theme('colors.gray[700]'),
            '--tw-prose-links': theme('colors.blue[500]'),
            '--tw-prose-bold': theme('colors.gray[900]'),
            '--tw-prose-counters': theme('colors.gray[600]'),
            '--tw-prose-bullets': theme('colors.gray[400]'),
            '--tw-prose-hr': theme('colors.gray[300]'),
            '--tw-prose-quotes': theme('colors.gray[900]'),
            '--tw-prose-quote-borders': theme('colors.gray[300]'),
            '--tw-prose-captions': theme('colors.gray[700]'),
            '--tw-prose-code': theme('colors.gray[800]'),
            '--tw-prose-pre-code': theme('colors.gray[100]'),
            '--tw-prose-pre-bg': theme('colors.gray[900]'),
            '--tw-prose-th-borders': theme('colors.gray[300]'),
            '--tw-prose-td-borders': theme('colors.gray[300]'),
            // invert
            '--tw-prose-invert-body': theme('colors.gray[400]'),
            '--tw-prose-invert-headings': theme('colors.gray[300]'),
            '--tw-prose-invert-lead': theme('colors.gray[300]'),
            '--tw-prose-invert-links': theme('colors.blue[500]'),
            '--tw-prose-invert-bold': theme('colors.gray[100]'),
            '--tw-prose-invert-counters': theme('colors.gray[400]'),
            '--tw-prose-invert-bullets': theme('colors.gray[600]'),
            '--tw-prose-invert-hr': theme('colors.gray[700]'),
            '--tw-prose-invert-quotes': theme('colors.gray[100]'),
            '--tw-prose-invert-quote-borders': theme('colors.gray[700]'),
            '--tw-prose-invert-captions': theme('colors.gray[400]'),
            '--tw-prose-invert-code': theme('colors.gray[200]'),
            '--tw-prose-invert-pre-code': theme('colors.pink[300]'),
            '--tw-prose-invert-pre-bg': 'rgb(0 0 0 / 50%)',
            '--tw-prose-invert-th-borders': theme('colors.gray[700]'),
            '--tw-prose-invert-td-borders': theme('colors.gray[700]')
          }
        }
      })
    }
  },
  plugins: [require('@tailwindcss/typography')]
};
|
# Copyright Peznauts <kevin@cloudnull.com>. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from collections import namedtuple
from unittest import mock
from unittest.mock import patch
from directord import main
from directord import tests
class TestMain(unittest.TestCase):
def setUp(self):
self.maxDiff = 20000
self.args = tests.FakeArgs()
self.systemdinstall = main.SystemdInstall()
parse_driver_args_se = lambda x, y, z: x
mock_parse_driver_args = mock.Mock()
mock_parse_driver_args.side_effect = parse_driver_args_se
main._parse_driver_args = mock_parse_driver_args
def tearDown(self):
pass
def test__args_default(self):
self.assertRaises(
SystemExit,
main._args,
[""],
)
def test__args_orchestrate(self):
args, _ = main._args(["orchestrate", "file1 file2"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"finger_print": False,
"force_async": False,
"job_port": 5555,
"backend_port": 5556,
"ignore_cache": False,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "orchestrate",
"wait": False,
"target": None,
"orchestrate_files": ["file1 file2"],
"poll": False,
"restrict": None,
},
)
def test__args_run(self):
args, _ = main._args(["exec", "--verb", "RUN", "command1"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "RUN",
"wait": False,
"target": None,
"exec": ["command1"],
"force_async": False,
"poll": False,
},
)
def test__args_copy(self):
args, _ = main._args(["exec", "--verb", "COPY", "file1 file2"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "COPY",
"wait": False,
"target": None,
"exec": ["file1 file2"],
"force_async": False,
"poll": False,
},
)
def test__args_add(self):
args, _ = main._args(["exec", "--verb", "ADD", "file1 file2"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "ADD",
"wait": False,
"target": None,
"exec": ["file1 file2"],
"force_async": False,
"poll": False,
},
)
def test__args_arg(self):
args, _ = main._args(["exec", "--verb", "ARG", "key value"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "ARG",
"wait": False,
"target": None,
"exec": ["key value"],
"force_async": False,
"poll": False,
},
)
def test__args_env(self):
args, _ = main._args(["exec", "--verb", "ENV", "key value"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "ENV",
"wait": False,
"target": None,
"exec": ["key value"],
"force_async": False,
"poll": False,
},
)
def test__args_workdir(self):
args, _ = main._args(["exec", "--verb", "WORKDIR", "/path"])
print(vars(args))
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "WORKDIR",
"wait": False,
"target": None,
"exec": ["/path"],
"force_async": False,
"poll": False,
},
)
def test__args_cachefile(self):
args, _ = main._args(["exec", "--verb", "CACHEFILE", "/path"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"check": False,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "exec",
"verb": "CACHEFILE",
"wait": False,
"target": None,
"exec": ["/path"],
"force_async": False,
"poll": False,
},
)
    def test__args_cacheevict(self):
        """CLI parse: ``exec --verb CACHEEVICT all`` yields the expected namespace."""
        args, _ = main._args(["exec", "--verb", "CACHEEVICT", "all"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "check": False,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "job_port": 5555,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "stream": False,
                "cache_path": "/var/cache/directord",
                "mode": "exec",
                "verb": "CACHEEVICT",
                "wait": False,
                "target": None,
                "exec": ["all"],
                "force_async": False,
                "poll": False,
            },
        )
    def test__args_query(self):
        """CLI parse: ``exec --verb QUERY var`` yields the expected namespace."""
        args, _ = main._args(["exec", "--verb", "QUERY", "var"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "check": False,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "job_port": 5555,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "stream": False,
                "cache_path": "/var/cache/directord",
                "mode": "exec",
                "verb": "QUERY",
                "wait": False,
                "target": None,
                "exec": ["var"],
                "force_async": False,
                "poll": False,
            },
        )
def test__args_server(self):
args, _ = main._args(["server"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"cache_path": "/var/cache/directord",
"mode": "server",
},
)
def test__args_client(self):
args, _ = main._args(["client"])
self.assertDictEqual(
vars(args),
{
"config_file": None,
"datastore": "file:///var/cache/directord",
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"identity": None,
"socket_group": "0",
"socket_path": "/var/run/directord.sock",
"cache_path": "/var/cache/directord",
"mode": "client",
"identity": None,
},
)
    def test__args_manage_list_nodes(self):
        """CLI parse: ``manage --list-nodes`` sets only list_nodes to True."""
        args, _ = main._args(["manage", "--list-nodes"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": True,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_list_jobs(self):
        """CLI parse: ``manage --list-jobs`` sets only list_jobs to True."""
        args, _ = main._args(["manage", "--list-jobs"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": True,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_purge_jobs(self):
        """CLI parse: ``manage --purge-jobs`` sets only purge_jobs to True."""
        args, _ = main._args(["manage", "--purge-jobs"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": True,
                "purge_nodes": False,
            },
        )
    def test__args_manage_purge_nodes(self):
        """CLI parse: ``manage --purge-nodes`` sets only purge_nodes to True."""
        args, _ = main._args(["manage", "--purge-nodes"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": True,
            },
        )
    def test__args_manage_job_info(self):
        """CLI parse: ``manage --job-info xxxx`` stores the job identifier."""
        args, _ = main._args(["manage", "--job-info", "xxxx"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": "xxxx",
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_export_jobs(self):
        """CLI parse: ``manage --export-jobs xxxx`` stores the export target."""
        args, _ = main._args(["manage", "--export-jobs", "xxxx"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": "xxxx",
                "export_nodes": None,
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_export_nodes(self):
        """CLI parse: ``manage --export-nodes xxxx`` stores the export target."""
        args, _ = main._args(["manage", "--export-nodes", "xxxx"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": "xxxx",
                "filter": None,
                "generate_keys": False,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_generate_keys(self):
        """CLI parse: ``manage --generate-keys`` sets generate_keys to True."""
        args, _ = main._args(["manage", "--generate-keys"])
        self.assertDictEqual(
            vars(args),
            {
                "config_file": None,
                "datastore": "file:///var/cache/directord",
                "debug": False,
                "driver": "zmq",
                "dump_cache": False,
                "export_jobs": None,
                "export_nodes": None,
                "filter": None,
                "generate_keys": True,
                "job_info": None,
                "job_port": 5555,
                "analyze_job": None,
                "analyze_parent": None,
                "backend_port": 5556,
                "heartbeat_interval": 60,
                "identity": None,
                "socket_group": "0",
                "socket_path": "/var/run/directord.sock",
                "cache_path": "/var/cache/directord",
                "list_jobs": False,
                "list_nodes": False,
                "mode": "manage",
                "purge_jobs": False,
                "purge_nodes": False,
            },
        )
    def test__args_manage_bootstrap(self):
        """CLI parse: ``bootstrap --catalog file`` opens and parses the catalog."""
        # ``open`` is mocked so the catalog "file" is read from the in-memory
        # TEST_CATALOG fixture rather than the filesystem.
        m = unittest.mock.mock_open(read_data=tests.TEST_CATALOG.encode())
        with patch("builtins.open", m):
            args, _ = main._args(["bootstrap", "--catalog", "file"])
            self.assertDictEqual(
                vars(args),
                {
                    "catalog": mock.ANY,
                    "config_file": None,
                    "datastore": "file:///var/cache/directord",
                    "debug": False,
                    "driver": "zmq",
                    "job_port": 5555,
                    "key_file": None,
                    "backend_port": 5556,
                    "heartbeat_interval": 60,
                    "identity": None,
                    "socket_group": "0",
                    "socket_path": "/var/run/directord.sock",
                    "threads": 10,
                    "cache_path": "/var/cache/directord",
                    "mode": "bootstrap",
                },
            )
@patch("builtins.print")
@patch("os.path.exists", autospec=True)
@patch("os.makedirs", autospec=True)
def test_systemdinstall_path_setup(
self, mock_makedirs, mock_exists, mock_print
):
mock_exists.return_value = False
with patch("builtins.open", unittest.mock.mock_open()) as m:
main.SystemdInstall().path_setup()
m.assert_called()
mock_makedirs.assert_called()
mock_print.assert_called()
@patch("os.path.exists", autospec=True)
@patch("os.makedirs", autospec=True)
def test_systemdinstall_path_setup_exists(
self, mock_makedirs, mock_exists
):
mock_exists.return_value = True
with patch("builtins.open", unittest.mock.mock_open()) as m:
main.SystemdInstall().path_setup()
m.assert_not_called()
mock_makedirs.assert_called()
@patch("jinja2.FileSystemLoader", autospec=True)
@patch("builtins.print")
@patch("os.path.exists", autospec=True)
@patch("os.makedirs", autospec=True)
def test_systemdinstall_writer(
self, mock_makedirs, mock_exists, mock_print, mock_jinja
):
mock_exists.return_value = False
with patch("builtins.open", unittest.mock.mock_open()) as m:
main.SystemdInstall().writer(service_file="testfile")
m.assert_called()
mock_print.assert_called()
mock_jinja.assert_called()
@patch("builtins.print")
@patch("os.path.exists", autospec=True)
@patch("os.makedirs", autospec=True)
def test_systemdinstall_writer_not_called(
self, mock_makedirs, mock_exists, mock_print
):
mock_exists.return_value = True
with patch("builtins.open", unittest.mock.mock_open()) as m:
main.SystemdInstall().writer(service_file="testfile")
m.assert_not_called()
mock_print.assert_called()
    @patch("jinja2.FileSystemLoader", autospec=True)
    @patch("builtins.print")
    @patch("os.path.exists", autospec=True)
    @patch("os.makedirs", autospec=True)
    def test_systemdinstall_server(
        self, mock_makedirs, mock_exists, mock_print, mock_jinja
    ):
        """writer targets /etc/systemd/system/<service_file>.

        NOTE(review): this test is named *_server but exercises
        directord-client.service (and *_client below uses the server file).
        The names look swapped; confirm before renaming.
        """
        mock_exists.return_value = False
        with patch("builtins.open", unittest.mock.mock_open()) as m:
            main.SystemdInstall().writer(
                service_file="directord-client.service"
            )
            m.assert_called_with(
                "/etc/systemd/system/directord-client.service", "w"
            )
        mock_print.assert_called()
        mock_jinja.assert_called()
    @patch("jinja2.FileSystemLoader", autospec=True)
    @patch("builtins.print")
    @patch("os.path.exists", autospec=True)
    @patch("os.makedirs", autospec=True)
    def test_systemdinstall_client(
        self, mock_makedirs, mock_exists, mock_print, mock_jinja
    ):
        """writer targets /etc/systemd/system/<service_file>.

        NOTE(review): this test is named *_client but exercises
        directord-server.service (see the *_server test above); the names
        look swapped. Confirm before renaming.
        """
        mock_exists.return_value = False
        with patch("builtins.open", unittest.mock.mock_open()) as m:
            main.SystemdInstall().writer(
                service_file="directord-server.service"
            )
            m.assert_called_with(
                "/etc/systemd/system/directord-server.service", "w"
            )
        mock_print.assert_called()
        mock_jinja.assert_called()
@patch("directord.main._args", autospec=True)
def test_main_server(self, mock__args):
_args = {
"config_file": None,
"zmq_shared_key": None,
"zmq_curve_encryption": False,
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "server",
"identity": None,
}
parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
mock__args.return_value = [parsed_args, mock.MagicMock()]
with patch("directord.server.Server", autospec=True):
main.main()
@patch("directord.client.Client", autospec=True)
@patch("directord.main._args", autospec=True)
def test_main_client(self, mock__args, mock__client):
_args = {
"config_file": None,
"zmq_shared_key": None,
"zmq_curve_encryption": False,
"debug": False,
"driver": "zmq",
"job_port": 5555,
"backend_port": 5556,
"heartbeat_interval": 60,
"socket_path": "/var/run/directord.sock",
"stream": False,
"cache_path": "/var/cache/directord",
"mode": "client",
"identity": "client1",
}
parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
mock__args.return_value = [parsed_args, mock.MagicMock()]
main.main()
mock__client.assert_called_once_with(args=parsed_args)
    @patch("directord.main._args", autospec=True)
    def test_main_exec(self, mock__args):
        """main() in exec mode dispatches to Mixin.run_exec."""
        _args = {
            "config_file": None,
            "zmq_shared_key": None,
            "zmq_curve_encryption": False,
            "debug": False,
            "driver": "zmq",
            "job_port": 5555,
            "backend_port": 5556,
            "heartbeat_interval": 60,
            "socket_path": "/var/run/directord.sock",
            "stream": False,
            "cache_path": "/var/cache/directord",
            "mode": "exec",
            "verb": "RUN",
            "target": None,
            "wait": False,
            "exec": ["command1"],
            "poll": False,
            "identity": None,
        }
        # namedtuple gives an immutable argparse.Namespace stand-in.
        parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
        mock__args.return_value = [parsed_args, mock.MagicMock()]
        with patch("directord.mixin.Mixin.run_exec", autospec=True):
            main.main()
    @patch("directord.main._args", autospec=True)
    def test_main_orchestrate(self, mock__args):
        """main() in orchestrate mode dispatches to Mixin.run_orchestration."""
        _args = {
            "config_file": None,
            "zmq_shared_key": None,
            "zmq_curve_encryption": False,
            "debug": False,
            "driver": "zmq",
            "finger_print": False,
            "job_port": 5555,
            "backend_port": 5556,
            "ignore_cache": False,
            "heartbeat_interval": 60,
            "socket_path": "/var/run/directord.sock",
            "stream": False,
            "cache_path": "/var/cache/directord",
            "mode": "orchestrate",
            "target": None,
            "wait": False,
            "orchestrate_files": ["file1 file2"],
            "poll": False,
            "restrict": None,
            "identity": None,
        }
        # namedtuple gives an immutable argparse.Namespace stand-in.
        parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
        mock__args.return_value = [parsed_args, mock.MagicMock()]
        with patch("directord.mixin.Mixin.run_orchestration", autospec=True):
            main.main()
    @patch("directord.main._args", autospec=True)
    def test_main_manage(self, mock__args):
        """main() in manage mode runs user.Manage and consumes its result."""
        _args = {
            "config_file": None,
            "zmq_shared_key": None,
            "zmq_curve_encryption": False,
            "debug": False,
            "driver": "zmq",
            "export_jobs": None,
            "export_nodes": None,
            "generate_keys": False,
            "job_info": None,
            "job_port": 5555,
            "backend_port": 5556,
            "heartbeat_interval": 60,
            "socket_path": "/var/run/directord.sock",
            "stream": False,
            "cache_path": "/var/cache/directord",
            "list_jobs": False,
            "list_nodes": True,
            "mode": "manage",
            "purge_jobs": False,
            "purge_nodes": False,
            "identity": None,
        }
        # namedtuple gives an immutable argparse.Namespace stand-in.
        parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
        mock__args.return_value = [parsed_args, mock.MagicMock()]
        with patch("directord.user.Manage.run", autospec=True) as d:
            # main() post-processes the run() result, so it must be a dict.
            d.return_value = {}
            main.main()
    @patch("directord.main._args", autospec=True)
    def test_main_bootstrap(self, mock__args):
        """main() in bootstrap mode dispatches to Bootstrap.bootstrap_cluster."""
        _args = {
            "catalog": mock.ANY,
            "config_file": None,
            "zmq_shared_key": None,
            "zmq_curve_encryption": False,
            "debug": False,
            "driver": "zmq",
            "job_port": 5555,
            "key_file": None,
            "backend_port": 5556,
            "heartbeat_interval": 60,
            "socket_path": "/var/run/directord.sock",
            "stream": False,
            "threads": 10,
            "cache_path": "/var/cache/directord",
            "mode": "bootstrap",
            "identity": None,
        }
        # namedtuple gives an immutable argparse.Namespace stand-in.
        parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
        mock__args.return_value = [parsed_args, mock.MagicMock()]
        with patch(
            "directord.bootstrap.Bootstrap.bootstrap_cluster", autospec=True
        ):
            main.main()
    @patch("directord.main._args", autospec=True)
    def test_main_fail(self, mock__args):
        """main() with an unknown mode prints help and exits non-zero."""
        _args = {
            "catalog": mock.ANY,
            "config_file": None,
            "zmq_shared_key": None,
            "zmq_curve_encryption": False,
            "debug": False,
            "driver": "zmq",
            "job_port": 5555,
            "key_file": None,
            "backend_port": 5556,
            "heartbeat_interval": 60,
            "socket_path": "/var/run/directord.sock",
            "stream": False,
            "threads": 10,
            "cache_path": "/var/cache/directord",
            "mode": "UNDEFINED",
            "identity": None,
        }
        # namedtuple gives an immutable argparse.Namespace stand-in.
        parsed_args = namedtuple("NameSpace", _args.keys())(*_args.values())
        parser = mock.MagicMock()
        mock__args.return_value = [parsed_args, parser]
        self.assertRaises(SystemExit, main.main)
        parser.print_help.assert_called()
|
/*分析组件*/
var path = require('path');
module.exports = function(compName){
var jsId = 'components/'+compName+'/'+compName+'.js';
var cssId = 'components/'+compName+'/'+compName+'.css';
var jsPath = path.resolve('components',compName,compName+'.js');
var cssPath = path.resolve('components',compName,compName+'.css');
function getJSID(){
return jsId;
}
function getCSSID(){
return cssId;
}
function getJSPATH(){
return jsPath;
}
function getCSSPATH(){
return cssPath;
}
return {
getJSID:getJSID,
getCSSID:getCSSID,
getJSPATH:getJSPATH,
getCSSPATH:getCSSPATH
}
}
|
import renderer from 'react-test-renderer'
import path from 'path'
import readPkgUp from 'read-pkg-up'
import addons from '@kadira/storybook-addons'
import runWithRequireContext from './require_context'
import createChannel from './storybook-channel-mock'
const { describe, it, expect } = global
let storybook
let configPath
const babel = require('babel-core')
const pkg = readPkgUp.sync().pkg
// Detect which flavour of storybook this project depends on.
const isStorybook =
  (pkg.devDependencies && pkg.devDependencies['@kadira/storybook']) ||
  (pkg.dependencies && pkg.dependencies['@kadira/storybook'])
const isRNStorybook =
  (pkg.devDependencies && pkg.devDependencies['@kadira/react-native-storybook']) ||
  (pkg.dependencies && pkg.dependencies['@kadira/react-native-storybook'])

/**
 * Jest entry point: load the project's storybook config, then register one
 * snapshot test per story (optionally filtered by options.storyRegex).
 */
export default function testStorySnapshots (options = {}) {
  addons.setChannel(createChannel())
  if (isStorybook) {
    storybook = require.requireActual('@kadira/storybook')
    const loadBabelConfig = require('@kadira/storybook/dist/server/babel_config').default
    const configDirPath = path.resolve(options.configPath || '.storybook')
    configPath = path.join(configDirPath, 'config.js')
    // Bug fix: babelConfig must be computed before it is used below. It was
    // previously referenced before its `const` declaration, which throws a
    // ReferenceError (temporal dead zone) at runtime.
    const babelConfig = loadBabelConfig(configDirPath)
    const content = babel.transformFileSync(configPath, babelConfig).code
    const contextOpts = {
      filename: configPath,
      dirname: configDirPath
    }
    runWithRequireContext(content, contextOpts)
  } else if (isRNStorybook) {
    storybook = require.requireActual('@kadira/react-native-storybook')
    configPath = path.resolve(options.configPath || 'storybook')
    require.requireActual(configPath)
  } else {
    throw new Error('\'storyshots\' is intended only to be used with react storybook or react native storybook')
  }
  if (typeof describe !== 'function') {
    throw new Error('\'testStorySnapshots\' is intended only to be used inside jest')
  }
  const suit = options.suit || 'Storyshots'
  const stories = storybook.getStorybook()
  for (const group of stories) {
    describe(suit, () => {
      describe(group.kind, () => {
        for (const story of group.stories) {
          if (options.storyRegex && !story.name.match(options.storyRegex)) {
            continue
          }
          it(story.name, () => {
            const context = { kind: group.kind, story: story.name }
            const renderedStory = story.render(context)
            const tree = renderer.create(renderedStory).toJSON()
            expect(tree).toMatchSnapshot()
          })
        }
      })
    })
  }
}
|
// Placeholder smoke test: keeps the jest suite green until real tests exist.
describe('index', () => {
    test('1', () => {
        expect(true).toBe(true);
    });
});
|
"""
Run this script from galaxy's root with
```
ipython -i scripts/celery_shell.py -- -c config/galaxy.yml
```
"""
import logging
import os

# Silence chatty third-party / datatype loggers *before* importing galaxy
# code, so the interactive shell remains usable.
WARNING_MODULES = ["parso", "asyncio", "galaxy.datatypes"]
for mod in WARNING_MODULES:
    logging.getLogger(mod).setLevel("WARNING")

# Imported after logging setup on purpose: db_shell parses -c/--config and
# provides the loaded galaxy config.
from scripts.db_shell import config

os.environ["GALAXY_CONFIG_FILE"] = os.environ.get("GALAXY_CONFIG_FILE", config["config_file"])
from galaxy.celery import tasks  # noqa: F401
from galaxy.celery import get_galaxy_app

# Bug fix: the example previously read ``.deley`` which is not a celery
# method and would raise AttributeError if followed; the correct call is
# ``.delay``.
HELP = """
============
Run celery tasks interactively.
tasks are collected in task module.
To run recalculate_user_disk_usage for user 1 in a celery worker
type
>>> tasks.recalculate_user_disk_usage.delay(user_id=1)
"""
app = get_galaxy_app()
print(HELP)
|
"""Package setup for fno4vc (velocity continuation with Fourier neural operators)."""
import setuptools

# The PyPI long description is taken verbatim from the README.
with open("README.md", "r") as readme:
    long_description = readme.read()

# No runtime requirements are pinned here.
reqs = []

setuptools.setup(
    name="fno4vc",
    version="0.1",
    author="Ali Siahkoohi",
    author_email="alisk@gatech.edu",
    description="Velocity continuation with Fourier neural operators",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/slimgroup/fno4vc",
    license='MIT',
    install_requires=reqs,
    packages=setuptools.find_packages()
)
|
/**
* Chloe.js: Canvas HTML5 Light Open Engine - Particle.js
* @author daPhyre
* @version 1.0.0, Fr/27/Feb/15
*/
/*jslint bitwise: true, nomen: true */
// Particle entity: declares all fields with defaults, then delegates the
// real initialisation to the prototype's Particle() method below.
function Particle(x, y, diameter, life, speed, angle, colorStart, colorEnd) {
    // Current and original (spawn) position.
    this.x = 0;
    this.y = 0;
    this.ox = 0;
    this.oy = 0;
    this.diameter = 0;
    // Remaining and original lifetime (units set by the caller -- the color
    // gradient below assumes seconds; TODO confirm).
    this.life = 0;
    this.olife = 0;
    this.speed = 0;
    this.angle = 0;
    this.rotation = 0;
    this.color = '#000';
    // Precomputed per-step colours when a colorEnd gradient is requested.
    this.colorList = [];
    this.Particle(x, y, diameter, life, speed, angle, colorStart, colorEnd);
}
Particle.prototype = {
    /**
     * Initialise the particle. Returns true on success, false (with a
     * console error) when the required arguments are missing.
     */
    Particle: function (x, y, diameter, life, speed, angle, colorStart, colorEnd) {
        if (colorStart !== undefined) {
            this.x = x;
            this.y = y;
            this.ox = x;
            this.oy = y;
            this.diameter = diameter + 1;
            this.life = life;
            this.olife = life;
            this.speed = speed;
            this.angle = angle;
            this.rotation = angle;
            this.color = colorStart;
            if (colorEnd !== undefined) {
                // Precompute one colour per life-step, stepping linearly from
                // colorStart towards colorEnd. ~~ truncates to an integer.
                // (Removed an unused local `l` that was declared here.)
                var cStart = this._hex2rgb(colorStart),
                    cEnd = this._hex2rgb(colorEnd),
                    red = ~~((cStart[0] - cEnd[0]) / (life * 1000 + 1)),
                    green = ~~((cStart[1] - cEnd[1]) / (life * 1000 + 1)),
                    blue = ~~((cStart[2] - cEnd[2]) / (life * 1000 + 1)),
                    i = 0;
                for (i = 0; i < life * 1000; i += 1) {
                    this.colorList.push('rgb(' + (cStart[0] - (i * red)) + ',' + (cStart[1] - (i * green)) + ',' + (cStart[2] - (i * blue)) + ')');
                }
            }
            return true;
        } else {
            if (window.console) {
                window.console.error('Data missing in Particle(x, y, diameter, life, speed, angle, colorStart[, colorEnd])');
            }
            return false;
        }
    },
    /**
     * Convert '#rgb' or '#rrggbb' into an [r, g, b] array. Logs and returns
     * black when the string is not hexadecimal.
     */
    _hex2rgb: function (h) {
        if (h.charAt(0) === '#') {
            var c = [];
            h = h.substring(1, 7);
            if (h.length === 3) {
                // Shorthand '#abc': expand each nibble (0xF * 17 === 0xFF).
                c[0] = parseInt(h.charAt(0), 16) * 17;
                c[1] = parseInt(h.charAt(1), 16) * 17;
                c[2] = parseInt(h.charAt(2), 16) * 17;
            } else {
                c[0] = parseInt(h.substr(0, 2), 16);
                c[1] = parseInt(h.substr(2, 2), 16);
                c[2] = parseInt(h.substr(4, 2), 16);
            }
            return c;
        } else if (window.console) {
            window.console.log('Error: Color is not hexadecimal');
        }
        return [0, 0, 0];
    }
};
|
"use strict";
// Manual browser demo for the geocoder control (not a test).
var mapboxgl = require("mapbox-gl");
var insertCss = require("insert-css");
var fs = require("fs");
// Token comes from localStorage so no key is committed to the source.
mapboxgl.accessToken = window.localStorage.getItem("MapboxAccessToken");
// Mobile-friendly viewport.
var meta = document.createElement("meta");
meta.name = "viewport";
meta.content = "initial-scale=1,maximum-scale=1,user-scalable=no";
document.getElementsByTagName("head")[0].appendChild(meta);
// Inline the stylesheets (fs.readFileSync is resolved by brfs at build time).
insertCss(fs.readFileSync("./lib/mapbox-gl-geocoder.css", "utf8"));
insertCss(
  fs.readFileSync("./node_modules/mapbox-gl/dist/mapbox-gl.css", "utf8")
);
var MapboxGeocoder = require("../");
// Full-screen map container.
var mapDiv = document.body.appendChild(document.createElement("div"));
mapDiv.style.position = "absolute";
mapDiv.style.top = 0;
mapDiv.style.right = 0;
mapDiv.style.left = 0;
mapDiv.style.bottom = 0;
var map = new mapboxgl.Map({
  container: mapDiv,
  style: "mapbox://styles/mapbox/streets-v9",
  center: [-79.4512, 43.6568],
  zoom: 13
});
var geocoder = new MapboxGeocoder({
  limit: 20,
  searchTypes: ["address", "park"]
});
// Exposed for poking at the control from the devtools console.
window.geocoder = geocoder;
// "click me" triggers a programmatic query; the second button removes the
// control from the map.
var button = document.createElement("button");
button.textContent = "click me";
var removeBtn = document.body.appendChild(document.createElement("button"));
removeBtn.style.position = "absolute";
removeBtn.style.zIndex = 10;
removeBtn.style.top = "10px";
removeBtn.style.left = "10px";
removeBtn.textContent = "Remove geocoder control";
map
  .getContainer()
  .querySelector(".mapboxgl-ctrl-bottom-left")
  .appendChild(button);
map.addControl(geocoder);
map.on("load", function() {
  button.addEventListener("click", function() {
    geocoder.query("Montreal Quebec");
  });
  removeBtn.addEventListener("click", function() {
    map.removeControl(geocoder);
  });
});
// Log geocoder events so demo behaviour is visible in the console.
geocoder.on("results", function(e) {
  console.log("results: ", e.features);
});
geocoder.on("error", function(e) {
  console.log("Error is", e.error);
});
|
/**
 * Minimal JSON syntax highlighter: HTML-escape the input, then wrap each
 * token (key, string, number, boolean, null) in a classed <span>.
 * @param {string} json JSON text (typically JSON.stringify output).
 * @returns {string} HTML-safe markup.
 */
function syntaxHighlight(json) {
    // Bug fix: the escape replacements were no-ops ('&' -> '&' etc.). The
    // result is injected as HTML, so real entities are required here.
    json = json.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    return json.replace(/("(\\u[a-zA-Z0-9]{4}|\\[^u]|[^\\"])*"(\s*:)?|\b(true|false|null)\b|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?)/g, function (match) {
        var cls = 'number';
        if (/^"/.test(match)) {
            // A quoted token followed by a colon is an object key.
            if (/:$/.test(match)) {
                cls = 'key';
            } else {
                cls = 'string';
            }
        } else if (/true|false/.test(match)) {
            cls = 'boolean';
        } else if (/null/.test(match)) {
            cls = 'null';
        }
        return '<span class="' + cls + '">' + match + '</span>';
    });
}
|
const mix = require('laravel-mix');
/*
 |--------------------------------------------------------------------------
 | Mix Asset Management
 |--------------------------------------------------------------------------
 |
 | Mix provides a clean, fluent API for defining some Webpack build steps
 | for your Laravel applications. By default, we are compiling the CSS
 | file for the application as well as bundling up all the JS files.
 |
 */
mix
    // Admin panel assets are emitted under public/{css,js}/admin.
    .css('resources/css/admin/app.css', 'public/css/admin')
    .js('resources/js/admin/app.js', 'public/js/admin')
    // Public-facing bundle.
    .js('resources/js/app.js', 'public/js')
    // Tailwind pipeline for the main stylesheet.
    .postCss('resources/css/app.css', 'public/css',
        [
            require('postcss-import'),
            require('tailwindcss'),
            require('autoprefixer'),
        ]
    );
|
# -*- coding: utf-8 -*- #
# Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to print access tokens for an Anthos cluster on AWS."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.container.aws import clusters
from googlecloudsdk.command_lib.container.aws import resource_args
from googlecloudsdk.command_lib.container.gkemulticloud import endpoint_util
@base.Hidden
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.GA)
class PrintAccessToken(base.Command):
  """Generate an access token for an Anthos cluster on AWS."""

  @staticmethod
  def Args(parser):
    """Register flags for this command."""
    resource_args.AddAwsClusterResourceArg(parser, "to access")

  def Run(self, args):
    """Run the command."""
    cluster_ref = args.CONCEPTS.cluster.Parse()
    # Point the API client at the regional gkemulticloud endpoint for the
    # cluster's location before issuing the request.
    with endpoint_util.GkemulticloudEndpointOverride(cluster_ref.locationsId,
                                                     self.ReleaseTrack()):
      cluster_client = clusters.Client(track=self.ReleaseTrack())
      return cluster_client.GenerateAccessToken(cluster_ref)
|
import React from 'react';
import '../../../styles/App.css';
export class FilterTopicsMenu extends React.Component {
constructor(props) {
super(props);
this.state = {"showTopics": false};
this.toggleTopics = this.toggleTopics.bind(this);
}
toggleTopics() {
this.setState({"showTopics": !this.state.showTopics})
}
createGrandChildTopics(rootTopic, selectedChildTopic) {
let show = (selectedChildTopic != null && selectedChildTopic.child_topics != null && selectedChildTopic.child_topics.length > 0);
if (!show) {
return null;
}
let options = selectedChildTopic.child_topics.map((grandChildTopic) => {
return <option className={"font-size--18"}
value={grandChildTopic.filterable_title}>{grandChildTopic.title}</option>
})
let selectedGrandChild = null;
selectedChildTopic.child_topics.forEach((grandChildTopic) => {
if (grandChildTopic.selected) {
selectedGrandChild = grandChildTopic;
}
})
let selectedValue = selectedGrandChild == null ? "all" : selectedGrandChild.filterable_title
return (<div className={"topic-group"}>
<select className={"topic-selection-box font-size--18 margin-left--1 col--md-14"}
name={`topic-${rootTopic.filterable_title}-grandchild`}
id={`topic-${rootTopic.filterable_title}-grandchild`}
defaultValue={"all"}
value={selectedValue}
onChange={(e) => {
let topicLevel = 2;
this.props.checkChanged(e.target.value, topicLevel)
}}>
<option className={"font-size--18"} value="all">All sub topics</option>
{options}
</select>
</div>)
}
createChildTopics(rootTopic) {
if (!rootTopic.selected) {
return null;
}
let options = rootTopic.child_topics.map((childTopic) => {
return <option className={"font-size--18"} value={childTopic.filterable_title}>{childTopic.title}</option>
})
let selectedChildTopic = null;
rootTopic.child_topics.forEach((childTopic) => {
if (childTopic.selected) {
selectedChildTopic = childTopic
}
})
let selectedValue = selectedChildTopic == null ? "all" : selectedChildTopic.filterable_title
let grandChildTopics = this.createGrandChildTopics(rootTopic, selectedChildTopic);
return (<div>
<div className={"topic-group"}>
<select className={"topic-selection-box font-size--18 margin-left--1 col--md-14"}
name={`topic-${rootTopic.filterable_title}`}
id={`topic-${rootTopic.filterable_title}`}
defaultValue={"all"}
value={selectedValue}
onChange={(e) => {
let topicLevel = 1;
this.props.checkChanged(e.target.value, topicLevel)
}}>
<option className={"font-size--18"} value="all">All sub topics</option>
{options}
</select>
</div>
{grandChildTopics}
</div>)
}
makeListModel() {
if (!this.state.showTopics || this.props.topics == null) {
return null;
}
let refinementSelectedRoot = false;
let topics = this.props.topics.map((rootTopic) => {
if (rootTopic.selected) {
refinementSelectedRoot = true;
}
let subTopics = this.createChildTopics(rootTopic);
return <li
className={"filters__item"}
key={`checkbox-topic-${rootTopic.filterable_title}`}>
<div className="filters__field">
<input id={`checkbox-topic-${rootTopic.filterable_title}`}
className="js-auto-submit__input checkbox-topic"
type="radio"
name="filter-topics" value={rootTopic.filterable_title}
checked={rootTopic.selected}
onChange={(e) => {
let topicLevel = 0;
this.props.checkChanged(e.target.value, topicLevel);
}}
/>
<label htmlFor={`checkbox-topic-${rootTopic.filterable_title}`} className={"font-size--18"}>
{rootTopic.title}
</label>
</div>
{subTopics}
</li>
});
return (<ul className="list--neutral margin-top--0 margin-bottom--0">
<li
className={"filters__item"}
key={`checkbox-topic-all`}>
<div className="filters__field">
<input id={`checkbox-topic-all`}
className="js-auto-submit__input checkbox-topic"
type="radio"
name="filter-topics" value={"all"}
checked={!refinementSelectedRoot}
onChange={(e) => {
this.props.checkChanged(e.target.value)
}}
/>
<label htmlFor={`checkbox-topic-all`} className={"font-size--18"}>
All topics
</label>
</div>
</li>
{topics}
</ul>)
}
render() {
let topicFilterList = this.makeListModel()
return <fieldset className="filters__fieldset">
<legend className="filters__sub-title font-size--18 filter-root-title" onClick={() => {
this.toggleTopics()
}}><i className={this.state.showTopics ? "up-arrow" : "down-arrow"}/><span>Topics</span>
</legend>
<div className="js-checkbox-container">
{topicFilterList}
</div>
</fieldset>
}
}
|
# coding: utf-8
from ..module import *
class Network(object):
    """Two-layer fully connected classifier (sigmoid hidden layer, softmax output)."""

    def __init__(self, params, input_size=784, hidden_size=100, output_size=10):
        # NOTE(review): the size arguments are unused here -- the shapes come
        # from the supplied ``params`` dict; they are kept for signature
        # compatibility with existing callers.
        self.params = {}
        self.params['W1'] = params['W1']
        self.params['b1'] = params['b1']
        self.params['W2'] = params['W2']
        self.params['b2'] = params['b2']

    def initParams(self, input_size=784, hidden_size=100, output_size=10, weight_init_std=0.01):
        """Re-initialise the weights with small Gaussians and zero biases."""
        self.params['W1'] = weight_init_std * np.random.randn(input_size, hidden_size)
        self.params['b1'] = np.zeros(hidden_size)
        self.params['W2'] = weight_init_std * np.random.randn(hidden_size, output_size)
        self.params['b2'] = np.zeros(output_size)

    def predict(self, x):
        """Forward pass: x -> sigmoid(x.W1 + b1) -> softmax(.W2 + b2)."""
        W1, W2 = self.params['W1'], self.params['W2']
        b1, b2 = self.params['b1'], self.params['b2']
        a1 = np.dot(x, W1) + b1
        z1 = sigmoid(a1)
        a2 = np.dot(z1, W2) + b2
        y = softmax(a2)
        return y

    def judge(self, x):
        """Return the predicted class index for an input."""
        return np.argmax(self.predict(x))

    def loss(self, x, t):
        """Cross-entropy loss of the predictions for x against targets t."""
        y = self.predict(x)
        return cross_entropy_error(y, t)

    def accuracy(self, x, t):
        """Fraction of rows in x whose argmax prediction matches one-hot t."""
        # Bug fix: ``predict`` was called as a bare name (NameError at call
        # time); it is a method on this instance.
        y = self.predict(x)
        y = np.argmax(y, axis=1)
        t = np.argmax(t, axis=1)
        accuracy = np.sum(y == t) / float(x.shape[0])
        return accuracy

    def numerical_gradient(self, x, t):
        """Finite-difference gradients of the loss w.r.t. every parameter."""
        loss_W = lambda W: self.loss(x, t)
        grads = {}
        # The bare ``numerical_gradient`` below resolves to the module-level
        # helper imported via ``from ..module import *`` -- not this method.
        grads['W1'] = numerical_gradient(loss_W, self.params['W1'])
        grads['b1'] = numerical_gradient(loss_W, self.params['b1'])
        grads['W2'] = numerical_gradient(loss_W, self.params['W2'])
        grads['b2'] = numerical_gradient(loss_W, self.params['b2'])
        return grads
|
import React from "react";
import "./electronScript";
import { BrowserRouter as Router, Route } from "react-router-dom";
// import login from "./components/loginPage/Login";
import BotListPage from "./components/BotListPage";
import BotBuildPage from "./components/BotBuildPage/";
// import DataSetPage from "./components/DataSetPage/DatasetPage";
import { ModalContextProvider } from "./context/ModalContext";
import ModalManager from "./components/modals/ModalManager";
import { ThemeProvider, CssBaseline, Box } from "@material-ui/core";
import theme from "./theme";
import ToastrManager from "./components/toastrs/ToastrManager";
import SideBar from "./components/layout/general/SideBar";
import Marketplace from "./components/Marketplace";
import Notifications from "./components/Notifications";
import Academy from "./components/Academy";
import Auth from "./components/Auth";
import Dashboard from "./components/Dashboard";
// Root component of the client: applies the MUI theme, mounts the global
// modal/toastr managers, and declares all top-level routes.
// NOTE(review): the "/" (Auth) route and the SideBar render outside the
// padded <Box>, so only the content routes get the page padding -- confirm
// that is intentional.
function App() {
  return (
    <ThemeProvider theme={theme}>
      <CssBaseline />
      <ModalContextProvider>
        <Router>
          {/* Global UI managers available on every route */}
          <ModalManager />
          <ToastrManager />
          <Route exact path="/" component={Auth} />
          <SideBar />
          <Box py={4} pl={32} pr={4}>
            <Route exact path="/dashboard" component={Dashboard} />
            <Route exact path="/bot-list" component={BotListPage} />
            <Route exact path="/marketplace" component={Marketplace} />
            <Route exact path="/notifications" component={Notifications} />
            <Route exact path="/academy" component={Academy} />
            <Route exact path="/build" component={BotBuildPage} />
          </Box>
        </Router>
      </ModalContextProvider>
    </ThemeProvider>
  );
}
export default App;
|
# -*- coding: utf-8 -*-
# Copyright 2020 Green Valley Belgium NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.7@@
from datetime import datetime
import json
import logging
from google.appengine.api import urlfetch
from google.appengine.ext import ndb, deferred
from mcfw.rpc import returns, arguments
from rogerthat.consts import MIGRATION_QUEUE
from rogerthat.dal.profile import get_user_profile
from rogerthat.models import Message, UserProfileInfo, UserProfileInfoAddress
from rogerthat.rpc import users
from rogerthat.service.api import messaging
from rogerthat.to.messaging import MemberTO, AnswerTO
from rogerthat.to.service import UserDetailsTO
from rogerthat.utils.app import get_app_user_tuple, create_app_user_by_email
from rogerthat.utils.cloud_tasks import schedule_tasks, create_task
from rogerthat.utils.location import geo_code
from solutions.common.migrations.trash_calendar.models import TrashCalendarTransferUser, \
TrashCalendarTransferSettings
# Poke tag + button answer ids used to correlate the migration message's
# buttons with the callbacks below.
POKE_TAG_TRASH_CALENDAR_TRANSFER_ADDRESS = u'trash_calendar.transfer_address'
# NOTE(review): 'tranfer' is misspelled, but the value is compared against
# answers coming back from clients -- do not "fix" the spelling without
# migrating in-flight messages.
ANSWER_ID_TRANFER = u'tranfer'
ANSWER_ID_OTHER = u'other'
ANSWER_ID_NONE = u'none'
@returns()
@arguments(service_user=users.User, status=int, answer_id=unicode, received_timestamp=int, member=unicode,
           message_key=unicode, tag=unicode, acked_timestamp=int, parent_message_key=unicode, result_key=unicode,
           service_identity=unicode, user_details=[UserDetailsTO])
def trash_transfer_address_pressed(service_user, status, answer_id, received_timestamp, member, message_key, tag,
                                   acked_timestamp, parent_message_key, result_key, service_identity, user_details):
    """Flow callback for the transfer-address poke message.

    Queues the address transfer when the user accepted, and always queues
    the answer bookkeeping/cleanup task when a button was pressed.
    """
    app_user = user_details[0].toAppUser()
    logging.info('trash_transfer_address_pressed app_user:%s answer_id:%s', app_user, answer_id)
    if answer_id:
        if answer_id == ANSWER_ID_TRANFER:
            # User opted in: copy their trash-calendar address asynchronously.
            deferred.defer(_tranfer_address, service_user, app_user, _queue=MIGRATION_QUEUE)
        # Record the answer and clean up the conversation shortly after.
        deferred.defer(_delete_message_from_user, app_user, answer_id, _countdown=5, _queue=MIGRATION_QUEUE)
def send_messages_for_service(service_user, cursor=None):
    """Fan out _send_message_to_user tasks for every user of the service's
    trash-calendar app, paging through the remote user list via `cursor`."""
    settings = TrashCalendarTransferSettings.create_key(service_user).get()
    if not settings:
        return
    response = _do_trash_request(settings, '/plugins/trash_calendar/admin/list_users', {
        'app_id': settings.app_id,
        'fetch_size': 100,
        'cursor': cursor,
    })
    tasks = [create_task(_send_message_to_user,
                         service_user,
                         create_app_user_by_email(item['email'], item['app_id']),
                         settings.branding)
             for item in response['items']]
    if response.get('cursor'):
        # More pages remain: re-queue ourselves with the next cursor.
        tasks.append(create_task(send_messages_for_service, service_user, response['cursor']))
    if tasks:
        schedule_tasks(tasks, MIGRATION_QUEUE)
def _send_message_to_user(service_user, app_user, branding):
    """Send the three-button transfer-address message to one user.

    Skips users that were already messaged, that already have addresses, or
    that have no user profile. On success records a TrashCalendarTransferUser
    entity holding the parent message key for later cleanup.
    """
    tcu_key = TrashCalendarTransferUser.create_key(app_user)
    tcu = tcu_key.get()
    if tcu:
        # Already messaged (entity created below) -- idempotent no-op.
        logging.debug('_send_message_to_user TrashCalendarTransferUser already exists')
        return
    upi = UserProfileInfo.create_key(app_user).get()
    if upi and upi.addresses:
        # User already configured an address; nothing to transfer.
        logging.debug('_send_message_to_user user already has addresses')
        return
    up = get_user_profile(app_user)
    if not up:
        logging.debug('_send_message_to_user user_profile not found')
        return
    human_user, app_id = get_app_user_tuple(app_user)
    member = MemberTO()
    member.alert_flags = Message.ALERT_FLAG_VIBRATE
    member.member = human_user.email()
    member.app_id = app_id
    # Dutch user-facing message; runtime text, do not reformat or translate.
    message = u'''Beste inwoner,
Vanaf vandaag is het mogelijk om nieuws te ontvangen op basis van locatie! Hierdoor zal u berichten krijgen van hinder, evenementen, festiviteiten en vele meer!
Om dit te kunnen ontvangen, dient u enkel uw adres in te geven.
We zien dat u dit reeds heeft ingesteld voor de afvalkalender. Wilt u hetzelfde adres gebruiken ? Klik dan op de knop 'Ja, gebruik hetzelfde adres'.
Om een ander adres te gebruiken, drukt u op de knop ' Ik wil een ander adres instellen'.'''
    answers = []
    # Button 1: accept -- triggers the ANSWER_ID_TRANFER branch in the
    # poke callback.
    btn = AnswerTO()
    btn.action = None
    btn.caption = u'Ja, gebruik hetzelfde adres'
    btn.id = ANSWER_ID_TRANFER
    btn.type = u'button'
    btn.ui_flags = 0
    answers.append(btn)
    # Button 2: decline but open the news screen to set a different address.
    btn = AnswerTO()
    btn.action = u'open://%s' % json.dumps({u'action': u'news'})
    btn.caption = u'Ik wil een ander adres instellen'
    btn.id = ANSWER_ID_OTHER
    btn.type = u'button'
    btn.ui_flags = 0
    answers.append(btn)
    # Button 3: decline outright.
    btn = AnswerTO()
    btn.action = None
    btn.caption = u'Neen bedankt'
    btn.id = ANSWER_ID_NONE
    btn.type = u'button'
    btn.ui_flags = 0
    answers.append(btn)
    with users.set_user(service_user):
        parent_message_key = messaging.send(parent_key=None,
                                            parent_message_key=None,
                                            message=message,
                                            answers=answers,
                                            flags=Message.FLAG_AUTO_LOCK,
                                            members=[member],
                                            branding=branding,
                                            tag=POKE_TAG_TRASH_CALENDAR_TRANSFER_ADDRESS,
                                            service_identity=None)
    # Persist so repeated task runs do not re-send, and so the conversation
    # can be deleted once the user answers.
    tcu = TrashCalendarTransferUser(key=tcu_key)
    tcu.service_user = service_user
    tcu.parent_message_key = parent_message_key
    tcu.answer_id = None
    tcu.put()
def _delete_message_from_user(app_user, answer_id):
    """Record the user's answer and silently delete the poke conversation."""
    tcu = TrashCalendarTransferUser.create_key(app_user).get()
    if not tcu or tcu.answer_id:
        # Unknown user, or an answer was already processed -- idempotent no-op.
        return
    tcu.answer_id = answer_id
    tcu.put()
    human_user, app_id = get_app_user_tuple(app_user)
    member = MemberTO()
    member.alert_flags = Message.ALERT_FLAG_SILENT
    member.member = human_user.email()
    member.app_id = app_id
    with users.set_user(tcu.service_user):
        messaging.delete_conversation(parent_message_key=tcu.parent_message_key,
                                      members=[member],
                                      service_identity=None)
def _tranfer_address(service_user, app_user):
    """Copy a user's trash-calendar address into their UserProfileInfo.

    Fetches the address from the remote trash-calendar service, geocodes it,
    and appends a new 'Thuis' address to the user's profile info.
    NOTE(review): the function name keeps the historical 'tranfer' typo --
    it is referenced by the deferred task above; do not rename in isolation.
    """
    tcs = TrashCalendarTransferSettings.create_key(service_user).get()
    if not tcs:
        return
    human_user, app_id = get_app_user_tuple(app_user)
    params = {
        'app_id': tcs.app_id,
        'user': {
            'email': human_user.email(),
            'app_id': app_id
        }
    }
    # Remote response is assumed to contain address.city / zip_code / street /
    # house.number / house.bus -- TODO confirm against the trash service API.
    r = _do_trash_request(tcs, '/plugins/trash_calendar/admin/get_user_data', params)
    country_code = u'BE'
    city = r['address']['city']
    zip_code = r['address']['zip_code']
    street_name = r['address']['street']
    house_nr = unicode(r['address']['house']['number'])
    # NOTE(review): if 'bus' is None this becomes the literal u'None' -- verify
    # whether the remote API guarantees a string here.
    bus_nr = unicode(r['address']['house']['bus'])
    # Build "street nr, zip" for geocoding; the country is constrained via the
    # components parameter. NOTE(review): the city is not appended -- confirm
    # zip + country is sufficient for unambiguous geocoding.
    address_string = street_name
    if house_nr:
        address_string += u' %s' % house_nr
    address_string += u', %s' % (zip_code)
    geocoded = geo_code(address_string, {'components': 'country:%s' % country_code})
    label = u'Thuis'
    distance = 3000  # notification radius in meters -- TODO confirm unit
    upi_key = UserProfileInfo.create_key(app_user)
    upi = upi_key.get()
    if not upi:
        upi = UserProfileInfo(key=upi_key)
        upi.addresses = []
    # Stable uids for deduplication: full address and street-level.
    address_uid = UserProfileInfoAddress.create_uid([country_code,
                                                     zip_code,
                                                     street_name,
                                                     house_nr,
                                                     bus_nr])
    street_uid = UserProfileInfoAddress.create_uid([country_code,
                                                    zip_code,
                                                    street_name])
    upia = UserProfileInfoAddress(created=datetime.now(),
                                  address_uid=address_uid,
                                  street_uid=street_uid,
                                  label=label,
                                  geo_location=ndb.GeoPt(geocoded['geometry']['location']['lat'],
                                                         geocoded['geometry']['location']['lng']),
                                  distance=distance,
                                  street_name=street_name,
                                  house_nr=house_nr,
                                  bus_nr=bus_nr,
                                  zip_code=zip_code,
                                  city=city,
                                  country_code=country_code)
    upi.addresses.append(upia)
    upi.put()
def _do_trash_request(tcs, base_url, params):
    """POST `params` as JSON to the trash-calendar service; return the decoded reply.

    Raises Exception on any non-200 response (after logging status and body).
    """
    url = '%s%s' % (tcs.base_url, base_url)
    headers = {
        'X-Nuntiuz-Service-Key': tcs.sik
    }
    logging.debug('_do_trash_request: %s params %s', url, params)
    result = urlfetch.fetch(url, json.dumps(params), method=urlfetch.POST, headers=headers,
                            deadline=30, follow_redirects=False)
    if result.status_code == 200:
        return json.loads(result.content)
    logging.debug(result.status_code)
    logging.debug(result.content)
    raise Exception('Failed to execute trash request')
|
// https://vuex.vuejs.org/en/getters.html
export default {
saludo(state){
if(!state.usuario) {return ''}
return `¡Bienvenido ${state.usuario.nombres}!`
},
Despadida(state){
if(!state.usuario) {return ''}
return `Espero que regreses pronto por aquí ${state.usuario.nombres}!`
}
}
|
from retinanet.dataset import Ring_Cell_all_dataset
from tqdm import tqdm
import numpy as np
import torch
# from lib.nms.pth_nms import pth_nms
from lib_new.nms.gpu_nms import gpu_nms
def nms(dets, thresh):
    """Run GPU non-maximum suppression on a tensor of detections.

    `dets` is a torch tensor whose rows are [x1, y1, x2, y2, score] (boxes
    with their score concatenated, see the caller). Returns the indices of
    the boxes kept at IoU threshold `thresh`.
    """
    # gpu_nms expects a numpy array, so detach and move to host first.
    dets = dets.cpu().detach().numpy()
    return gpu_nms(dets, thresh)
# Per-image predicted boxes ([x1, y1, x2, y2]) and their confidence scores,
# keyed by image name, parsed from a previously written prediction CSV.
pred_dict_box = {}
pred_dict_score = {}
result_str = ''
# NOTE(review): `round` shadows the builtin and is never read below -- looks
# like a leftover; confirm before removing.
round = 0
pred_csv = './test_result_new/retinanet_resnet18_round0_test_fold_0_no_negative_focal_loss_0.2_on_train_data_best_recall.csv'
with open(pred_csv, 'r') as f:
    lines = f.readlines()
# Row format: "<image_name>,<x1 y1 x2 y2 score;...;>" -- boxes separated by
# ';' with a trailing ';', fields within a box separated by spaces, score last.
for line in lines:
    line = line[:-1]  # drop the trailing newline
    line = line.split(',')
    image_name = line[0]
    pred_dict_box[image_name] = []
    pred_dict_score[image_name] = []
    if len(line[1]) != 0:
        preds = line[1].split(';')[:-1]  # trailing ';' leaves an empty tail
        for pred in preds:
            pred = pred.split(' ')
            box = []
            for elemet in pred[:-1]:
                box.append(float(elemet))
            pred_dict_box[image_name].append(box)
            pred_dict_score[image_name].append(float(pred[-1]))
# Merge ground-truth boxes (score 1.0, label 1) with predicted boxes
# (parsed scores, label 0), run NMS over the union, threshold by score, and
# collect [boxes, labels, scores] per image for the bagging CSV below.
test_dataset = Ring_Cell_all_dataset('/data/sqy/code/miccai2019/train_test_4/train_0.txt')
result_dict = {}
nms_threshold = 0.4
score_threshold = 0.05
for i, (image, bbox, image_, image_name) in enumerate(tqdm(test_dataset)):
    result_dict[image_name] = []
    gt_bbox = bbox
    gt_scores = np.ones(len(gt_bbox)).tolist()
    gt_labels = np.ones(len(gt_bbox)).tolist()
    if len(bbox) != 0:
        pred_scores = pred_dict_score[image_name]
        pred_bboxs = pred_dict_box[image_name]
        pred_labels = np.zeros(len(pred_bboxs)).tolist()
        scores = gt_scores
        scores.extend(pred_scores)
        bboxs = gt_bbox
        bboxs.extend(pred_bboxs)
        labels = gt_labels
        labels.extend(pred_labels)
        # nms over the merged (ground-truth + predicted) boxes
        bboxs = torch.Tensor(bboxs).unsqueeze(0)  # size -> [1, num_box, 4]
        scores = torch.Tensor(scores).unsqueeze(0).unsqueeze(-1)  # size -> [1, num_box, 1]
        labels = torch.Tensor(labels)  # size -> [num_box]
        anchors_nms_idx = nms(torch.cat([bboxs, scores], dim=2)[0, :, :], nms_threshold)
        bboxs = bboxs[0, anchors_nms_idx, :]
        scores = scores[0, anchors_nms_idx, 0]
        labels = labels[anchors_nms_idx]
        # BUG FIX: compute the score mask once. The original filtered `scores`
        # in place and then re-evaluated `scores >= score_threshold` on the
        # already-filtered tensor to index `labels`, selecting a wrongly sized
        # and wrongly aligned label subset.
        keep = scores >= score_threshold
        bboxs = bboxs[keep]
        labels = labels[keep]
        scores = scores[keep]
        bboxs = bboxs.numpy().tolist()
        scores = scores.numpy().tolist()
        labels = labels.numpy().tolist()
        result_dict[image_name] = [[], [], []]
        result_dict[image_name][0].extend(bboxs)
        result_dict[image_name][1].extend(labels)
        result_dict[image_name][2].extend(scores)
# Serialize result_dict back to CSV rows: "<name>,<boxes;><labels;><scores;>\n"
# (box fields space-separated with a trailing space, each list entry followed
# by ';'). PERF FIX: build the output in a list and join once -- the original
# repeated `result_str +=` is quadratic in the output size.
parts = []
for image_name in result_dict:
    parts.append(image_name)
    parts.append(',')
    entry = result_dict[image_name]
    if len(entry) != 0:
        # entry == [boxes, labels, scores]
        for box in entry[0]:
            for element in box:
                parts.append(str(element))
                parts.append(' ')
            parts.append(';')
        for label in entry[1]:
            parts.append(str(label))
            parts.append(';')
        for score in entry[2]:
            parts.append(str(score))
            parts.append(';')
    parts.append('\n')
result_str += ''.join(parts)
result_csv = '../resnet/bbox/retinanet_resnet_18_using_train_prediction_best_recall_nms_0.4_scorethreshold_0.05_for_bagging.csv'
with open(result_csv, 'w') as f:
    f.write(result_str)
|
from nj import core, operators
__all__ = ['and_', 'nor_', 'not_', 'or_']
class and_(operators.ArgsOperator):
    """Variadic logical-AND operator (presumably Mongo's ``$and`` -- behavior
    comes entirely from :class:`operators.ArgsOperator`)."""
    pass
class nor_(operators.ArgsOperator):
    """Variadic logical-NOR operator (presumably Mongo's ``$nor`` -- behavior
    comes entirely from :class:`operators.ArgsOperator`)."""
    pass
class not_(operators.UnaryOperator):
    """Unary logical-NOT operator; wraps its operand in a MongoObject."""

    def prepare(self, value: core.MongoObject_T) -> core.MongoObject:  # type: ignore
        # Ensure the unary-operator machinery receives a MongoObject, not a
        # raw value.
        return core.MongoObject(value)
class or_(operators.ArgsOperator):
    """Variadic logical-OR operator (presumably Mongo's ``$or`` -- behavior
    comes entirely from :class:`operators.ArgsOperator`)."""
    pass
|
// NOTE(review): this appears to be a Doxygen-generated search index
// (identifier -> [display name, [anchor URL, in-page flag, tooltip]]);
// regenerate via the documentation build rather than editing by hand.
var searchData=
[
  ['colors_311',['Colors',['../class_rt_cs_1_1_open_g_l_1_1_g_l_mesh.html#a5b8842017f17f06c80e3605fe1cba5a2',1,'RtCs::OpenGL::GLMesh']]],
  ['comparefunc_312',['CompareFunc',['../class_rt_cs_1_1_open_g_l_1_1_g_l_texture_sampler.html#a12a8ff9f85787eb7f4b443903bc687f4',1,'RtCs::OpenGL::GLTextureSampler']]],
  ['comparemode_313',['CompareMode',['../class_rt_cs_1_1_open_g_l_1_1_g_l_texture_sampler.html#a7ce6d1c04fd2d4afe2476c9383681125',1,'RtCs::OpenGL::GLTextureSampler']]],
  ['currentmatrix_314',['CurrentMatrix',['../class_rt_cs_1_1_open_g_l_1_1_g_l_matrix_stack.html#a038ed2806702bffe8bcbaf26455ba103',1,'RtCs.OpenGL.GLMatrixStack.CurrentMatrix()'],['../class_rt_cs_1_1_open_g_l_1_1_g_l_modelview_matrix_stack.html#aae2015897778f35c68b2b92ed46ab4d0',1,'RtCs.OpenGL.GLModelviewMatrixStack.CurrentMatrix()']]]
];
from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.core import (array, arange, atleast_1d, atleast_2d, atleast_3d,
block, vstack, hstack, newaxis, concatenate, stack)
from numpy.testing import (assert_, assert_raises,
assert_array_equal, assert_equal, run_module_suite,
assert_raises_regex, assert_almost_equal)
from numpy.compat import long
class TestAtleast1d(object):
def test_0D_array(self):
a = array(1)
b = array(2)
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1]), array([2])]
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1, 2])
b = array([2, 3])
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1, 2]), array([2, 3])]
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_3D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
a = array([a, a])
b = array([b, b])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_r1array(self):
""" Test to make sure equivalent Travis O's r1array function
"""
assert_(atleast_1d(3).shape == (1,))
assert_(atleast_1d(3j).shape == (1,))
assert_(atleast_1d(long(3)).shape == (1,))
assert_(atleast_1d(3.0).shape == (1,))
assert_(atleast_1d([[2, 3], [4, 5]]).shape == (2, 2))
class TestAtleast2d(object):
    """atleast_2d: scalars -> (1, 1); 1-d -> row vector; 2-d+ unchanged."""

    def test_0D_array(self):
        inputs = [array(1), array(2)]
        expected = [array([[1]]), array([[2]])]
        assert_array_equal([atleast_2d(v) for v in inputs], expected)

    def test_1D_array(self):
        inputs = [array([1, 2]), array([2, 3])]
        expected = [array([[1, 2]]), array([[2, 3]])]
        assert_array_equal([atleast_2d(v) for v in inputs], expected)

    def test_2D_array(self):
        inputs = [array([[1, 2], [1, 2]]), array([[2, 3], [2, 3]])]
        assert_array_equal([atleast_2d(v) for v in inputs], inputs)

    def test_3D_array(self):
        planes = [array([[1, 2], [1, 2]]), array([[2, 3], [2, 3]])]
        cubes = [array([p, p]) for p in planes]
        assert_array_equal([atleast_2d(v) for v in cubes], cubes)

    def test_r2array(self):
        """Shape promotion matches Travis O's historical r2array function."""
        assert_(atleast_2d(3).shape == (1, 1))
        assert_(atleast_2d([3j, 1]).shape == (1, 2))
        assert_(atleast_2d([[[3, 1], [4, 5]], [[3, 5], [1, 2]]]).shape == (2, 2, 2))
class TestAtleast3d(object):
    """atleast_3d pads dims: () -> (1,1,1), (n,) -> (1,n,1), (m,n) -> (m,n,1)."""

    def test_0D_array(self):
        for scalar, want in [(array(1), array([[[1]]])),
                             (array(2), array([[[2]]]))]:
            assert_array_equal(atleast_3d(scalar), want)

    def test_1D_array(self):
        for vec, want in [(array([1, 2]), array([[[1], [2]]])),
                          (array([2, 3]), array([[[2], [3]]]))]:
            assert_array_equal(atleast_3d(vec), want)

    def test_2D_array(self):
        for mat in (array([[1, 2], [1, 2]]), array([[2, 3], [2, 3]])):
            # A trailing length-1 axis is appended to 2-d input.
            assert_array_equal(atleast_3d(mat), mat[:, :, newaxis])

    def test_3D_array(self):
        for mat in (array([[1, 2], [1, 2]]), array([[2, 3], [2, 3]])):
            cube = array([mat, mat])
            assert_array_equal(atleast_3d(cube), cube)
class TestHstack(object):
    """hstack: concatenation along the second axis (first axis for 0/1-d)."""

    def test_non_iterable(self):
        assert_raises(TypeError, hstack, 1)

    def test_empty_input(self):
        assert_raises(ValueError, hstack, ())

    def test_0D_array(self):
        assert_array_equal(hstack([array(1), array(2)]), array([1, 2]))

    def test_1D_array(self):
        assert_array_equal(hstack([array([1]), array([2])]), array([1, 2]))

    def test_2D_array(self):
        column = array([[1], [2]])
        assert_array_equal(hstack([column, column]), array([[1, 1], [2, 2]]))
class TestVstack(object):
    """vstack: stack along the first axis; 0/1-d inputs become rows."""

    def test_non_iterable(self):
        assert_raises(TypeError, vstack, 1)

    def test_empty_input(self):
        assert_raises(ValueError, vstack, ())

    def test_0D_array(self):
        assert_array_equal(vstack([array(1), array(2)]), array([[1], [2]]))

    def test_1D_array(self):
        assert_array_equal(vstack([array([1]), array([2])]), array([[1], [2]]))

    def test_2D_array(self):
        column = array([[1], [2]])
        assert_array_equal(vstack([column, column]), array([[1], [2], [1], [2]]))

    def test_2D_array2(self):
        row = array([1, 2])
        assert_array_equal(vstack([row, row]), array([[1, 2], [1, 2]]))
class TestConcatenate(object):
    """np.concatenate behavior: axis validation, axis=None flattening,
    shape agreement, and the `out=` parameter (identity, shape, dtype)."""

    def test_exceptions(self):
        # test axis must be in bounds
        for ndim in [1, 2, 3]:
            a = np.ones((1,)*ndim)
            np.concatenate((a, a), axis=0)  # OK
            assert_raises(np.AxisError, np.concatenate, (a, a), axis=ndim)
            assert_raises(np.AxisError, np.concatenate, (a, a), axis=-(ndim + 1))
        # Scalars cannot be concatenated
        assert_raises(ValueError, concatenate, (0,))
        assert_raises(ValueError, concatenate, (np.array(0),))
        # test shapes must match except for concatenation axis
        a = np.ones((1, 2, 3))
        b = np.ones((2, 2, 3))
        axis = list(range(3))
        for i in range(3):
            np.concatenate((a, b), axis=axis[0])  # OK
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[1])
            assert_raises(ValueError, np.concatenate, (a, b), axis=axis[2])
            # Rotate the mismatching dimension through every axis position.
            a = np.moveaxis(a, -1, 0)
            b = np.moveaxis(b, -1, 0)
            axis.append(axis.pop(0))
        # No arrays to concatenate raises ValueError
        assert_raises(ValueError, concatenate, ())

    def test_concatenate_axis_None(self):
        # axis=None flattens all inputs and promotes to a common dtype.
        a = np.arange(4, dtype=np.float64).reshape((2, 2))
        b = list(range(3))
        c = ['x']
        r = np.concatenate((a, a), axis=None)
        assert_equal(r.dtype, a.dtype)
        assert_equal(r.ndim, 1)
        r = np.concatenate((a, b), axis=None)
        assert_equal(r.size, a.size + len(b))
        assert_equal(r.dtype, a.dtype)
        # Mixing in strings promotes everything to a string dtype.
        r = np.concatenate((a, b, c), axis=None)
        d = array(['0.0', '1.0', '2.0', '3.0',
                   '0', '1', '2', 'x'])
        assert_array_equal(r, d)
        out = np.zeros(a.size + len(b))
        r = np.concatenate((a, b), axis=None)
        rout = np.concatenate((a, b), axis=None, out=out)
        assert_(out is rout)
        assert_equal(r, rout)

    def test_large_concatenate_axis_None(self):
        # When no axis is given, concatenate uses flattened versions.
        # This also had a bug with many arrays (see gh-5979).
        x = np.arange(1, 100)
        r = np.concatenate(x, None)
        assert_array_equal(x, r)
        # This should probably be deprecated:
        r = np.concatenate(x, 100)  # axis is >= MAXDIMS
        assert_array_equal(x, r)

    def test_concatenate(self):
        # Test concatenate function
        # One sequence returns unmodified (but as array)
        r4 = list(range(4))
        assert_array_equal(concatenate((r4,)), r4)
        # Any sequence
        assert_array_equal(concatenate((tuple(r4),)), r4)
        assert_array_equal(concatenate((array(r4),)), r4)
        # 1D default concatenation
        r3 = list(range(3))
        assert_array_equal(concatenate((r4, r3)), r4 + r3)
        # Mixed sequence types
        assert_array_equal(concatenate((tuple(r4), r3)), r4 + r3)
        assert_array_equal(concatenate((array(r4), r3)), r4 + r3)
        # Explicit axis specification
        assert_array_equal(concatenate((r4, r3), 0), r4 + r3)
        # Including negative
        assert_array_equal(concatenate((r4, r3), -1), r4 + r3)
        # 2D
        a23 = array([[10, 11, 12], [13, 14, 15]])
        a13 = array([[0, 1, 2]])
        res = array([[10, 11, 12], [13, 14, 15], [0, 1, 2]])
        assert_array_equal(concatenate((a23, a13)), res)
        assert_array_equal(concatenate((a23, a13), 0), res)
        assert_array_equal(concatenate((a23.T, a13.T), 1), res.T)
        assert_array_equal(concatenate((a23.T, a13.T), -1), res.T)
        # Arrays much match shape
        assert_raises(ValueError, concatenate, (a23.T, a13.T), 0)
        # 3D
        res = arange(2 * 3 * 7).reshape((2, 3, 7))
        a0 = res[..., :4]
        a1 = res[..., 4:6]
        a2 = res[..., 6:]
        assert_array_equal(concatenate((a0, a1, a2), 2), res)
        assert_array_equal(concatenate((a0, a1, a2), -1), res)
        assert_array_equal(concatenate((a0.T, a1.T, a2.T), 0), res.T)
        # `out=` must be the returned object and receive the result.
        out = res.copy()
        rout = concatenate((a0, a1, a2), 2, out=out)
        assert_(out is rout)
        assert_equal(res, rout)

    def test_bad_out_shape(self):
        # `out` must have exactly the result's shape -- not merely its size.
        a = array([1, 2])
        b = array([3, 4])
        assert_raises(ValueError, concatenate, (a, b), out=np.empty(5))
        assert_raises(ValueError, concatenate, (a, b), out=np.empty((4,1)))
        assert_raises(ValueError, concatenate, (a, b), out=np.empty((1,4)))
        concatenate((a, b), out=np.empty(4))

    def test_out_dtype(self):
        # Safe casts into `out` succeed; unsafe (complex -> int) raise.
        out = np.empty(4, np.float32)
        res = concatenate((array([1, 2]), array([3, 4])), out=out)
        assert_(out is res)
        out = np.empty(4, np.complex64)
        res = concatenate((array([0.1, 0.2]), array([0.3, 0.4])), out=out)
        assert_(out is res)
        # invalid cast
        out = np.empty(4, np.int32)
        assert_raises(TypeError, concatenate,
                      (array([0.1, 0.2]), array([0.3, 0.4])), out=out)
def test_stack():
    """np.stack: input validation, axis placement, shape agreement and the
    np.matrix corner case."""
    # non-iterable input
    assert_raises(TypeError, stack, 1)
    # 0d input
    for input_ in [(1, 2, 3),
                   [np.int32(1), np.int32(2), np.int32(3)],
                   [np.array(1), np.array(2), np.array(3)]]:
        assert_array_equal(stack(input_), [1, 2, 3])
    # 1d input examples
    a = np.array([1, 2, 3])
    b = np.array([4, 5, 6])
    r1 = array([[1, 2, 3], [4, 5, 6]])
    assert_array_equal(np.stack((a, b)), r1)
    assert_array_equal(np.stack((a, b), axis=1), r1.T)
    # all input types
    assert_array_equal(np.stack(list([a, b])), r1)
    assert_array_equal(np.stack(array([a, b])), r1)
    # all shapes for 1d input
    arrays = [np.random.randn(3) for _ in range(10)]
    axes = [0, 1, -1, -2]
    expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    # axis beyond the new ndim (2) is rejected in both directions
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=2)
    assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=-3)
    # all shapes for 2d input
    arrays = [np.random.randn(3, 4) for _ in range(10)]
    axes = [0, 1, 2, -1, -2, -3]
    expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10),
                       (3, 4, 10), (3, 10, 4), (10, 3, 4)]
    for axis, expected_shape in zip(axes, expected_shapes):
        assert_equal(np.stack(arrays, axis).shape, expected_shape)
    # empty arrays
    assert_(stack([[], [], []]).shape == (3, 0))
    assert_(stack([[], [], []], axis=1).shape == (0, 3))
    # edge cases
    assert_raises_regex(ValueError, 'need at least one array', stack, [])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [1, np.arange(3)])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1])
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(3), 1], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.zeros((3, 3)), np.zeros(3)], axis=1)
    assert_raises_regex(ValueError, 'must have the same shape',
                        stack, [np.arange(2), np.arange(3)])
    # np.matrix cannot represent the 3-d result of stacking two 2-d matrices
    m = np.matrix([[1, 2], [3, 4]])
    assert_raises_regex(ValueError, 'shape too large to be a matrix',
                        stack, [m, m])
class TestBlock(object):
    """np.block: assembling arrays from nested lists -- row/column layouts,
    mixed ndim promotion, 3-d tiling, and nesting/validation errors."""

    def test_block_simple_row_wise(self):
        a_2d = np.ones((2, 2))
        b_2d = 2 * a_2d
        desired = np.array([[1, 1, 2, 2],
                            [1, 1, 2, 2]])
        result = block([a_2d, b_2d])
        assert_equal(desired, result)

    def test_block_simple_column_wise(self):
        a_2d = np.ones((2, 2))
        b_2d = 2 * a_2d
        expected = np.array([[1, 1],
                             [1, 1],
                             [2, 2],
                             [2, 2]])
        result = block([[a_2d], [b_2d]])
        assert_equal(expected, result)

    def test_block_with_1d_arrays_row_wise(self):
        # # # 1-D vectors are treated as row arrays
        a = np.array([1, 2, 3])
        b = np.array([2, 3, 4])
        expected = np.array([1, 2, 3, 2, 3, 4])
        result = block([a, b])
        assert_equal(expected, result)

    def test_block_with_1d_arrays_multiple_rows(self):
        a = np.array([1, 2, 3])
        b = np.array([2, 3, 4])
        expected = np.array([[1, 2, 3, 2, 3, 4],
                             [1, 2, 3, 2, 3, 4]])
        result = block([[a, b], [a, b]])
        assert_equal(expected, result)

    def test_block_with_1d_arrays_column_wise(self):
        # # # 1-D vectors are treated as row arrays
        a_1d = np.array([1, 2, 3])
        b_1d = np.array([2, 3, 4])
        expected = np.array([[1, 2, 3],
                             [2, 3, 4]])
        result = block([[a_1d], [b_1d]])
        assert_equal(expected, result)

    def test_block_mixed_1d_and_2d(self):
        a_2d = np.ones((2, 2))
        b_1d = np.array([2, 2])
        result = block([[a_2d], [b_1d]])
        expected = np.array([[1, 1],
                             [1, 1],
                             [2, 2]])
        assert_equal(expected, result)

    def test_block_complicated(self):
        # a bit more complicated: rows of mixed 2-d, 1-d and 0-d pieces
        one_2d = np.array([[1, 1, 1]])
        two_2d = np.array([[2, 2, 2]])
        three_2d = np.array([[3, 3, 3, 3, 3, 3]])
        four_1d = np.array([4, 4, 4, 4, 4, 4])
        five_0d = np.array(5)
        six_1d = np.array([6, 6, 6, 6, 6])
        zero_2d = np.zeros((2, 6))
        expected = np.array([[1, 1, 1, 2, 2, 2],
                             [3, 3, 3, 3, 3, 3],
                             [4, 4, 4, 4, 4, 4],
                             [5, 6, 6, 6, 6, 6],
                             [0, 0, 0, 0, 0, 0],
                             [0, 0, 0, 0, 0, 0]])
        result = block([[one_2d, two_2d],
                        [three_2d],
                        [four_1d],
                        [five_0d, six_1d],
                        [zero_2d]])
        assert_equal(result, expected)

    def test_nested(self):
        # block() results can themselves be blocked further
        one = np.array([1, 1, 1])
        two = np.array([[2, 2, 2], [2, 2, 2], [2, 2, 2]])
        three = np.array([3, 3, 3])
        four = np.array([4, 4, 4])
        five = np.array(5)
        six = np.array([6, 6, 6, 6, 6])
        zero = np.zeros((2, 6))
        result = np.block([
            [
                np.block([
                   [one],
                   [three],
                   [four]
                ]),
                two
            ],
            [five, six],
            [zero]
        ])
        expected = np.array([[1, 1, 1, 2, 2, 2],
                             [3, 3, 3, 2, 2, 2],
                             [4, 4, 4, 2, 2, 2],
                             [5, 6, 6, 6, 6, 6],
                             [0, 0, 0, 0, 0, 0],
                             [0, 0, 0, 0, 0, 0]])
        assert_equal(result, expected)

    def test_3d(self):
        # 2x2x2 arrangement of blocks whose sizes agree pairwise per axis
        a000 = np.ones((2, 2, 2), int) * 1
        a100 = np.ones((3, 2, 2), int) * 2
        a010 = np.ones((2, 3, 2), int) * 3
        a001 = np.ones((2, 2, 3), int) * 4
        a011 = np.ones((2, 3, 3), int) * 5
        a101 = np.ones((3, 2, 3), int) * 6
        a110 = np.ones((3, 3, 2), int) * 7
        a111 = np.ones((3, 3, 3), int) * 8
        result = np.block([
            [
                [a000, a001],
                [a010, a011],
            ],
            [
                [a100, a101],
                [a110, a111],
            ]
        ])
        expected = array([[[1, 1, 4, 4, 4],
                           [1, 1, 4, 4, 4],
                           [3, 3, 5, 5, 5],
                           [3, 3, 5, 5, 5],
                           [3, 3, 5, 5, 5]],
                          [[1, 1, 4, 4, 4],
                           [1, 1, 4, 4, 4],
                           [3, 3, 5, 5, 5],
                           [3, 3, 5, 5, 5],
                           [3, 3, 5, 5, 5]],
                          [[2, 2, 6, 6, 6],
                           [2, 2, 6, 6, 6],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8]],
                          [[2, 2, 6, 6, 6],
                           [2, 2, 6, 6, 6],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8]],
                          [[2, 2, 6, 6, 6],
                           [2, 2, 6, 6, 6],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8],
                           [7, 7, 8, 8, 8]]])
        assert_array_equal(result, expected)

    def test_block_with_mismatched_shape(self):
        a = np.array([0, 0])
        b = np.eye(2)
        assert_raises(ValueError, np.block, [a, b])
        assert_raises(ValueError, np.block, [b, a])

    def test_no_lists(self):
        # Non-list input is just converted with np.asarray semantics.
        assert_equal(np.block(1),         np.array(1))
        assert_equal(np.block(np.eye(3)), np.eye(3))

    def test_invalid_nesting(self):
        msg = 'depths are mismatched'
        assert_raises_regex(ValueError, msg, np.block, [1, [2]])
        assert_raises_regex(ValueError, msg, np.block, [1, []])
        assert_raises_regex(ValueError, msg, np.block, [[1], 2])
        assert_raises_regex(ValueError, msg, np.block, [[], 2])
        assert_raises_regex(ValueError, msg, np.block, [
            [[1], [2]],
            [[3, 4]],
            [5]  # missing brackets
        ])

    def test_empty_lists(self):
        assert_raises_regex(ValueError, 'empty', np.block, [])
        assert_raises_regex(ValueError, 'empty', np.block, [[]])
        assert_raises_regex(ValueError, 'empty', np.block, [[1], []])

    def test_tuple(self):
        # Tuples are ambiguous (could mean a literal element) and rejected.
        assert_raises_regex(TypeError, 'tuple', np.block, ([1, 2], [3, 4]))
        assert_raises_regex(TypeError, 'tuple', np.block, [(1, 2), (3, 4)])

    def test_different_ndims(self):
        # All pieces are broadcast up to the largest ndim present.
        a = 1.
        b = 2 * np.ones((1, 2))
        c = 3 * np.ones((1, 1, 3))
        result = np.block([a, b, c])
        expected = np.array([[[1., 2., 2., 3., 3., 3.]]])
        assert_equal(result, expected)

    def test_different_ndims_depths(self):
        a = 1.
        b = 2 * np.ones((1, 2))
        c = 3 * np.ones((1, 2, 3))
        result = np.block([[a, b], [c]])
        expected = np.array([[[1., 2., 2.],
                              [3., 3., 3.],
                              [3., 3., 3.]]])
        assert_equal(result, expected)
# Allow running this test module directly via the legacy numpy test runner.
if __name__ == "__main__":
    run_module_suite()
|
// DOM handles, populated by selectors() once the document has loaded.
let result,pText,encKey;
// Cache the output <div> and the two <input> fields used by the XOR demo.
function selectors() {
    result=document.querySelector('div#result');
    pText=document.querySelector('input#plainText');
    encKey=document.querySelector('input#encKey');
}
// Base-conversion helpers (string in, decimal string out).
// NOTE(review): neither is referenced elsewhere in this file -- possibly kept
// for console experimentation; confirm before deleting.
const hexToDec = (hex) => parseInt(hex,16).toString(10);
const binToDec = (bin) => parseInt(bin,2).toString(10);
// Convert a decimal value to a binary string, zero-padded to 8 bits
// (values above 255 keep their full width).
function decToBin(dec) {
    const bits = parseInt(dec, 10).toString(2);
    return "0".repeat(Math.max(0, 8 - bits.length)) + bits;
}
// Map each character of `string` to its char code as a binary string,
// zero-padded to 8 bits (code points above 255 keep their full width).
function textToBinary(string) {
    return string.split("").map((ch) => {
        const bits = ch.charCodeAt(0).toString(2);
        return "0".repeat(Math.max(0, 8 - bits.length)) + bits;
    });
}
// Re-encrypt and render the hex output on every keystroke in either input.
function handleInputChange(e) {
    result.innerHTML=repeatXOR(pText.value,encKey.value);
}
// XOR each character of `text` against the repeating `key` and return the
// result as a lowercase hex string (two digits per input character).
// The XOR is done bit-by-bit on 8-char binary strings: `^` coerces the
// "0"/"1" characters to numbers, which is why this works -- restructure
// with care.
function repeatXOR(text,key) {
    let O=textToBinary(text);   // plaintext as 8-bit binary strings
    let K=textToBinary(key);    // key as 8-bit binary strings
    let XOR=[];                 // per-character XORed bit strings
    let intXOR=[];              // scratch: bits of the current character
    for (var i=0;i<O.length;i++) {
        // the key repeats via i % K.length
        for (var j=0;j<O[i].length;j++) { intXOR.push(O[i][j]^K[i%K.length][j]); }
        XOR.push(intXOR.join(""));
        intXOR=[];
    }
    // binary string -> two-digit, zero-padded hex
    return XOR.map(e=>{
        let hex=parseInt(e,2).toString(16);
        return hex.length<2 ? "0"+hex : hex;
    }).join("");
}
// Wire everything up after the DOM is ready; render once with the initial
// (possibly empty) field values.
window.onload=function() {
    selectors();
    pText.addEventListener('keyup', function(e){handleInputChange(e);});
    encKey.addEventListener('keyup', function(e){handleInputChange(e);});
    result.innerHTML=repeatXOR(pText.value,encKey.value);
}
|
from django.http import HttpResponse, HttpResponseBadRequest
from django.template import loader
from django.views.decorators.http import require_http_methods
from django.urls import reverse
from django.contrib.auth.decorators import login_required
from django.core import serializers
from django.core.serializers.json import DjangoJSONEncoder
from NearBeach.forms import SearchForm, NewGroupForm
from NearBeach.models import group, user_group
import json
@login_required(login_url='login', redirect_field_name='')
@require_http_methods(['POST'])
def check_group_name(request):
    """Return, as serialized JSON, all non-deleted groups whose name contains
    the POSTed search text.

    :param request: authenticated POST request carrying a SearchForm
    :return: JSON HttpResponse, or 400 with form errors on invalid input
    """
    form = SearchForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest(form.errors)
    matching_groups = group.objects.filter(
        is_deleted=False,
        group_name__icontains=form.cleaned_data['search'],
    )
    return HttpResponse(
        serializers.serialize('json', matching_groups),
        content_type='application/json',
    )
@login_required(login_url='login', redirect_field_name="")
def group_information(request, group_id):
    """Render the group-information page for `group_id`.

    Serializes the group itself, all candidate parent groups, and the
    group's membership rows (user + permission set) for the front end.

    :param request: authenticated HTTP request
    :param group_id: primary key of the group being viewed
    :return: rendered HTML response
    """
    # Get the template
    t = loader.get_template('NearBeach/groups/group_information.html')
    # Get the data we want
    group_results = group.objects.get(group_id=group_id)
    # Every live group is offered as a potential parent group.
    parent_group_results = group.objects.filter(
        is_deleted=False,
    )
    # Membership rows flattened into plain dicts for JSON serialization.
    user_list_results = user_group.objects.filter(
        is_deleted=False,
        group_id=group_id,
    ).values(
        'username',
        'username__first_name',
        'username__last_name',
        'username__email',
        'group',
        'group__group_name',
        'permission_set',
        'permission_set__permission_set_name',
    ).order_by(
        'username__first_name',
        'username__last_name',
        'permission_set__permission_set_name',
    )
    # Convert into json
    user_list_results = json.dumps(list(user_list_results), cls=DjangoJSONEncoder)
    # Context
    c = {
        'group_id': group_id,
        'group_results': serializers.serialize('json', [group_results]),
        'nearbeach_title': 'Group Information %s' % group_id,
        'parent_group_results': serializers.serialize('json', parent_group_results),
        'user_list_results': user_list_results,
    }
    return HttpResponse(t.render(c, request))
@require_http_methods(['POST'])
@login_required(login_url='login', redirect_field_name='')
def group_information_save(request, group_id):
    """Save edits (name, parent group) to an existing group.

    :param request: POST request carrying a NewGroupForm payload
    :param group_id: primary key of the group being edited
    :return: empty 200 response on success, or a 400 response with the
        form errors
    """
    # NOTE(review): user-permission check not yet implemented here.
    form = NewGroupForm(request.POST)
    if not form.is_valid():
        # Errors are returned to the caller; the stray debug
        # print(form.errors) was removed for consistency with
        # new_group_save, which does not print.
        return HttpResponseBadRequest(form.errors)

    # Update the group's data
    group_update = group.objects.get(group_id=group_id)
    group_update.group_name = form.cleaned_data['group_name']
    group_update.parent_group = form.cleaned_data['parent_group']
    group_update.save()

    return HttpResponse("")
@login_required(login_url='login', redirect_field_name="")
def new_group(request):
    """Render the new-group creation page.

    :param request: the incoming request
    :return: rendered NearBeach/groups/new_group.html
    """
    # NOTE(review): user-permission check not yet implemented here.
    template = loader.get_template('NearBeach/groups/new_group.html')

    # All live groups are offered as potential parents, except the
    # reserved Administration group.
    selectable_groups = group.objects.filter(
        is_deleted=False,
    ).exclude(
        group_name__in=['Administration'],
    )

    context = {
        'group_results': serializers.serialize('json', selectable_groups),
        'nearbeach_title': 'New Group',
    }

    return HttpResponse(template.render(context, request))
@require_http_methods(['POST'])
@login_required(login_url='login', redirect_field_name='')
def new_group_save(request):
    """Create a new group from the submitted form data.

    :param request: POST request carrying a NewGroupForm payload
    :return: URL of the new group's information page, or a 400 response
        with the form errors
    """
    # NOTE(review): user-permission check not yet implemented here.
    form = NewGroupForm(request.POST)
    if not form.is_valid():
        return HttpResponseBadRequest(form.errors)

    # Create the new group
    group_submit = group(
        group_name=form.cleaned_data['group_name'],
        parent_group=form.cleaned_data['parent_group'],
        change_user=request.user,
    )
    group_submit.save()

    # FIX: args was a set literal ({...}); reverse() expects a sequence,
    # so pass a list for a well-defined positional argument order.
    return HttpResponse(reverse('group_information', args=[group_submit.group_id]))
|
#
# PySNMP MIB module OPTIX-SONET-LPBK-MIB-V2 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/OPTIX-SONET-LPBK-MIB-V2
# Produced by pysmi-0.3.4 at Wed May 1 14:35:17 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ValueSizeConstraint")
optixCommonSonet, = mibBuilder.importSymbols("OPTIX-OID-MIB", "optixCommonSonet")
MOD2Type, = mibBuilder.importSymbols("OPTIX-SONET-TC-MIB", "MOD2Type")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, iso, ObjectIdentity, MibIdentifier, Integer32, ModuleIdentity, Bits, TimeTicks, Unsigned32, Counter32, Counter64, NotificationType, Gauge32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "ObjectIdentity", "MibIdentifier", "Integer32", "ModuleIdentity", "Bits", "TimeTicks", "Unsigned32", "Counter32", "Counter64", "NotificationType", "Gauge32", "IpAddress")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# Module identity and metadata for the Huawei OptiX SONET loopback MIB.
# The loadTexts guards skip the descriptive strings unless the builder
# was asked to retain them.
optixSonetMaintenance = ModuleIdentity((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50))
if mibBuilder.loadTexts: optixSonetMaintenance.setLastUpdated('200602231756Z')
if mibBuilder.loadTexts: optixSonetMaintenance.setOrganization('Huawei Technologies Co.,Ltd.')
if mibBuilder.loadTexts: optixSonetMaintenance.setContactInfo('R&D Building Huawei Technologies Co., Ltd. Bantian, Longgang District Shenzhen, P. R. China http://www.huawei.com Zip:518129 E-mail:support@huawei.com ')
if mibBuilder.loadTexts: optixSonetMaintenance.setDescription('This module describes the Loopback interface of Huawei SONET transmit platform ')
class LpbkType(TextualConvention, Integer32):
    # Textual convention enumerating the supported loopback modes.
    description = 'Enter the description for the LpbkType TEXTUAL-CONVENTION converted from type assignment.'
    status = 'current'
    # 255 (noloop) sits outside the contiguous 1..6 range, hence the
    # explicit single-value constraint rather than a value range.
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(255, 1, 2, 3, 4, 5, 6))
    namedValues = NamedValues(("noloop", 255), ("terminal", 1), ("facility", 2), ("crs", 3), ("ds1feac", 4), ("ds3feac", 5), ("fac2ni", 6))
# Root OID node for the loopback sub-tree.
optixSonetLoopback = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10))
# lpbkStateTable: read-only view of the current loopback state, indexed
# by (facility type MOD2, slot, port, path, VT).
lpbkStateTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10), )
if mibBuilder.loadTexts: lpbkStateTable.setStatus('current')
if mibBuilder.loadTexts: lpbkStateTable.setDescription('Loopback state table ')
lpbkStateEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1), ).setIndexNames((0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkStateMOD2"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkStateSlot"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkStatePort"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkStatePath"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkStateVT"))
if mibBuilder.loadTexts: lpbkStateEntry.setStatus('current')
if mibBuilder.loadTexts: lpbkStateEntry.setDescription('Loopback state entry ')
# Index columns (all read-only).
lpbkStateMOD2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 1), MOD2Type()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStateMOD2.setStatus('current')
if mibBuilder.loadTexts: lpbkStateMOD2.setDescription('The modifier that identifies the type of the facility')
lpbkStateSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStateSlot.setStatus('current')
if mibBuilder.loadTexts: lpbkStateSlot.setDescription('Slot number. This will indicate what is the slot of the object.')
lpbkStatePort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStatePort.setStatus('current')
if mibBuilder.loadTexts: lpbkStatePort.setDescription('Port Number. This will indicate what is the Port of the object.')
lpbkStatePath = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStatePath.setStatus('current')
if mibBuilder.loadTexts: lpbkStatePath.setDescription('Path number. This will indicate what is the Path of the object.')
lpbkStateVT = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStateVT.setStatus('current')
if mibBuilder.loadTexts: lpbkStateVT.setDescription('VT path number. This will indicate what is the VT path of the object.')
# Data columns: the active loopback type and its remaining timeout.
lpbkStateLpbkType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 6), LpbkType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStateLpbkType.setStatus('current')
if mibBuilder.loadTexts: lpbkStateLpbkType.setDescription('The loopback type of the facility ')
lpbkStateTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 10, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkStateTimeout.setStatus('current')
if mibBuilder.loadTexts: lpbkStateTimeout.setDescription('The remnant time that loop-back will be released. ')
# lpbkFlagTable: loopback enable flags, same index scheme as
# lpbkStateTable (facility type MOD2, slot, port, path, VT).
lpbkFlagTable = MibTable((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20), )
if mibBuilder.loadTexts: lpbkFlagTable.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagTable.setDescription('Loopback Enable table')
lpbkFlagEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1), ).setIndexNames((0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagMOD2"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagSlot"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagPort"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagPath"), (0, "OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagVT"))
if mibBuilder.loadTexts: lpbkFlagEntry.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagEntry.setDescription('Loopback Enable entry')
# Index columns (all read-only).
lpbkFlagMOD2 = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 1), MOD2Type()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagMOD2.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagMOD2.setDescription('The modifier that identifies the type of the facility')
lpbkFlagSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagSlot.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagSlot.setDescription('Slot number. This will indicate what is the slot of the object.')
lpbkFlagPort = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagPort.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagPort.setDescription('Port Number. This will indicate what is the Port of the object.')
lpbkFlagPath = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagPath.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagPath.setDescription('Path number. This will indicate what is the Path of the object.')
lpbkFlagVT = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagVT.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagVT.setDescription('VT path number. This will indicate what is the VT path of the object.')
# Data columns: loopback type, enable/disable flag, auto-release time.
lpbkFlagLpbkType = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 6), LpbkType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagLpbkType.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagLpbkType.setDescription('The loopback type of the facility ')
lpbkFlagEnFlag = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disable", 0), ("enable", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagEnFlag.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagEnFlag.setDescription('T1 or T3 remote loop-back enable flag ')
lpbkFlagTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 10, 20, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lpbkFlagTimeout.setStatus('current')
if mibBuilder.loadTexts: lpbkFlagTimeout.setDescription('The automatic release time of loopback.')
# Conformance section: object group covering every column above, plus a
# basic module-compliance statement referencing that group.
optixSonetMaintenanceConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 11))
optixSonetMaintenanceGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 11, 1))
currentObjectGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 11, 1, 1)).setObjects(("OPTIX-SONET-LPBK-MIB-V2", "lpbkStateMOD2"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStateSlot"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStatePort"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStatePath"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStateVT"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStateLpbkType"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkStateTimeout"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagMOD2"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagSlot"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagPort"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagPath"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagVT"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagLpbkType"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagEnFlag"), ("OPTIX-SONET-LPBK-MIB-V2", "lpbkFlagTimeout"))
# setStatus is only available on newer pysnmp builders.
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    currentObjectGroup = currentObjectGroup.setStatus('current')
if mibBuilder.loadTexts: currentObjectGroup.setDescription('Enter the description of the created OBJECT-GROUP.')
optixSonetMaintenanceCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 11, 2))
basicCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 2011, 2, 25, 3, 20, 50, 11, 2, 1)).setObjects(("OPTIX-SONET-LPBK-MIB-V2", "currentObjectGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    basicCompliance = basicCompliance.setStatus('current')
if mibBuilder.loadTexts: basicCompliance.setDescription('Enter the description of the created MODULE-COMPLIANCE.')
# Export every defined symbol so other MIB modules can import them.
mibBuilder.exportSymbols("OPTIX-SONET-LPBK-MIB-V2", lpbkFlagTable=lpbkFlagTable, lpbkFlagEntry=lpbkFlagEntry, lpbkStateTimeout=lpbkStateTimeout, optixSonetMaintenanceGroups=optixSonetMaintenanceGroups, optixSonetMaintenance=optixSonetMaintenance, lpbkFlagPath=lpbkFlagPath, lpbkFlagMOD2=lpbkFlagMOD2, lpbkStateSlot=lpbkStateSlot, lpbkFlagPort=lpbkFlagPort, lpbkFlagSlot=lpbkFlagSlot, lpbkStatePath=lpbkStatePath, optixSonetMaintenanceConformance=optixSonetMaintenanceConformance, lpbkStatePort=lpbkStatePort, lpbkStateTable=lpbkStateTable, LpbkType=LpbkType, currentObjectGroup=currentObjectGroup, lpbkFlagLpbkType=lpbkFlagLpbkType, optixSonetLoopback=optixSonetLoopback, lpbkStateVT=lpbkStateVT, lpbkStateEntry=lpbkStateEntry, lpbkStateLpbkType=lpbkStateLpbkType, lpbkStateMOD2=lpbkStateMOD2, lpbkFlagEnFlag=lpbkFlagEnFlag, lpbkFlagTimeout=lpbkFlagTimeout, PYSNMP_MODULE_ID=optixSonetMaintenance, optixSonetMaintenanceCompliances=optixSonetMaintenanceCompliances, lpbkFlagVT=lpbkFlagVT, basicCompliance=basicCompliance)
|
/**
* Copyright (c) 2017 ZipRecruiter
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
* */
const chai = require('chai');
const expect = chai.expect;
const HttpServer = require('..').HttpServer;
const Driver = require('../clients/js/chrome_driver_proxy');
const Chrome = require('selenium-webdriver/chrome').Driver;
const ChromeOptions = require('selenium-webdriver/chrome').Options;
const chromedriverBin = process.env.CHROMEDRIVER_BIN || '/usr/bin/chromedriver';
// Integration tests for the webdriver proxy with the chrome pool turned on.
describe('Proxy with chrome pool enabled', () => {
  const mockServerUrl = 'http://127.0.0.1:8080';
  let server;
  let driver;

  before(function (done) {
    // Proxy + chromedriver + chrome-pool configuration under test.
    const config = {
      proxy: { port: 4444, baseUrl: '/wd/hub' },
      chromedriver: {
        chromedriverPath: chromedriverBin,
        port: 4445,
        autoRestart: false,
      },
      chromePool: {
        enable: true,
        reuse: true,
        chromePath: '/usr/bin/google-chrome',
        chromeAgentModule: 'test/custom_agent_module.js',
      },
    };
    server = new HttpServer(config.proxy);
    this.timeout(5000); // server start-up can be slow
    server.start(config, done);
  });

  after((done) => {
    server.stop(done);
  });

  beforeEach(() => {
    // Fresh headless-chrome session routed through the proxy.
    const opts = new ChromeOptions();
    opts.addArguments('headless', 'disable-gpu', 'no-first-run', 'no-sandbox');
    driver = Driver.createSession('http://127.0.0.1:4444/wd/hub', opts.toCapabilities());
  });

  afterEach((done) => {
    driver.quit().then(() => { done(); });
  });

  it('can run basic selenium test on custom agent', (done) => {
    driver.get(`${mockServerUrl}/base.html`).then(() => { done(); });
  });
});
|
/* Minified Nepali date-picker jQuery plugin ($.fn.nepaliDatePicker).
 * Supports single- and multi-date selection (Ctrl/Cmd and Shift ranges),
 * BS<->AD conversion via an embedded month-length table (2000-2098 BS).
 * NOTE(review): source is minified and hard-wrapped mid-token by the
 * extraction; left byte-identical below — do not hand-edit; restore the
 * original unminified source before making changes. */
(function(e){function t(e){let t=e.split("-"),a=t[0]+"-";return a+=1==t[1].length?"0"+t[1]:t[1],a+="-",a+=1==t[2].length?"0"+t[2]:t[2],a}function a(){let t=b.length;if(t<1)e(".andp-datepicker-container").removeClass("open").hide();else{if(b=b.sort(function(e,t){return e=e.split("/").reverse().join(""),t=t.split("/").reverse().join(""),e>t?1:e<t?-1:0}),E)j.attr("value",b[0]).val(b[0]);else{for(e('input.andp-hidden-dates[data-cal_id="'+y+'"]').remove(),i=0;i<=t-1;i++)c(b[i]);var a="";a=1==j.data("show_all_dates")?j.is(":input")?b.join(", "):"<span>"+b.join("</span><span>")+"</span>":t>1?t+" dates selected":b[0],j.is(":input")?j.attr("value",a).val(a):j.html(a)}e(".andp-datepicker-container").removeClass("open").hide(),selected_date=e(this).data("date")}}function n(t){e(".andp-datepicker-container").removeClass("open").hide();var a=e('.andp-datepicker-container[data-cal_id="'+y+'"]');if(a.length>0)return w=a.find(".andp-year-select"),C=a.find(".andp-month-select"),N=a.find(".andp-days-numbers"),a.addClass("open").show(),void s();var n='<div class="andp-datepicker-container" data-cal_id="'+y+'" >';for(n+='<div class = "andp-header">',n+='<button type = "button" class = "andp-prev andp-change-months"> ❮ </button>',n+='<select class = "andp-month-select"> </select>',n+='<select class = "andp-year-select"> </select>',n+='<button type = "button" class = "andp-next andp-change-months"> ❯ </button> ',n+="</div>",n+='<div class="andp-body">',n+='<div class = "andp-days-names"> <div> SUN </div> <div> MON </div> <div> TUE </div> <div> WED </div> <div> THU </div> <div> FRI </div> <div> SAT </div> </div>',n+='<div class = "andp-days-numbers"> </div>',E||(control_key="mac"==T?"CMD":"CTRL",n+='<div class="andp-info" style="display:none"><i class="mdi mdi-information text-primary"></i> Press <strong>'+control_key+"</strong> or <strong>Shift</strong> key for multiple selection </div>"),n+='<div class="andp-action-btns">',E||(n+='<button type="button" class="apply-date" 
data-cal_id="'+y+'">Apply</button>'),n+="</div>",n+="</div>",n+="</div>",_.append(n),a=e('.andp-datepicker-container[data-cal_id="'+y+'"]'),w=a.find(".andp-year-select"),C=a.find(".andp-month-select"),N=a.find(".andp-days-numbers"),append_html='<option value = "01" '+("01"==M?"selected":" ")+" > Baisakh </option>",append_html+='<option value = "02" '+("02"==M?"selected":"")+" > Jestha </option>",append_html+='<option value = "03" '+("03"==M?"selected":"")+" > Asar </option>",append_html+='<option value = "04" '+("04"==M?"selected":"")+" > Shrawan </option>",append_html+='<option value = "05" '+("05"==M?"selected":"")+" > Bhadra </option>",append_html+='<option value = "06" '+("06"==M?"selected":"")+" > Ashoj </option>",append_html+='<option value = "07" '+("07"==M?"selected":"")+" > Kartik </option>",append_html+='<option value = "08" '+("08"==M?"selected":"")+" > Mangsir </option>",append_html+='<option value = "09" '+("09"==M?"selected":"")+" > Poush </option>",append_html+='<option value = "10" '+("10"==M?"selected":"")+" > Magh </option> ",append_html+='<option value = "11" '+("11"==M?"selected":"")+" > Falgun </option>",append_html+='<option value = "12" '+("12"==M?"selected":"")+" > Chaitra </option>",C.append(append_html),i=k;i<=Y;i++)append_html='<option value="'+i+'"',i==D&&(append_html+=" selected"),append_html+=">"+i+"</option>",w.append(append_html);l(),e('.andp-datepicker-container[data-cal_id="'+y+'"]').addClass("open"),s()}function s(){var t=j.offset(),a=j.outerHeight(),n=e(window).width(),i=j.outerWidth(),s=e(".andp-datepicker-container").outerWidth();if(s+t.left+10>n){var l=n-(t.left+i);e('.andp-datepicker-container[data-cal_id="'+y+'"]').css({top:t.top+a,right:l,left:"inherit"})}else e('.andp-datepicker-container[data-cal_id="'+y+'"]').css({top:t.top+a,left:t.left,right:"inherit"})}function l(){month=C.val(),year=w.val(),N.html("");var e=new u;e.setNepaliDate(year,month,1);var t=e.getDay(),a=d(year,month);append_html="";var 
n=1,s=1,l=parseInt(t)-2,o=1;for(i=1;i<=42;i++){last_month=parseInt(month)-1,last_year=parseInt(year),last_month<1&&(last_month=12,last_year-=1,last_year<k&&(last_year=k,last_month=1)),next_month=parseInt(month)+1,next_year=parseInt(year);var h=d(last_year,last_month);if(1==n&&(append_html+='<div class="andp-column">'),i<t)append_html+='<div class="old-dates"> '+parseInt(h-l)+" </div>",l-=1;else if(s<=a){let e=s<10?"0"+s:s,t=year+"-"+month+"-"+e,a=b.indexOf(t);append_html+='<div class="day'+(a>=0?" selected":"")+'" data-date="'+t+'">'+s+"</div>",s++}else append_html+='<div class="old-dates"> '+o+"</div>",o++;7==n&&(append_html+="</div>",n=0),n++}N.append(append_html)}function d(e,t){var a=new u;if(!(e<k||e>Y||t<1||t>12)){e=e-k,t=t-1;return a.nepaliMonths[e][t]}}function o(e,t){e=e.split("-"),t=t.split("-");var a=new u;return a.setNepaliDate(e[0],e[1],e[2]),a.getNepaliDateDifference(t[0],t[1],t[2])}function h(e,t){e=e.split("-"),t=t.split("-");var a=new u;a.setNepaliDate(e[0],e[1],e[2]);var n=[a.getEnglishYear(),a.getEnglishMonth(),a.getEnglishDate()];a.setNepaliDate(t[0],t[1],t[2]);var i=[a.getEnglishYear(),a.getEnglishMonth(),a.getEnglishDate()],s=new Date(n[0],n[1],n[2]),l=new Date(i[0],i[1],i[2]);return s>l&&1}function r(e){e=e.split("-"),year=parseInt(e[0]),month=parseInt(e[1]);var t=parseInt(d(year,month));return day=parseInt(e[2])+1,day>t&&(day=1,month+=1,month>12&&(month=1,year+=1)),year+"-"+month+"-"+day}function p(a,n=!1){a=t(a);var i=b.indexOf(a),s=e('.andp-datepicker-container[data-cal_id="'+y+'"]'),l=s.find('.day[data-date="'+a+'"]');n?l.addClass("soft-select"):i<0?(b.push(a),l.addClass("selected")):(b.splice(i,1),l.removeClass("selected"))}function c(e){L.append('<input class="andp-hidden-dates" type="hidden" data-cal_id="'+y+'" name="'+x+'[]" value="'+e+'">')}function 
u(){this.englishMonths=[31,28,31,30,31,30,31,31,30,31,30,31],this.englishLeapMonths=[31,29,31,30,31,30,31,31,30,31,30,31],this.nepaliMonths=[[30,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,30,29,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,30],[31,32,31,32,31,30,30,30,29,30,29,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,31,32,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,31,29,30,30,29,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,3
0],[31,32,31,32,31,30,30,30,29,29,30,30],[31,32,31,32,31,30,30,30,29,30,29,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,30],[31,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,31,32,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,30,30,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[30,32,31,32,31,31,29,30,29,30,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,29,31],[31,31,32,31,31,31,30,29,30,29,30,30],[31,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,29,30,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,30],[31,32,31,32,31,30,30,30,29,30,29,31],[31,31,31,32,31,31,30,29,30,29,30,30],[31,31,32,31,31,31,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,29,30,30],[31,31,32,32,31,30,30,30,29,30,30,30],[30,32,31,32,31,30,30,30,29,30,30,30],[31,31,32,31,31,30,30,30,29,30,30,30],[31,31,32,31,31,30,30,30,29,30,30,30],[31,32,31,32,30,31,30,30,29,30,30,30],[30,32,31,32,31,30,30,30,29,30,30,30],[31,31,32,31,31,31,30,30,29,30,30,30],[30,31,32,32,30,31,30,30,29,30,30,30],[30,32,31,32,31,30,30,30,29,30,30,30],[30,32,31,32,31,30,30,30,29,30,30,30],[31,31,32,31,31,31,30,30,29,30,30,30],[30,31,32,32,31,30,30,30,29,30,30,30],[30,32,31,32,31,30,30,30,29,30,30,30],[31,31,32,31,31,30,30,30,29,30,30,30],[31,31,32,31,31,31,30,29,30,30,30,30],[30,31,32,32,31,30,30,29,30,29,30,30],[31,32,31,32,31,30,30,30,29,30,30,30],[31,31,32,31,31,31,29,30,29,30,29,31],[31,31,32,31,31,31,30,29,29,30,30,30]],this.setCurrentDate=function(){var e=new 
Date;this.setEnglishDate(e.getFullYear(),e.getMonth()+1,e.getDate())},this.setEnglishDate=function(e,t,a){if(!this.isEnglishRange(e,t,a))throw new Exception("Invalid date format.");this.englishYear=e,this.englishMonth=t,this.englishDate=a,this.nepaliYear=2e3,this.nepaliMonth=1,this.nepaliDate=1;for(var n=this.getEnglishDateDifference(1943,4,14),i=0;n>=this.nepaliYearDays(i);)this.nepaliYear++,n-=this.nepaliYearDays(i),i++;for(var s=0;n>=this.nepaliMonths[i][s];)n-=this.nepaliMonths[i][s],this.nepaliMonth++,s++;this.nepaliDate=this.nepaliDate+n,this.getDay()},this.toEnglishString=function(e){return void 0===e&&(e="-"),this.englishYear+e+this.englishMonth+e+this.englishDate},this.getEnglishDateDifference=function(e,t,a){var n=this.countTotalEnglishDays(this.englishYear,this.englishMonth,this.englishDate)-this.countTotalEnglishDays(e,t,a);return n<0?-n:n},this.countTotalEnglishDays=function(e,t,a){for(var n=365*e+a,i=0;i<t-1;i++)n+=this.englishMonths[i];return n+=this.countleap(e,t),n},this.countleap=function(e,t){return t<=2&&e--,Math.floor(e/4)-Math.floor(e/100)+Math.floor(e/400)},this.isEnglishRange=function(e,t,a){return!(e<1944||e>2042)&&(!(t<1||t>12)&&!(a<1||a>31))},this.isLeapYear=function(e){return e%4==0&&(e%100!=0||e%400==0)},this.setNepaliDate=function(e,t,a){if(this.isNepaliRange(e,t,a)){this.nepaliYear=e,this.nepaliMonth=t,this.nepaliDate=a,this.englishYear=1944,this.englishMonth=1,this.englishDate=1;for(var n=this.getNepaliDateDifference(2e3,9,17);n>=(this.isLeapYear(this.englishYear)?366:365);)n-=this.isLeapYear(this.englishYear)?366:365,this.englishYear++;for(var i=this.isLeapYear(this.englishYear)?this.englishLeapMonths:this.englishMonths,s=0;n>=i[s];)this.englishMonth++,n-=i[s],s++;this.englishDate=this.englishDate+n,this.getDay()}else console.log("Invalid Date Format")},this.toNepaliString=function(e){return void 0===e&&(e="-"),this.nepaliYear+e+this.nepaliMonth+e+this.nepaliDate},this.getNepaliDateDifference=function(e,t,a){var 
n=this.countTotalNepaliDays(this.nepaliYear,this.nepaliMonth,this.nepaliDate)-this.countTotalNepaliDays(e,t,a);return n<0?-n:n},this.countTotalNepaliDays=function(e,t,a){var n=0;if(e<2e3)return 0;n+=a-1;for(var i=e-2e3,s=0;s<t-1;s++)n+=this.nepaliMonths[i][s];for(s=0;s<i;s++)n+=this.nepaliYearDays(s);return n},this.nepaliYearDays=function(e){for(var t=0,a=0;a<12;a++)t+=this.nepaliMonths[e][a];return t},this.isNepaliRange=function(e,t,a){return!(e<2e3||e>2098)&&(!(t<1||t>12)&&!(a<1||a>this.nepaliMonths[e-2e3][t-1]))},this.getDay=function(){var e=this.getEnglishDateDifference(1943,4,14);return this.weekDay=(3+e%7)%7+1,this.weekDay},this.getEnglishYear=function(){return this.englishYear},this.getEnglishMonth=function(){return this.englishMonth},this.getEnglishDate=function(){return this.englishDate},this.getNepaliYear=function(){return this.nepaliYear},this.getNepaliMonth=function(){return this.nepaliMonth},this.getNepaliDate=function(){return this.nepaliDate}}var _=e("body"),f=new u;f.setCurrentDate();let g=f.nepaliYear+"-"+f.nepaliMonth+"-"+f.nepaliDate;e("form").attr("autocomplete","off");var v,m=1,y="",D=f.getNepaliYear(),M=f.getNepaliMonth(),k=2e3,Y=2098,E=0,w="",C="",N="",b=[],I="",x="",T="win",j="",L="";e.fn.nepaliDatePicker=function(){navigator.platform.toUpperCase().indexOf("MAC")>=0&&(T="mac"),e(this).each(function(){let t=e(this);e(this).attr("data-cal_id","cal-"+m),m++,e(this).addClass("andp-date-picker");var a=e.trim(e(this).attr("value"));let n=e(this).data("single");if(E=1==n||1==n?1:0,a&&!E){L=e(this).parents("form"),y=e(this).data("cal_id"),x=e(this).attr("name");let n=a.split(",");if(n.forEach(function(e,t){c(e.trim())}),1!=e(this).data("show_all_dates"))n.length>1?output_msg=n.length+" dates selected":output_msg=n[0],e(this).attr("value",output_msg);else if(!t.is("input")){let 
e=a.split(","),n="<span>"+e.join("</span><span>")+"</span>";t.append(n)}}}),e(this).click(function(){if(v=this,b=[],j=e(this),data_single=e(this).data("single"),E=1==data_single||1==data_single?1:0,y=e(this).data("cal_id"),n(this),E)selected_date=t(e(this).val()),selected_date.length>0?(older_date_ar=selected_date.split("-"),C.val(older_date_ar[1]).change(),w.val(older_date_ar[0]).change(),p(selected_date)):p(g,!0);else{L=e(this).parents("form"),x=e(this).attr("name"),x?e(this).removeAttr("name","").attr("data-name",x):x=e(this).attr("data-name");var a=e('input.andp-hidden-dates[data-cal_id="'+y+'"]'),i=a.length;if(i>0)if(1==i)selected_date=t(a.eq(0).val()),older_date_ar=selected_date.split("-"),C.val(older_date_ar[1]).change(),w.val(older_date_ar[0]).change(),p(selected_date);else{older_date=e('input.andp-hidden-dates[data-cal_id="'+y+'"]');let n=older_date.length;older_date=t(older_date.eq(n-1).val()),older_date&&older_date.length>0&&(older_date_ar=older_date.split("-"),C.val(older_date_ar[1]).change(),w.val(older_date_ar[0]).change()),a.each(function(){let a=t(e(this).val());p(a)})}else p(g,!0)}}),_.on("change",".andp-month-select, .andp-year-select",function(){l()})},_.on("click",".andp-datepicker-container.open .andp-change-months",function(t){selected_month=parseInt(C.val()),selected_year=parseInt(w.val()),e(this).hasClass("andp-next")?(selected_month+=1,selected_month>12&&(selected_month=1,selected_year+=1,selected_year>Y&&(selected_year=Y,selected_month=12))):(selected_month-=1,selected_month<1&&(selected_month=12,selected_year-=1,selected_year<k&&(selected_year=k,selected_month=1))),selected_month<10&&(selected_month="0"+selected_month),selected_year<10&&(selected_year="0"+selected_year),C.val(selected_month).change(),w.val(selected_year).change()}),_.on("click",".andp-datepicker-container.open .andp-days-numbers .day",function(t){selected_day=e(this).text(),selected_date=e(this).data("date");var 
n=e('.andp-datepicker-container[data-cal_id="'+y+'"]');if(E)b=[],n.find(".andp-column .day").removeClass("selected"),p(selected_date),n.find(".andp-info").hide(),a();else if(t.shiftKey){var s=b.length;if(s>0){selected_date=e(this).data("date"),I=b[s-1];var l=h(selected_date,I)?I:selected_date,d=l,c=o(selected_date,I);for(b=[],n.find(".andp-column .day").removeClass("selected"),p(d),i=1;i<=c;i++)d=r(d),p(d)}}else t.ctrlKey||t.metaKey?p(selected_date):(b=[],n.find(".andp-column .day").removeClass("selected"),p(selected_date),n.find(".andp-info").show());e("document").trigger("andp_date_selected",[b,v])}),_.on("click",function(t){var a=e(".andp-datepicker-container, .andp-date-picker");a.is(t.target)||0!==a.has(t.target).length||e(".andp-datepicker-container").removeClass("open").hide()}),_.on("click",".andp-datepicker-container.open .apply-date",function(){a()})})(jQuery);
|
import constants from '../constants';
// import { AsyncStorage, } from 'react-web';
// import customSettings from '../content/config/settings.json';
// import Immutable from 'immutable';
// Flux-style action creators for the "dynamic" state slice.
const dynamic = {
  // Store an arbitrary value under the given property name.
  setDynamicData(prop, value) {
    const payload = { prop, value, };
    return { type: constants.dynamic.SET_DYNAMIC_DATA, payload };
  },
  // Keep a live socket reference in the store.
  setSocket(socket) {
    const payload = { socket, };
    return { type: constants.dynamic.SET_SOCKET, payload };
  },
};
export default dynamic;
|
// Guard so a second submit is ignored while the fake auth animation runs.
var working = false;
$('#login').on('submit', function(e) {
  // BUG FIX: the original handler took no parameter, so `e` was undefined
  // and e.preventDefault() threw — letting the form submit anyway.
  e.preventDefault();
  if (working) return;
  working = true;
  var $this = $(this),
      $state = $this.find('button > .state');
  $this.addClass('loading');
  $state.html('Authenticating');
  // Show "Authenticating" for 3s, then hold a further 4s before resetting
  // the button label and re-enabling submission.
  setTimeout(function() {
    setTimeout(function() {
      $state.html('Log in');
      $this.removeClass('loading');
      working = false;
    }, 4000);
  }, 3000);
});
|
from collections import defaultdict
import os
import numpy as np
# Puzzle input lives next to this script.
INPUT = os.path.join(os.path.dirname(__file__), "input.txt")
with open(INPUT) as f:
    lines = f.readlines()
# Line 0 is the polymer template; line 1 is blank; the remaining lines
# are "AB -> C" pair-insertion rules.
polymer = lines[0].rstrip()
insertion_rules = {}
for l in lines[2:]:
    k, v = l.rstrip().split(" -> ")
    insertion_rules[k] = v
# Part 1:
def grow_polymer(p):
    """Return the polymer string after one round of pair insertions.

    Walks every adjacent pair of ``p``; when the pair has an insertion
    rule, the new character is placed between the two. The final
    character is appended once at the end.
    """
    pieces = []
    for left, right in zip(p, p[1:]):
        pieces.append(left)
        inserted = insertion_rules.get(left + right)
        if inserted is not None:
            pieces.append(inserted)
    pieces.append(p[-1])
    return "".join(pieces)
# Part 1: materialize the polymer explicitly for 10 steps, then score it
# as (most common element count) - (least common element count).
for i in range(10):
    polymer = grow_polymer(polymer)
polymer_arr = np.array(list(polymer))
symbols, counts = np.unique(polymer_arr, return_counts=True)
print(np.max(counts) - np.min(counts))
# Part 2 - gotta go big: 40 steps is too large to build as a string, so
# track only how many times each adjacent pair occurs.
polymer = lines[0].rstrip()
pair_counter = defaultdict(int)
for pair in zip(polymer, polymer[1:]):
    pair_counter["".join(pair)] += 1
def grow_polymer_dict(pair_c):
    """Advance a {pair: count} mapping by one insertion round.

    A pair "AB" with rule "AB -> C" spawns the pairs "AC" and "CB",
    each inheriting the original count; pairs without a rule carry over
    unchanged.
    """
    next_counts = defaultdict(int)
    for pair, count in pair_c.items():
        inserted = insertion_rules.get(pair)
        if inserted is None:
            next_counts[pair] += count
        else:
            next_counts[pair[0] + inserted] += count
            next_counts[inserted + pair[1]] += count
    return next_counts
def count_chars(pair_c):
    """Recover per-character counts from a {pair: count} mapping.

    Every interior character of the polymer belongs to exactly two pairs
    while the two end characters belong to one, so summing both members
    of every pair double-counts interiors; ceiling division by two yields
    the true count for every character.
    """
    char_dict = defaultdict(int)
    for pair, count in pair_c.items():
        char_dict[pair[0]] += count
        char_dict[pair[1]] += count
    for char, count in char_dict.items():
        # Exact integer ceiling division replaces np.ceil, which returned
        # float64 and could lose precision for very large counts.
        char_dict[char] = (count + 1) // 2
    return char_dict
# Part 2: advance 40 steps on pair counts only, then recover the
# per-character totals and print the same most-minus-least score.
for i in range(40):
    pair_counter = grow_polymer_dict(pair_counter)
char_counts = list(count_chars(pair_counter).values())
print(np.max(char_counts) - np.min(char_counts))
|
# n = n
# time = O(1)
# space = O(1)
# done time = 15m
class Solution:
    def minPartitions(self, n: str) -> int:
        """Minimum number of deci-binary numbers that sum to ``n``.

        Each deci-binary addend contributes at most 1 to every digit
        position, so the answer is simply the largest digit of ``n``.
        """
        return max(int(digit) for digit in n)
|
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
/* Print invocation help to stderr and terminate with a failure status. */
void usage(){
fprintf(stderr,"tone <frequency_Hz>,[<amplitude>] [<frequency_Hz>,[<amplitude>]...]\n");
exit(1);
}
/* Generate 10 seconds of 44.1 kHz audio as a sum of sine tones given on
 * the command line ("freq[,amp]" per argument) and write raw 16-bit
 * little-endian samples to stdout. */
int main (int argc,char *argv[]){
    int i,j;
    double *f;
    double *amp;

    if(argc<2)usage();

    /* One frequency (and optional amplitude) per argument.
     * Checked malloc replaces the original non-standard, unchecked
     * alloca: a huge argc can no longer smash the stack silently. */
    f=malloc(sizeof(*f)*(argc-1));
    amp=malloc(sizeof(*amp)*(argc-1));
    if(!f||!amp){
        fprintf(stderr,"out of memory\n");
        exit(1);
    }

    i=0;
    while(argv[i+1]){
        /* Amplitude after the comma is normalized [0..1] and scaled to
         * the 16-bit range; absent means full scale. */
        char *pos=strchr(argv[i+1],',');
        f[i]=atof(argv[i+1]);
        if(pos)
            amp[i]=atof(pos+1)*32767.f;
        else
            amp[i]=32767.f;
        fprintf(stderr,"%g Hz, %g amp\n",f[i],amp[i]);
        i++;
    }

    /* 10 seconds of samples; each clamped 16-bit value is emitted twice
     * (two identical channels). */
    for(i=0;i<44100*10;i++){
        float val=0;
        int ival;
        for(j=0;j<argc-1;j++)
            val+=amp[j]*sin(i/44100.f*f[j]*2*M_PI);
        ival=rint(val);
        if(ival>32767)ival=32767;      /* integer clamps: ival is an int */
        if(ival<-32768)ival=-32768;
        fprintf(stdout,"%c%c%c%c",
            (char)(ival&0xff),
            (char)((ival>>8)&0xff),
            (char)(ival&0xff),
            (char)((ival>>8)&0xff));
    }

    free(f);
    free(amp);
    return(0);
}
|
{"version":3,"sources":["calendar-search.js"],"names":["window","Search","calendar","data","this","util","filterId","minSearchStringLength","showCounters","counters","id","className","pluralMessageId","value","invitation","filter","BX","Main","filterManager","getById","filterApi","getApi","addCustomEvent","delegate","applyFilter","prototype","getFilter","updateCounters","i","_this","cleanNode","countersCont","countersWrap","appendChild","create","props","length","text","message","attrs","data-bx-counter","html","getMessagePlural","events","click","counter","appplyCounterEntries","innerHTML","counterId","setFilter","preset_id","beforeFilterApply","isFilterEmpty","ctx","promise","params","autoResolve","getView","resetFilterMode","resetSearchFilter","fulfill","setView","animation","setTimeout","applyFilterMode","request","action","handler","response","entries","filterMode","displaySearchResult","push","BXEventCalendar","Entry","displayResult","type","isPlainObject","undefined","searchField","getSearch","getLastSquare","getSearchString","searchInput","resetFilter"],"mappings":"CAAC,SAAUA,GAEV,SAASC,EAAOC,EAAUC,GAEzBC,KAAKF,SAAWA,EAChBE,KAAKC,KAAOD,KAAKF,SAASG,KAC1BD,KAAKE,SAAWH,EAAKG,SACrBF,KAAKG,sBAAwB,EAE7BH,KAAKI,aAAe,MACpBJ,KAAKK,WAEHC,GAAI,aACJC,UAAW,8BACXC,gBAAiB,wBACjBC,MAAOV,EAAKM,SAASK,YAAc,IAUrCV,KAAKW,OAASC,GAAGC,KAAKC,cAAcC,QAAQf,KAAKE,UACjD,GAAIF,KAAKW,OACT,CACCX,KAAKgB,UAAYhB,KAAKW,OAAOM,SAE7BL,GAAGM,eAAe,uBAAwBN,GAAGO,SAASnB,KAAKoB,YAAapB,QAI1EH,EAAOwB,WACNC,UAAW,WAEV,OAAOtB,KAAKW,QAGbY,eAAgB,WAEf,IAAIC,EAAGC,EAAQzB,KAEfA,KAAKI,aAAe,MAEpBQ,GAAGc,UAAU1B,KAAKF,SAAS6B,cAC3B3B,KAAK4B,aAAe5B,KAAKF,SAAS6B,aAAaE,YAAYjB,GAAGkB,OAAO,OAAQC,OAAQxB,UAAW,6BAEhG,IAAKiB,EAAI,EAAGA,EAAIxB,KAAKK,SAAS2B,OAAQR,IACtC,CACC,GAAIxB,KAAKK,SAASmB,IAAMxB,KAAKK,SAASmB,GAAGf,MAAQ,EACjD,CACCT,KAAKI,aAAe,KACpB,OAIF,GAAIJ,KAAKI,aACT,CACCJ,KAAK4B,aAAaC,YAAYjB,GAAGkB,OAAO,QACvCC,OAAQxB,UAAW,8BACnB0B,KAAMrB,GAAGsB,QAAQ,oBAAsB,OAGxC,IAAKV,EAAI,EAAGA,EAAIxB,KAAKK,SAAS2B,OAAQR,IACtC,CACC,GAA
IxB,KAAKK,SAASmB,IAAMxB,KAAKK,SAASmB,GAAGf,MAAQ,EACjD,CACCT,KAAK4B,aAAaC,YAAYjB,GAAGkB,OAAO,QACvCC,OAAQxB,UAAW,6BAA+B,IAAMP,KAAKK,SAASmB,GAAGjB,WACzE4B,OAAQC,kBAAmBpC,KAAKK,SAASmB,GAAGlB,IAC5C+B,KAAM,wCACL,yCAA2CrC,KAAKK,SAASmB,GAAGf,MAAQ,UACpE,uCAAyCT,KAAKC,KAAKqC,iBAAiBtC,KAAKK,SAASmB,GAAGhB,gBAAgBR,KAAKK,SAASmB,GAAGf,OAAS,UAChI,UACA8B,QACCC,MAAO,SAAWC,GAEjB,OAAO,WAENhB,EAAMiB,qBAAqBD,EAAQnC,KAJ9B,CAMJN,KAAKK,SAASmB,cAOtB,CACCxB,KAAK4B,aAAae,UAAY/B,GAAGsB,QAAQ,oBAI3CQ,qBAAsB,SAASE,GAE9B,GAAIA,GAAa,aACjB,CACC5C,KAAKgB,UAAU6B,WACdC,UAAW,uCAKdC,kBAAmB,WAElB,IAAK/C,KAAKgD,gBACV,IAWD5B,YAAa,SAASd,EAAIP,EAAMkD,EAAKC,EAASC,GAG7C,GAAIA,EACJ,CACCA,EAAOC,YAAc,MAEtB,GAAIpD,KAAKgD,gBACT,CACC,GAAIhD,KAAKF,SAASuD,UAAUC,gBAC5B,CACCtD,KAAKF,SAASuD,UAAUC,iBAAiBC,kBAAmB,QAE7D,GAAIL,EACJ,CACCA,EAAQM,eAIV,CACCxD,KAAKF,SAAS2D,QAAQ,QAASC,UAAW,QAC1CC,WAAW/C,GAAGO,SAAS,WAEtBnB,KAAKF,SAASuD,UAAUO,mBACtB5D,MAAO,KAEVA,KAAKF,SAAS+D,SACb9D,MACC+D,OAAQ,mBAETC,QAASnD,GAAGO,SAAS,SAAS6C,GAE7B,GAAIA,GAAYA,EAASC,QACzB,CACC,IAAKjE,KAAKF,SAASuD,UAAUa,WAC7B,CACCP,WAAW/C,GAAGO,SAAS,WAEtBnB,KAAKF,SAASuD,UAAUO,kBACxB5D,KAAKmE,oBAAoBH,IACvBhE,MAAO,SAGX,CACCA,KAAKmE,oBAAoBH,IAI3B,GAAId,EACJ,CACCA,EAAQM,YAEPxD,UAKNmE,oBAAqB,SAASH,GAE7B,IAAIxC,EAAGyC,KACP,IAAKzC,EAAI,EAAGA,EAAIwC,EAASC,QAAQjC,OAAQR,IACzC,CACCyC,EAAQG,KAAK,IAAIxE,EAAOyE,gBAAgBC,MAAMtE,KAAKF,SAAUkE,EAASC,QAAQzC,KAE/ExB,KAAKF,SAASuD,UAAUkB,cAAcN,GAEtC,GAAIrD,GAAG4D,KAAKC,cAAcT,EAAS3D,UACnC,CACC,IAAKmB,EAAI,EAAGA,EAAIxB,KAAKK,SAAS2B,OAAQR,IACtC,CACC,GAAIwC,EAAS3D,SAASL,KAAKK,SAASmB,GAAGlB,MAAQoE,UAC/C,CACC1E,KAAKK,SAASmB,GAAGf,MAAQuD,EAAS3D,SAASL,KAAKK,SAASmB,GAAGlB,KAAO,GAGrEN,KAAKuB,mBAIPyB,cAAe,WAEd,IAAI2B,EAAc3E,KAAKW,OAAOiE,YAC9B,OAAQD,EAAYE,mBAAqBF,EAAYG,mBAAqBH,EAAYG,kBAAkB9C,OAAShC,KAAKG,wBAGvH4E,YAAa,aAIbC,YAAa,WAEZhF,KAAKW,OAAOqE,gBAId,GAAIpF,EAAOyE,gBACX,CACCzE,EAAOyE,gBAAgBxE,OAASA,MAGjC,CACCe,GAAGM,eAAetB,EAAQ,wBAAyB,WAElDA,EAAOyE,gBAAgBxE,OAASA,MA9NlC,CAiOED","file":""}
|
import Typography from 'typography';
import sutroTheme from 'typography-theme-sutro';
import { css } from 'styled-components';
import { theme } from './theme';
// Force anchors to the app's primary color on top of the Sutro theme.
sutroTheme.overrideThemeStyles = () => ({
  a: { color: theme.colors.primary },
});
const typography = new Typography(sutroTheme);
// Hot reload typography in development.
if (process.env.NODE_ENV !== 'production') {
  typography.injectStyles();
}
// Styled components wrapper for typography.scale
// Expose typography.scale(amount) as a styled-components CSS fragment.
export function scale(amount = 1) {
  const { fontSize, lineHeight } = typography.scale(amount);
  return css`
    font-size: ${fontSize};
    line-height: ${lineHeight};
  `;
}
export const { options, rhythm } = typography;
export default typography;
|
from __future__ import unicode_literals
from unittest import TestCase
from pandagg.query import (
Terms,
Term,
Fuzzy,
Exists,
Ids,
Prefix,
Range,
Regexp,
TermsSet,
Wildcard,
)
class TermLevelQueriesTestCase(TestCase):
    """Term-level query clauses: body, serialization and line_repr.

    Each test builds the clause via the explicit ``field=``/kwargs form
    and, where supported, the ``<field>={...}`` shorthand, asserting that
    every construction yields the same body, serialized query and
    one-line representation.
    """
    def test_fuzzy_clause(self):
        body = {"user": {"value": "ki"}}
        expected = {"fuzzy": body}
        q1 = Fuzzy(field="user", value="ki")
        q2 = Fuzzy(user="ki")
        q3 = Fuzzy(user={"value": "ki"})
        for q in (q1, q2, q3):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(q.line_repr(depth=None), 'fuzzy, field=user, value="ki"')
    def test_exists_clause(self):
        body = {"field": "user"}
        expected = {"exists": body}
        q = Exists(field="user")
        self.assertEqual(q.body, body)
        self.assertEqual(q.serialize(), expected)
        self.assertEqual(q.line_repr(depth=None), "exists, field=user")
    def test_ids_clause(self):
        body = {"values": [1, 4, 100]}
        expected = {"ids": body}
        q = Ids(values=[1, 4, 100])
        self.assertEqual(q.body, body)
        self.assertEqual(q.serialize(), expected)
        self.assertEqual(q.line_repr(depth=None), "ids, values=[1, 4, 100]")
    def test_prefix_clause(self):
        body = {"user": {"value": "ki"}}
        expected = {"prefix": body}
        q = Prefix(field="user", value="ki")
        self.assertEqual(q.body, body)
        self.assertEqual(q.serialize(), expected)
        self.assertEqual(q.line_repr(depth=None), 'prefix, field=user, value="ki"')
    def test_range_clause(self):
        body = {"age": {"gte": 10, "lte": 20, "boost": 2}}
        expected = {"range": body}
        q1 = Range(field="age", gte=10, lte=20, boost=2)
        q2 = Range(age={"gte": 10, "lte": 20, "boost": 2})
        for q in (q1, q2):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(
                q.line_repr(depth=None), "range, field=age, boost=2, gte=10, lte=20"
            )
    def test_regexp_clause(self):
        body = {
            "user": {
                "value": "k.*y",
                "flags": "ALL",
                "max_determinized_states": 10000,
                "rewrite": "constant_score",
            }
        }
        expected = {"regexp": body}
        tag = 'regexp, field=user, flags="ALL", max_determinized_states=10000, rewrite="constant_score", value="k.*y"'
        q1 = Regexp(
            field="user",
            value="k.*y",
            flags="ALL",
            max_determinized_states=10000,
            rewrite="constant_score",
        )
        q2 = Regexp(
            user={
                "value": "k.*y",
                "flags": "ALL",
                "max_determinized_states": 10000,
                "rewrite": "constant_score",
            }
        )
        for q in (q1, q2):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(q.line_repr(depth=None), tag)
    def test_term_clause(self):
        body = {"user": {"value": "Kimchy", "boost": 1}}
        expected = {"term": body}
        q1 = Term(field="user", value="Kimchy", boost=1)
        q2 = Term(user={"value": "Kimchy", "boost": 1})
        for q in (q1, q2):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(
                q.line_repr(depth=None), 'term, field=user, boost=1, value="Kimchy"'
            )
        # other format: a bare scalar is wrapped into {"value": ...}
        q3 = Term(user="Kimchy")
        self.assertEqual(q3.body, {"user": {"value": "Kimchy"}})
        self.assertEqual(q3.serialize(), {"term": {"user": {"value": "Kimchy"}}})
        self.assertEqual(q3.line_repr(depth=None), 'term, field=user, value="Kimchy"')
    def test_terms_clause(self):
        # note: unlike term, the "boost" parameter sits at the same level as "user"
        body = {"user": ["kimchy", "elasticsearch"], "boost": 1}
        expected = {"terms": body}
        q = Terms(user=["kimchy", "elasticsearch"], boost=1)
        self.assertEqual(q.body, body)
        self.assertEqual(q.serialize(), expected)
        self.assertEqual(
            q.line_repr(depth=None), 'terms, boost=1, user=["kimchy", "elasticsearch"]',
        )
    def test_terms_set_clause(self):
        body = {
            "programming_languages": {
                "terms": ["c++", "java", "php"],
                "minimum_should_match_field": "required_matches",
            }
        }
        expected = {"terms_set": body}
        q1 = TermsSet(
            field="programming_languages",
            terms=["c++", "java", "php"],
            minimum_should_match_field="required_matches",
        )
        q2 = TermsSet(
            programming_languages={
                "terms": ["c++", "java", "php"],
                "minimum_should_match_field": "required_matches",
            }
        )
        for q in (q1, q2):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(
                q.line_repr(depth=None),
                'terms_set, field=programming_languages, minimum_should_match_field="required_matches", terms=["c++", "java", "php"]',
            )
    def test_wildcard_clause(self):
        body = {"user": {"value": "ki*y", "boost": 1.0, "rewrite": "constant_score"}}
        expected = {"wildcard": body}
        q1 = Wildcard(field="user", value="ki*y", boost=1.0, rewrite="constant_score")
        q2 = Wildcard(user={"value": "ki*y", "boost": 1.0, "rewrite": "constant_score"})
        for q in (q1, q2):
            self.assertEqual(q.body, body)
            self.assertEqual(q.serialize(), expected)
            self.assertEqual(
                q.line_repr(depth=None),
                'wildcard, field=user, boost=1.0, rewrite="constant_score", value="ki*y"',
            )
|
"""
Django settings for DjangoBlogClone project.
Generated by 'django-admin startproject' using Django 1.11.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Explicit template directory for the blog app (also covered by APP_DIRS).
TEMPLATES_DIR = os.path.join(BASE_DIR, 'blog/templates/blog')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control — rotate it and
# load it from the environment before any real deployment.
SECRET_KEY = '^e3-976l1g!1#kv!xeckp%jcn#hpg*gs9h^e#rks2q5jff2k^9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): must list the served hostnames when DEBUG is turned off.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'blog',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'DjangoBlogClone.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATES_DIR, ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'DjangoBlogClone.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# After a successful login, send users to the site root.
LOGIN_REDIRECT_URL = '/'
|
const util = require("../util");
// Loop states in numeric order; music.loop stores an index into this array
// (0 = none, 1 = track, 2 = queue).
const modes = ["none", "track", "queue"];
// User-facing spellings accepted for each canonical mode.
const aliases = {
  single: "track",
  track: "track",
  song: "track",
  this: "track",
  current: "track",
  all: "queue",
  every: "queue",
  queue: "queue",
  off: "none",
  none: "none",
  nothing: "none"
};
// "loop" command: with an argument, sets the loop mode; without one,
// reports the current mode.
module.exports = {
  name: "loop",
  aliases: ["repeat"],
  exec: (msg, args) => {
    const { music } = msg.guild;
    if (!music.player) return msg.channel.send(util.embed().setDescription("<a:unchecked:865931554650718249> Currently not playing anything."));
    if (args[0]) {
      // Changing the mode requires being in the bot's voice channel.
      if (!msg.member.voice.channel)
        return msg.channel.send(util.embed().setDescription("<a:unchecked:865931554650718249> You must be on a voice channel."));
      if (msg.guild.me.voice.channel && !msg.guild.me.voice.channel.equals(msg.member.voice.channel))
        return msg.channel.send(util.embed().setDescription(`<a:unchecked:865931554650718249> You must be on ${msg.guild.me.voice.channel} to use this command.`));
      const loopMode = aliases[args[0].toLowerCase()];
      if (loopMode && modes.includes(loopMode)) {
        music.loop = modes.indexOf(loopMode);
        msg.channel.send(util.embed().setDescription(music.loop === 0 ? "<a:certo:865931553628356629> Loop disabled." : `<a:certo:865931553628356629> Set loop to ${modes[music.loop]}.`));
      } else {
        msg.channel.send(
          util.embed()
            .setDescription("<a:unchecked:865931554650718249> Invalid loop mode. Try single/all/off.")
        );
      }
    } else {
      msg.channel.send(util.embed().setDescription(`<a:certo:865931553628356629> Current loop mode: ${modes[music.loop]}`));
    }
  }
};
|
/* local config */
// NOTE(review): the private keys below are test fixtures for local dev
// chains only — never reuse them on a public network.
const ETH_NODE_URL= 'http://127.0.0.1:8545'
const FORCE_BRIDGER_SERVER_URL = 'http://127.0.0.1:3003' //update to your force server url
const CKB_INDEXER_URL= 'http://127.0.0.1:8116'
const NODE_URL = 'http://127.0.0.1:8114/' //update to your node url
const RichCKBPrivkey = "0xa6b023fec4fc492c23c0e999ab03b01a6ca5524a3560725887a8de4362f9c9cc";
const RichETHPrivkey = '0xc4ad657963930fbff2e9de3404b30a4e21432c89952ed430b56bf802945ed37a' //update with your own private key
const recipientETHAddress = '0x17c4b5CE0605F63732bfd175feCe7aC6b4620FD2'//orig; bob:'0xBeB7C1d39B59DF17613F82AF0EC265565414d608'
// Zero address denotes native ETH (as opposed to an ERC-20 token).
const ETH_TOKEN_ADDRESS = '0x0000000000000000000000000000000000000000'
// Test config
const DAI_TOKEN_ADDRESS = '0xC4401D8D5F05B958e6f1b884560F649CdDfD9615'
const USDT_TOKEN_ADDRESS = '0x1cf98d2a2f5b0BFc365EAb6Ae1913C275bE2618F'
const USDC_TOKEN_ADDRESS = '0x1F0D2251f51b88FaFc90f06F7022FF8d82154B1a'
const TokenLockerAddress = '0x4347818B33aaf0b442A977900585B9ad1e1B581F'
// lock params
const bridgeFee = '0x0'
const isBid = false;
// unlock params
const unlockFee = "0x1"
const unlockAmount = "0x2"
const burnTxFee = "0.1"
const ORDERBOOK_LOCK_CODEHASH = '0x279bee9fa98959029766c0e0ce19cd91b7180fd15b600a9e95140149b524c53b'
const ORDERBOOK_LOCK_TYPE = 'type'
const PW_LOCK_CODEHASH = '0x58c5f491aba6d61678b7cf7edf4910b1f5e00ec0cde2f42e0abb4fd9aff25a63'
const PW_LOCK_HASHTYPE = 'type'
// CKB lock script owned by the recipient's ETH key (PW-lock).
const userPWEthLock = {
  codeHash: PW_LOCK_CODEHASH,
  hashType: PW_LOCK_HASHTYPE,
  args: recipientETHAddress,
};
const lumos_db_tmp = "lumos_db_tmp/"
// const path = require('path')
// const LUMOS_DB = path.join(lumos_db_tmp, 'lumos_db')
const LUMOS_DB = ""
module.exports = {
  ETH_NODE_URL,
  FORCE_BRIDGER_SERVER_URL,
  NODE_URL,
  RichETHPrivkey,
  userPWEthLock,
  bridgeFee,
  isBid,
  unlockFee,
  unlockAmount,
  burnTxFee,
  ORDERBOOK_LOCK_CODEHASH,
  ORDERBOOK_LOCK_TYPE,
  recipientETHAddress,
  RichCKBPrivkey,
  lumos_db_tmp,
  LUMOS_DB,
  CKB_INDEXER_URL,
  DAI_TOKEN_ADDRESS,
  USDT_TOKEN_ADDRESS,
  USDC_TOKEN_ADDRESS,
  ETH_TOKEN_ADDRESS,
  TokenLockerAddress,
}
|
import React, { useState, useCallback } from "react"
import { Youtube } from "../embeds"
import { viewport } from "../../lib/infinite-util.js"
import { dragging, wheeling } from "../../pages/compose"
import {
HoverButtons,
Crosshair,
selection,
shouldHide,
inspectorForElement,
} from "./common"
// Canvas element that embeds a YouTube player at a world-space position.
// Shows inspector controls and hover buttons, and a "loading" placeholder
// until the embed fires onLoad.
const InfiniteYoutube = ({ context, scale, x, y, id, selected, ...save }) => {
  // World coordinates -> current viewport coordinates for this pan/zoom.
  const { viewportX, viewportY } = viewport(x, y, context)
  const [isHovering, setIsHovering] = useState(false)
  const [loaded, setLoaded] = useState(false)
  // Options persist in save.options; a fresh element defaults to an empty
  // src and a scale of 1/scale.
  const [options, setOptions] = useState(
    save.options
      ? save.options
      : {
          scale: 1 / scale,
          src: "",
        }
  )
  // Persist the given (or current) options back onto the canvas element.
  const pushStateToCanvas = useCallback(
    opts => {
      context.saveElement(id, { options: opts ? opts : options })
    },
    [id, context, options]
  )
  const onClick = e => selection(e, id, context, selected)
  // Early return sits after every hook call, keeping hook order stable.
  if (shouldHide(id, context)) return null
  return (
    <>
      {inspectorForElement(
        id,
        context,
        selected,
        options,
        setOptions,
        pushStateToCanvas
      )}
      <HoverButtons
        id={id}
        scale={scale}
        context={context}
        hovering={isHovering}
        setHovering={setIsHovering}
        dragging={dragging}
        viewportX={viewportX}
        viewportY={viewportY}
        adjustX={20}
        adjustY={20}
        options={options}
      />
      <div
        style={{
          position: "fixed",
          top: viewportY,
          left: viewportX,
          width: 340,
          height: 240,
          transform: `scale(${context.zoom.scale / scale})`,
          transformOrigin: "top left",
        }}
        onMouseEnter={e => setIsHovering(true)}
        onMouseLeave={e => setIsHovering(false)}
        onClick={onClick}
      >
        <Youtube
          style={{
            position: "fixed",
            top: 20,
            left: 20,
          }}
          src={options.src}
          id={`youtube-${id}`}
          onLoad={e => setLoaded(true)}
        />
        {!loaded ? (
          <p
            style={{
              position: "absolute",
              color: "grey",
              fontFamily: "sans-serif",
              top: 100,
              left: 100,
              zIndex: -1,
            }}
          >
            loading
          </p>
        ) : null}
      </div>
    </>
  )
}
export default InfiniteYoutube
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import packaging.version
import pkg_resources
import google.auth # type: ignore
import google.api_core # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.servicecontrol_v1.types import quota_controller
# Client-info defaults: report this package's installed version, or fall
# back to an anonymous ClientInfo when it is not installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            'google-cloud-service-control',
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
# Detect the installed google-auth version; _get_scopes_kwargs uses it to
# decide whether the "default_scopes" kwarg is supported (>= 1.25.0).
try:
    # google.auth.__version__ was added in 1.26.0
    _GOOGLE_AUTH_VERSION = google.auth.__version__
except AttributeError:
    try: # try pkg_resources if it is available
        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
    except pkg_resources.DistributionNotFound: # pragma: NO COVER
        _GOOGLE_AUTH_VERSION = None
class QuotaControllerTransport(abc.ABC):
    """Abstract transport class for QuotaController."""
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/cloud-platform',
        'https://www.googleapis.com/auth/servicecontrol',
    )
    DEFAULT_HOST: str = 'servicecontrol.googleapis.com'
    def __init__(
            self, *,
            host: str = DEFAULT_HOST,
            credentials: ga_credentials.Credentials = None,
            credentials_file: Optional[str] = None,
            scopes: Optional[Sequence[str]] = None,
            quota_project_id: Optional[str] = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            always_use_jwt_access: Optional[bool] = False,
            **kwargs,
            ) -> None:
        """Instantiate the transport.
        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ':' not in host:
            host += ':443'
        self._host = host
        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
        # Save the scopes.
        self._scopes = scopes
        # If no credentials are provided, then determine the appropriate
        # defaults. Precedence: explicit credentials > credentials_file >
        # application default credentials.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                                credentials_file,
                                **scopes_kwargs,
                                quota_project_id=quota_project_id
                            )
        elif credentials is None:
            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
        # If the credentials are service account credentials, then always try to use self signed JWT.
        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
            credentials = credentials.with_always_use_jwt_access(True)
        # Save the credentials.
        self._credentials = credentials
    # TODO(busunkim): This method is in the base transport
    # to avoid duplicating code across the transport classes. These functions
    # should be deleted once the minimum required versions of google-auth is increased.
    # TODO: Remove this function once google-auth >= 1.25.0 is required
    @classmethod
    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
        scopes_kwargs = {}
        if _GOOGLE_AUTH_VERSION and (
            packaging.version.parse(_GOOGLE_AUTH_VERSION)
            >= packaging.version.parse("1.25.0")
        ):
            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
        else:
            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
        return scopes_kwargs
    def _prep_wrapped_messages(self, client_info):
        """Precompute retry/timeout-wrapped versions of the RPC methods."""
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.allocate_quota: gapic_v1.method.wrap_method(
                self.allocate_quota,
                default_timeout=None,
                client_info=client_info,
            ),
        }
    def close(self):
        """Closes resources associated with the transport.
        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()
    @property
    def allocate_quota(self) -> Callable[
            [quota_controller.AllocateQuotaRequest],
            Union[
                quota_controller.AllocateQuotaResponse,
                Awaitable[quota_controller.AllocateQuotaResponse]
            ]]:
        # Abstract RPC hook: the base class raises; concrete transports are
        # expected to provide the bound callable.
        raise NotImplementedError()
__all__ = (
'QuotaControllerTransport',
)
|
//
// WWCommandEyeRing.h
// APIObjectiveC
//
// Created by Kevin Liang on 3/31/14.
// Copyright (c) 2014 Wonder Workshop inc. (https://www.makewonder.com/) All rights reserved.
//
#import "WWCommand.h"
/**
 * `WWCommandEyeRing` objects instruct a `WWRobot` how to display its eye pattern.
 *
 * For ledBitmap, the mapping represents an analog clock, where index 0 represents the light at 12 o'clock,
 * index 1 represents the light at 1 o'clock, index 2 represents the light at 2 o'clock, and so on.
 */
@interface WWCommandEyeRing : WWCommand
/**
 * The filename for the default eye animations to play.
 */
@property (nonatomic, strong) NSString *animationFile;
/**
 * The normalized brightness value of the light, between [WW_LIGHT_BRIGHTNESS_MIN, WW_LIGHT_BRIGHTNESS_MAX].
 */
@property (nonatomic) double brightness;
/**
 * The eye bitmap pattern to display, represented as an array of booleans.
 */
@property (nonatomic, strong) NSArray *ledBitmap;
/**
 * Initializes the command with specific eye animation.
 *
 * @param animationFile The filename of the eye animation.
 *
 * @return Returns a newly initialized `WWCommandEyeRing` instance.
 */
- (id) initWithAnimation:(NSString *)animationFile;
/**
 * Initializes the command with a specific eye bitmap pattern.
 *
 * @param bitmap The bitmap pattern represented as array of booleans.
 *
 * @return Returns a newly initialized `WWCommandEyeRing` instance.
 */
- (id) initWithBitmap:(NSArray *)bitmap;
/**
 * Sets the index of the ledBitmap to the specified value.
 *
 * @param on The boolean value that specifies whether the light is on.
 * @param index The index of the light position.
 */
- (void) setLEDValue:(BOOL)on atIndex:(NSUInteger)index;
/**
 * Returns the led value at the specified index.
 *
 * If index is out of bound (index >= ledCount), this method returns false.
 *
 * NOTE(review): ledCount is not declared in this header — presumably
 * defined by WWCommand or elsewhere in the SDK; confirm.
 *
 * @param index The index of the light position.
 *
 * @return The boolean value that specifies whether the light is on.
 */
- (BOOL) LEDValueAtIndex:(NSUInteger)index;
/**
 * Convenience method that sets all the lights to the specified value.
 *
 * @param on The boolean value that specifies if the light should be on.
 */
- (void) setAllBitmap:(BOOL)on;
@end
|
/*
* This header is generated by classdump-dyld 1.5
* on Wednesday, April 28, 2021 at 9:10:55 PM Mountain Standard Time
* Operating System: Version 14.5 (Build 18L204)
* Image Source: /System/Library/PrivateFrameworks/AppleMediaServices.framework/AppleMediaServices
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by Elias Limneos. Updated by Kevin Bradley.
*/
// Delegate protocol for presenting AMS request UI: implementers are asked
// to handle authentication and dialog requests and report back through the
// completion argument.
// NOTE(review): block signatures were erased by classdump (/*^block*/id);
// consult runtime usage for the real completion types.
@protocol AMSRequestPresentationDelegate <NSObject>
@required
-(void)handleAuthenticateRequest:(id)arg1 completion:(/*^block*/id)arg2;
-(void)handleDialogRequest:(id)arg1 completion:(/*^block*/id)arg2;
@end
|
//
// OATurnDrawable.h
// OsmAnd
//
// Created by Alexey Kulish on 02/11/2017.
// Copyright © 2017 OsmAnd. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "OATurnPathHelper.h"
// View that draws a navigation turn arrow. Uses std::shared_ptr, so any
// importer must be compiled as Objective-C++.
@interface OATurnDrawable : UIView
//@property (nonatomic) Paint paintBlack;
//@property (nonatomic) Paint paintRouteDirection;
// Bezier path of the turn arrow itself.
@property (nonatomic) UIBezierPath *pathForTurn;
// Secondary path drawn for the turn outlay.
@property (nonatomic) UIBezierPath *pathForTurnOutlay;
@property (nonatomic, readonly) std::shared_ptr<TurnType> turnType;
// NOTE(review): the meaning of turnImminent values is defined by the
// routing helper — confirm against OATurnPathHelper usage.
@property (nonatomic) int turnImminent;
@property (nonatomic) BOOL deviatedFromRoute;
@property (nonatomic) UIFont *textFont;
@property (nonatomic) UIColor *textColor;
@property (nonatomic) UIColor *clr;
@property (nonatomic) CGPoint centerText;
- (instancetype) initWithMini:(BOOL)mini;
- (BOOL) setTurnType:(std::shared_ptr<TurnType>)turnType;
- (void) setTurnImminent:(int)turnImminent deviatedFromRoute:(BOOL)deviatedFromRoute;
@end
|
from mrp.process_def import process
from mrp.runtime.conda import Conda
from mrp.runtime.docker import Docker
from mrp.runtime.host import Host
from mrp.util import NoEscape
from importlib.machinery import SourceFileLoader
import click
import os
@click.group()
def cli():
    # Root click group; command modules from mrp/cmd/ are attached to it
    # dynamically below. (Deliberately no docstring: click would surface
    # it as help text.)
    pass
def main(*args):
    """Run the CLI programmatically.

    A zero SystemExit from click is swallowed; a nonzero one is converted
    into a RuntimeError (chained to the original exit). Any other
    exception is reported on stderr via click.echo.
    """
    try:
        cli(*args)
    except SystemExit as sys_exit:
        if sys_exit.code != 0:
            raise RuntimeError(
                f"mrp.main failed with exit code {sys_exit.code}"
            ) from sys_exit
    except Exception as ex:
        click.echo(ex, err=True)
class cmd:
    # Empty namespace; one attribute per command module is attached below,
    # enabling direct calls like mrp.cmd.up(procs=["foo"]).
    pass
# Register all commands dynamically into the cli and cmd classes
# to allow execution directly without cli+argv.
#
# TODO(lshamis): Maybe do something similar for runtimes.
#
# For example:
# mrp.cmd.up(procs=["foo"])
#
# sys.argv = ["foo"]
# mrp.main()
import importlib.util
import sys

this_file_path = os.path.dirname(os.path.realpath(__file__))
cmds_path = os.path.join(this_file_path, "cmd")
for cmd_file in os.listdir(cmds_path):
    if not cmd_file.startswith("_") and cmd_file.endswith(".py"):
        cmd_path = os.path.join(cmds_path, cmd_file)
        cmd_name = cmd_file[: -len(".py")]
        # SourceFileLoader.load_module() is deprecated (removed in Python
        # 3.12); build the module via a spec and exec_module instead.
        spec = importlib.util.spec_from_file_location(cmd_name, cmd_path)
        module = importlib.util.module_from_spec(spec)
        # load_module() also registered the module in sys.modules; keep
        # that behavior so command modules can import one another by name.
        sys.modules[cmd_name] = module
        spec.loader.exec_module(module)
        setattr(cmd, cmd_name, module.cli.callback)
        cli.add_command(module.cli, cmd_name)
__all__ = ["main", "process", "NoEscape", "Docker", "Conda", "Host", "cmd"]
|
# MIT License
#
# Copyright (c) 2020 Arkadiusz Netczuk <dev.arnet@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import os
import logging
# PyQt5 is a hard requirement for this module: log the full traceback and
# abort the program if it cannot be imported.
try:
    from PyQt5 import uic
except ImportError:
    ### No module named <name>
    logging.exception("Exception while importing")
    exit(1)
import littleted.defs as defs
def generateUIFileNameFromClassName(classFileName):
    """Map a class source path (e.g. 'pkg/widget.py') to its UI file name ('widget.ui')."""
    stem, _extension = os.path.splitext(os.path.basename(classFileName))
    return stem + ".ui"
def loadUi(uiFilename):
    """Load a Qt Designer .ui file from the project's ui/ directory via uic.

    Re-raises any failure after printing which file was being loaded.
    """
    try:
        ui_path = os.path.join(defs.ROOT_DIR, "ui", uiFilename)
        return uic.loadUiType(ui_path)
    except Exception as e:
        print("Exception while loading UI file:", uiFilename, e)
        raise
def loadUiFromClassName(uiFilename):
    """Derive the .ui file name from a class source path and load it."""
    return loadUi(generateUIFileNameFromClassName(uiFilename))
|
# import pandas as pd
import sys
# from os import listdir
# from os.path import isfile,
# from os.path import join
import os
import subprocess
import numpy as np
import pandas as pd
# from datetime import datetime
# for sklearn package
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
# for label models
from snorkel.labeling.model import LabelModel
from snorkel.labeling.model import MajorityLabelVoter
from sklearn import metrics
# for labeling functions
from snorkel.labeling import PandasLFApplier
from snorkel.labeling import LFAnalysis
sys.path.insert(1, '../utils')
from tools import append_tsv_bydf, create_folder
from utils import get_metric_funcs_list
sys.path.insert(1, '../models')
from obtain_label_functions import get_all_lfs
from train_classifiers_to_pickles import train_model_Kfold
sys.path.insert(1, '../dataprocess')
# from pretrained_label_functions import *
from prepare_dataset import prepare_data_for_model, get_data_for_cross_fold_test
from arg_parser import get_lfdict_and_parser
# Metric callables and their display names, shared by all models below.
funcs, metrics_names = get_metric_funcs_list()
# Ensure the output directory for the training log exists.
if(not os.path.exists("term_output")):
    create_folder("term_output")
def save_performance_tologfile(model_metrics, lfs_names):
    """Append one row per model to the TSV training log.

    Parameters
    ----------
    model_metrics : list of list
        One metric list per model (Label Model, Majority Label Voter).
        The last entry of each list is a confusion matrix; it is flattened
        to a string *in place* (original behavior, observable by callers).
    lfs_names : list of str
        Names of the labeling functions used in this trial.
    """
    new_metrics_names = metrics_names.copy()
    # The last metric name carries a trailing character (newline) from its
    # source; strip it for the column header.
    new_metrics_names[-1] = metrics_names[-1][:-1]
    log_cols = ["Trial_No", "LF_Type", "LFs"] + new_metrics_names
    if os.path.exists(training_log_path):
        # Read the last log line in pure Python instead of shelling out to
        # `tail -1`, which is unavailable on Windows.
        with open(training_log_path, "rb") as log_file:
            last_line = log_file.readlines()[-1].decode("utf-8")
        trial_no = int(last_line.split('\t')[0]) + 1
    else:
        trial_no = 1
    model_results = []
    for lf_type, metric in zip(
        ["Label Model", "Majority Label Voter"], model_metrics
    ):
        # Flatten the confusion matrix into a printable string.
        confusion_flatten = f'[{", ".join(map(str, metric[-1].flatten()))}]'
        metric[-1] = confusion_flatten
        model_results.append([trial_no, lf_type, lfs_names, *metric])
    result_df = pd.DataFrame(model_results, columns=log_cols)
    append_tsv_bydf(result_df, training_log_path)
    print(f"Save Performance of Trial {trial_no} into {training_log_path}")
def get_selected_lf_list(rearchive):
    """Parse the --label_function CLI indices and return the chosen LF objects."""
    name_to_lf = get_all_lfs(rearchive=rearchive)
    index_to_name, parser = get_lfdict_and_parser()
    args = parser.parse_args()
    # Comma-separated indices, whitespace-tolerant. (Avoids shadowing the
    # builtin `id` that the original loop variable used.)
    indices = [token.strip() for token in args.label_function.split(',')]
    names = [index_to_name[idx] for idx in indices]
    print('selected label functions:')
    for idx, name in zip(indices, names):
        print(f'--{idx}: {name}')
    return [name_to_lf[name] for name in names]
def run_snorkel_LF(lf_list, trainData, testData, logged=False):
    """Apply labeling functions and evaluate LabelModel vs MajorityLabelVoter.

    Parameters
    ----------
    lf_list : list
        Snorkel labeling functions to apply.
    trainData, testData : pandas.DataFrame
        Data frames with at least a ``label`` column on the test side.
    logged : bool
        If True, append the metric results to the TSV training log.
    """
    num_classes = 2
    ## snorkel pandas applier
    applier = PandasLFApplier(lfs=lf_list)
    L_train = applier.apply(df=trainData)
    print("\n")
    print("Labeling Function Analysis on train dataset")
    print(f"{LFAnalysis(L_train, lf_list).lf_summary()}")
    L_test = applier.apply(df=testData)
    ### label model ###
    label_model = LabelModel(cardinality=num_classes, verbose=True)
    label_model.fit(L_train=L_train, n_epochs=500,
                    lr=0.001, log_freq=100, seed=42)
    majority_model = MajorityLabelVoter(cardinality=num_classes)
    metrics_list = []
    for model, model_name in zip(
            [label_model, majority_model],
            ["Label Model", "Majority Label Voter"]):
        normal_labels = model.predict(L_test)
        # Snorkel uses -1 for "abstain"; map abstains to the negative class.
        normal_labels = list(map(lambda x: 0 if x == -1 else x, normal_labels))
        print("\n", "#"*60, f"\n{model_name}\n", "#"*60)
        # Renamed from `metrics`, which shadowed the `sklearn.metrics`
        # module imported at the top of the file.
        model_scores = [func(testData.label, normal_labels) for func in funcs]
        for metrics_name, metric in zip(metrics_names, model_scores):
            print(metrics_name, metric)
        metrics_list.append(model_scores)
    if logged:
        save_performance_tologfile(metrics_list, [lf.name for lf in lf_list])
# from sklearn.model_selection import StratifiedKFold
# def cross_fold_test(trainData, testData, n_splits=5):
# allData=pd.concat([trainData, testData],axis=0).set_index('patient')
# scv=StratifiedKFold(n_splits, random_state=42, shuffle=True)
# X=allData['summary'].copy()
# y=allData['label'].copy()
# data_dict={}
# # dict_id=0
# for (train_index, test_index), dict_id in zip(scv.split(X, y), list(range(n_splits))):
# train_data=pd.DataFrame({'summary':X[train_index],'label':y[train_index]})
# test_Data=pd.DataFrame({'summary':X[test_index],'label':y[test_index]})
# data_dict[f'train_{dict_id:int}']=trainData
# tes
# data_dictprint(dict_id)
# for train_index, test_index, dict_id in zip(scv.split(allData),:
# train_data_list.append(allData[train_index])
# test_data_list.append(allData[test_index])
# # X_train, X_test= X[train_index], X[test_index]
# # y_train, y_test= y[train_index], y[test_index]
# print(train_data_list[0], train_data_list[1])
# print(test_data_list[0],test_data_list[1])
def main():
    """Entry point: select label functions from CLI args, then report the
    cross-validation results previously saved by the training step."""
    lf_list = get_selected_lf_list(rearchive=False)
    # TODO: SAVE TRAINING LOG
    # trainData, testData = prepare_data_for_model(True) # get data for training models
    # run_snorkel_LF(lf_list, trainData, testData,logged=True)
    # cross_fold_test(trainData,testData)
    '''
    contain num_splits of keys and values
    key: cv{id}, id from 0 to num_splits-1
    value:{'train':train_data,'test':test_data,'tf_idf':tfidf_pack}
        tfidf=[tfidf_train_data, tfidf_test_data, trainData.label, testData.label]
    '''
    # data_dict=get_data_for_cross_fold_test(5)
    #
    # train_model_Kfold(data_dict) ## train models for loading pickles if pickle files are not created
    # Load the cross-fold results written by train_classifiers_to_pickles
    # and print the per-metric means across folds.
    cv_results=pd.read_csv('../models/term_output/cv_results.csv')
    print(cv_results.mean(axis=0))
    # print(data_dict['cv0']['train'])
    ### Majority Label Voter ###
    # print("\n","#"*60,"\nMajority Label Voter\n", "#"*60)
    # define model
    # majority_labels = majority_model.predict(L_test)
    # for func, metrics_name in zip(funcs, metrics_names):
    #     print(metrics_name, func(testData.label, majority_labels))
# Run the labeling pipeline only when executed as a script.
if __name__ == '__main__':
    # main(args)
    main()
|
// Copyright (c) 2021, Element Labs and contributors
// For license information, please see license.txt
// Client-side form controller for the "Unallocated items" DocType.
// All hooks are currently stubbed out; this only registers the form.
frappe.ui.form.on('Unallocated items', {
	// refresh: function(frm) {
	// }
});
|
# -*- coding: utf-8 -*-
"""Some utility functions."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
#
# License: BSD (3-clause)
from collections.abc import Iterable
import os
import os.path as op
import logging
import tempfile
from threading import Thread
import time
import numpy as np
from .check import _check_option
from .config import get_config
from ._logging import logger
class ProgressBar(object):
"""Generate a command-line progressbar.
Parameters
----------
iterable : iterable | int | None
The iterable to use. Can also be an int for backward compatibility
(acts like ``max_value``).
initial_value : int
Initial value of process, useful when resuming process from a specific
value, defaults to 0.
mesg : str
Message to include at end of progress bar.
max_total_width : int | str
Maximum total message width. Can use "auto" (default) to try to set
a sane value based on the current terminal width.
max_value : int | None
The max value. If None, the length of ``iterable`` will be used.
**kwargs : dict
Additional keyword arguments for tqdm.
"""
    def __init__(self, iterable=None, initial_value=0, mesg=None,
                 max_total_width='auto', max_value=None,
                 **kwargs):  # noqa: D102
        # The following mimics this, but with configurable module to use
        # from ..externals.tqdm import auto
        from ..externals import tqdm
        # MNE_TQDM selects the tqdm frontend: 'tqdm' itself, or an attribute
        # of it such as 'tqdm.auto' / 'tqdm.notebook'.
        which_tqdm = get_config('MNE_TQDM', 'tqdm.auto')
        _check_option('MNE_TQDM', which_tqdm[:5], ('tqdm', 'tqdm.'),
                      extra='beginning')
        logger.debug(f'Using ProgressBar with {which_tqdm}')
        if which_tqdm != 'tqdm':
            tqdm = getattr(tqdm, which_tqdm.split('.', 1)[1])
        tqdm = tqdm.tqdm
        # Defaults only fill keys the caller did not supply explicitly.
        defaults = dict(
            leave=True, mininterval=0.016, miniters=1, smoothing=0.05,
            bar_format='{percentage:3.0f}%|{bar}| {desc} : {n_fmt}/{total_fmt} [{elapsed}<{remaining}, {rate_fmt:>11}{postfix}]',  # noqa: E501
        )
        for key, val in defaults.items():
            if key not in kwargs:
                kwargs.update({key: val})
        if isinstance(iterable, Iterable):
            self.iterable = iterable
            if max_value is None:
                self.max_value = len(iterable)
            else:
                self.max_value = max_value
        else:  # ignore max_value then
            # Backward compatibility: a bare int acts as max_value.
            self.max_value = int(iterable)
            self.iterable = None
        if max_total_width == 'auto':
            max_total_width = None  # tqdm's auto
        # Reserve a unique file *name* for the cross-process memmap used by
        # subset()/__enter__; the file itself is deleted immediately and
        # only (re)created inside __enter__.
        with tempfile.NamedTemporaryFile('wb', prefix='tmp_mne_prog') as tf:
            self._mmap_fname = tf.name
        del tf  # should remove the file
        self._mmap = None
        disable = logger.level > logging.INFO
        self._tqdm = tqdm(
            iterable=self.iterable, desc=mesg, total=self.max_value,
            initial=initial_value, ncols=max_total_width,
            disable=disable, **kwargs)
def update(self, cur_value):
"""Update progressbar with current value of process.
Parameters
----------
cur_value : number
Current value of process. Should be <= max_value (but this is not
enforced). The percent of the progressbar will be computed as
``(cur_value / max_value) * 100``.
"""
self.update_with_increment_value(cur_value - self._tqdm.n)
def update_with_increment_value(self, increment_value):
"""Update progressbar with an increment.
Parameters
----------
increment_value : int
Value of the increment of process. The percent of the progressbar
will be computed as
``(self.cur_value + increment_value / max_value) * 100``.
"""
self._tqdm.update(increment_value)
def __iter__(self):
"""Iterate to auto-increment the pbar with 1."""
for x in self._tqdm:
yield x
def subset(self, idx):
"""Make a joblib-friendly index subset updater.
Parameters
----------
idx : ndarray
List of indices for this subset.
Returns
-------
updater : instance of PBSubsetUpdater
Class with a ``.update(ii)`` method.
"""
return _PBSubsetUpdater(self, idx)
    def __enter__(self):  # noqa: D105
        # This should only be used with pb.subset and parallelization
        if op.isfile(self._mmap_fname):
            os.remove(self._mmap_fname)
        # prevent corner cases where self.max_value == 0
        self._mmap = np.memmap(self._mmap_fname, bool, 'w+',
                               shape=max(self.max_value, 1))
        self.update(0)  # must be zero as we just created the memmap
        # We need to control how the pickled bars exit: remove print statements
        # A daemon thread polls the shared memmap and refreshes the bar.
        self._thread = _UpdateThread(self)
        self._thread.start()
        return self
    def __exit__(self, type_, value, traceback):  # noqa: D105
        # Restore exit behavior for our one from the main thread
        self.update(self._mmap.sum())
        self._tqdm.close()
        # Signal the poller thread to stop before tearing down the memmap.
        self._thread._mne_run = False
        self._thread.join()
        self._mmap = None
        if op.isfile(self._mmap_fname):
            os.remove(self._mmap_fname)
    def __del__(self):
        """Ensure output completes."""
        if getattr(self, '_tqdm', None) is not None:
            self._tqdm.close()
class _UpdateThread(Thread):
def __init__(self, pb):
super(_UpdateThread, self).__init__(daemon=True)
self._mne_run = True
self._mne_pb = pb
def run(self):
while self._mne_run:
self._mne_pb.update(self._mne_pb._mmap.sum())
time.sleep(1. / 30.) # 30 Hz refresh is plenty
class _PBSubsetUpdater(object):
def __init__(self, pb, idx):
self.mmap = pb._mmap
self.idx = idx
def update(self, ii):
self.mmap[self.idx[ii - 1]] = True
|
webpackJsonp([102],{
/***/ 263:
/***/ (function(module, exports, __webpack_require__) {
// vue-loader glue module: stitches the component's script (module 357) and
// compiled template (module 461) together via normalizeComponent.
var disposed = false
var normalizeComponent = __webpack_require__(16)
/* script */
var __vue_script__ = __webpack_require__(357)
/* template */
var __vue_template__ = __webpack_require__(461)
/* template functional */
var __vue_template_functional__ = false
/* styles */
var __vue_styles__ = null
/* scopeId */
var __vue_scopeId__ = null
/* moduleIdentifier (server only) */
var __vue_module_identifier__ = null
var Component = normalizeComponent(
  __vue_script__,
  __vue_template__,
  __vue_template_functional__,
  __vue_styles__,
  __vue_scopeId__,
  __vue_module_identifier__
)
Component.options.__file = "resources\\assets\\js\\admin\\views\\mt\\pack\\index.vue"
/* hot reload */
// Dead branch in production builds: `if (false)` guards the HMR stub.
if (false) {(function () {
  var hotAPI = require("vue-hot-reload-api")
  hotAPI.install(require("vue"), false)
  if (!hotAPI.compatible) return
  module.hot.accept()
  if (!module.hot.data) {
    hotAPI.createRecord("data-v-0af345ba", Component.options)
  } else {
    hotAPI.reload("data-v-0af345ba", Component.options)
  }
  module.hot.dispose(function (data) {
    disposed = true
  })
})()}
module.exports = Component.exports
/***/ }),
/***/ 357:
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
/* harmony default export */ __webpack_exports__["default"] = ({
components: {
'tinymce': function tinymce() {
return __webpack_require__.e/* import() */(61/* duplicate */).then(__webpack_require__.bind(null, 312));
}
},
data: function data() {
return {
dateOptions: this.$store.state.dateOptions,
tinyOptions: {
'height': 300
},
timeOptions: {
format: 'hh:mm a',
useCurrent: false,
showClear: true,
showClose: true
},
formType: false,
pack: {
status: 1,
provider_id: '',
services: [],
service_id: '',
sim_pack_id: '',
name: '',
price: '',
desc: '',
pack_type: 1,
data_type: 1,
offer_type: 1,
to_take: '',
taking_way: null,
order: null,
source: '',
pack_data: [{ data: null, data_type: '', network: 0, validity: null, validity_type: '', period_from: '', period_to: '', comment: '', is_bonus: 0, policy: null }]
},
packConst: {
status: 1,
provider_id: '',
services: [],
service_id: '',
sim_pack_id: '',
name: '',
price: '',
desc: '',
pack_type: 1,
data_type: 1,
offer_type: 1,
to_take: '',
taking_way: null,
order: null,
source: '',
pack_data: [{ data: null, data_type: '', network: 0, validity: null, validity_type: '', period_from: '', period_to: '', comment: '', is_bonus: 0, policy: null }]
},
packSearch: {
provider_id: '',
sim_pack_id: '',
status: '',
date_from: '',
date_to: '',
order_by: ''
},
packs: {},
providers: [],
sim_packs: [],
type: this.$route.params.type,
perPage: this.$route.query.per_page,
perPageOptions: this.$store.state.perPageOptions,
selected: [],
selectAll: false,
noData: '',
preloader: true
};
},
  watch: {
    // Re-fetch when the page size changes, preserving the page number.
    perPage: function perPage(val) {
      var page = this.$route.query.page;
      this.$router.push({ path: document.location.search, query: { page: page, per_page: val } });
      this.getLists();
    },
    // Re-fetch when the route's pack type (net/min/sms/bundle/sim) changes.
    '$route.params.type': function $routeParamsType(type) {
      this.type = type;
      this.getLists();
    }
  },
  created: function created() {
    /* Fix tinymce popup modal input typing problem */
    $(document).on('focusin', function (e) {
      if ($(e.target).closest(".mce-window").length || $(e.target).closest(".moxman-window").length) {
        e.stopImmediatePropagation();
      }
    });
    this.getLists();
    // If the URL's per_page is not one of the preset options, prepend it so
    // the page-size select can display the current value.
    var per_page = this.$route.query.per_page;
    var perPageOptions = this.perPageOptions.find(function (ele) {
      return ele.value == per_page;
    });
    if (!perPageOptions) {
      this.perPageOptions.splice(0, 0, { text: per_page, value: per_page });
    }
  },
methods: {
pack_title: function pack_title() {
switch (this.type) {
case 'net':
this.pack.pack_type = 1;
this.packConst.pack_type = 1;
return 'Internet';
break;
case 'min':
this.pack.pack_type = 2;
this.packConst.pack_type = 2;
return 'Minute';
break;
case 'sms':
this.pack.pack_type = 3;
this.packConst.pack_type = 3;
return 'SMS';
break;
case 'bundle':
this.pack.pack_type = 4;
this.packConst.pack_type = 4;
return 'Bundle';
break;
case 'sim':
this.pack.pack_type = 5;
this.packConst.pack_type = 5;
return 'Sim';
break;
}
},
addData: function addData(index) {
this.pack.pack_data.splice(index + 1, 0, { data: null, data_type: '', network: 0, validity: null, validity_type: '', period_from: '', period_to: '', comment: '', is_bonus: 0, policy: null });
},
removeData: function removeData(index) {
if (this.pack.pack_data.length == 1) {
alert(this.$store.state.needOne);
} else {
this.pack.pack_data.splice(index, 1);
}
},
select: function select() {
this.selected = [];
if (!this.selectAll) {
for (var key in this.packs.data) {
this.selected.push(this.packs.data[key].id);
}
}
},
    providerChanged: function providerChanged() {
      // When a provider is picked, preselect its first service and expose
      // the provider's full service list to the Service multiselect.
      if (this.pack.provider_id.services) {
        this.pack.service_id = this.pack.provider_id.services[0];
        this.pack.services = this.pack.provider_id.services;
      }
    },
    dataSearch: function dataSearch() {
      // Build a router query from the search form: dates are stringified,
      // provider/sim selections contribute their id, and a previously-set
      // query key is blanked when its filter was cleared.
      var data_search = this.packSearch;
      var query = {};
      data_search.date_from = data_search.date_from ? data_search.date_from.toString() : '';
      data_search.date_to = data_search.date_to ? data_search.date_to.toString() : '';
      for (var key in data_search) {
        if (data_search[key]) {
          query[key] = key != 'provider_id' && key != 'sim_pack_id' ? data_search[key] : data_search[key]['id'];
        } else {
          if (this.$route.query[key]) {
            query[key] = '';
          }
        }
      }
      this.$router.push({ path: document.location.search, query: query });
      this.getLists();
    },
getLists: function getLists() {
var page = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 1;
var click = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : false;
var app = this;
var per_page = this.$route.query.per_page;
switch (this.type) {
case 'net':
var pack_type = 1;
break;
case 'min':
var pack_type = 2;
break;
case 'sms':
var pack_type = 3;
break;
case 'bundle':
var pack_type = 4;
break;
case 'sim':
var pack_type = 5;
break;
}
var current_page = click ? page : this.$route.query.page;
if (click) {
this.$router.push({ path: document.location.search, query: { page: current_page, per_page: per_page } });
}
var allQuery = this.$route.query;
var queryString = Object.keys(allQuery).map(function (key) {
return key + '=' + allQuery[key];
}).join('&');
queryString += '&pack_type=' + pack_type;
axios.get('/api/v1/mt/packs?' + queryString).then(function (resp) {
app.preloader = false;
app.packs = resp.data.packs;
app.providers = resp.data.providers;
app.sim_packs = resp.data.sim_packs;
app.sim_packs_const = resp.data.sim_packs;
if (app.packs.data.length == 0) {
app.noData = app.$store.state.noData;
} else {
app.noData = '';
}
}).catch(function (resp) {
app.$store.commit('errorMessages', resp);
});
},
    newForm: function newForm() {
      // Switch the modal into "create" mode and reset the form model.
      // NOTE(review): this aliases `pack` to `packConst` (no copy), so edits
      // in the form also mutate packConst — looks unintended; verify.
      this.formType = true;
      this.pack = this.packConst;
    },
    saveForm: function saveForm() {
      // Create (formType == true) or update (false) the current pack, then
      // close the modal and refresh the list.
      var _this = this;
      $('#createModal').modal('toggle');
      var app = this;
      var newPackage = app.pack;
      if (this.formType) {
        axios.post('/api/v1/mt/packs', newPackage).then(function (resp) {
          // Reset the form model after a successful create.
          _this.pack = _this.packConst;
          app.$store.commit('submitMsg', 'add');
          app.getLists();
        }).catch(function (resp) {
          app.$store.commit('errorMessages', resp);
        });
      } else {
        axios.patch('/api/v1/mt/packs/' + newPackage.id, newPackage).then(function (resp) {
          app.$store.commit('submitMsg', 'update');
          app.getLists();
        }).catch(function (resp) {
          app.$store.commit('errorMessages', resp);
        });
      }
    },
    editForm: function editForm(pack) {
      // Load an existing pack into the modal in "update" mode.
      this.formType = false;
      this.pack = pack;
      if (!this.pack.desc) this.pack.desc = '';
      // Expose the selected provider's services for the Service multiselect.
      if (pack.provider_id.services) {
        this.pack.services = pack.provider_id.services;
      }
    },
    deleteEntry: function deleteEntry(id, index) {
      // Delete one pack after user confirmation and drop it from the table.
      var app = this;
      if (confirm(app.$store.state.confirmDel)) {
        axios.delete('/api/v1/mt/packs/' + id).then(function (resp) {
          app.packs.data.splice(index, 1);
          app.$store.commit('submitMsg', 'delete');
        }).catch(function (resp) {
          app.$store.commit('errorMessages', resp);
        });
      }
    },
    deleteSelected: function deleteSelected() {
      // Bulk-delete the checked rows; ids are sent comma-joined in the URL.
      var app = this;
      if (app.selected.length == 0) {
        app.$store.commit('noSelectedMsg');
        return;
      }
      if (confirm(app.$store.state.confirmDel)) {
        var selectedItem = this.selected.toString();
        axios.delete('/api/v1/mt/packs/' + selectedItem).then(function (resp) {
          app.$store.commit('submitMsg', 'selectedDelete');
          app.selectAll = false;
          app.selected = [];
          app.getLists();
        }).catch(function (resp) {
          app.$store.commit('errorMessages', resp);
        });
      }
    },
    lastChecked: function lastChecked() {
      // Mark the checked packs as freshly verified ("last checked" now).
      var app = this;
      if (confirm('Are you sure to update?')) {
        var data = {
          ids: this.selected
        };
        axios.post('/api/v1/mt/lastchecked', data).then(function (resp) {
          app.$store.commit('submitMsg', 'update');
          app.selectAll = false;
          app.selected = [];
          app.getLists();
        }).catch(function (resp) {
          app.$store.commit('errorMessages', resp);
        });
      }
    }
} //methods
//export default
});
/***/ }),
/***/ 461:
/***/ (function(module, exports, __webpack_require__) {
var render = function() {
var _vm = this
var _h = _vm.$createElement
var _c = _vm._self._c || _h
return _c("div", { staticClass: "component" }, [
_c(
"div",
{ staticClass: "component-heading clearfix" },
[
_c("h2", { staticClass: "heading float-left" }, [
_vm._v(
"\n " +
_vm._s(_vm.pack_title()) +
" Package Lists\n "
)
]),
_vm._v(" "),
_c("back-forward")
],
1
),
_vm._v(" "),
_c("div", { staticClass: "component-body" }, [
_c(
"div",
{
staticClass: "modal fade",
attrs: {
id: "createModal",
tabindex: "-1",
role: "dialog",
"aria-labelledby": "ModalLongTitle",
"aria-hidden": "true"
}
},
[
_c(
"div",
{
staticClass: "modal-dialog modal-lg",
attrs: { role: "document" }
},
[
_c("div", { staticClass: "modal-content" }, [
_c("div", { staticClass: "modal-header" }, [
_c(
"h5",
{
staticClass: "modal-title",
attrs: { id: "ModalLongTitle" }
},
[
_vm._v(
_vm._s(_vm.formType ? "New" : "Update") +
" " +
_vm._s(_vm.pack_title()) +
" Package"
)
]
),
_vm._v(" "),
_vm._m(0)
]),
_vm._v(" "),
_c(
"form",
{
on: {
submit: function($event) {
$event.preventDefault()
_vm.saveForm()
}
}
},
[
_c("div", { staticClass: "modal-body" }, [
_c("div", { staticClass: "form-row" }, [
_c("div", { staticClass: "col-md-12 mb-3" }, [
_c("label", { attrs: { for: "status" } }, [
_vm._v("Status")
]),
_vm._v(" "),
_c("label", { staticClass: "switch" }, [
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.status,
expression: "pack.status"
}
],
attrs: { type: "checkbox" },
domProps: {
checked: Array.isArray(_vm.pack.status)
? _vm._i(_vm.pack.status, null) > -1
: _vm.pack.status
},
on: {
change: function($event) {
var $$a = _vm.pack.status,
$$el = $event.target,
$$c = $$el.checked ? true : false
if (Array.isArray($$a)) {
var $$v = null,
$$i = _vm._i($$a, $$v)
if ($$el.checked) {
$$i < 0 &&
_vm.$set(
_vm.pack,
"status",
$$a.concat([$$v])
)
} else {
$$i > -1 &&
_vm.$set(
_vm.pack,
"status",
$$a
.slice(0, $$i)
.concat($$a.slice($$i + 1))
)
}
} else {
_vm.$set(_vm.pack, "status", $$c)
}
}
}
}),
_vm._v(" "),
_c("span", { staticClass: "slider round" })
])
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Select Providers")
]),
_vm._v(" "),
_c("multiselect", {
attrs: {
"allow-empty": false,
"deselect-label": "",
"select-label": "",
options: _vm.providers,
"preserve-search": true,
label: "name",
"track-by": "name",
"preselect-first": false
},
on: {
input: function($event) {
_vm.providerChanged()
}
},
scopedSlots: _vm._u([
{
key: "singleLabel",
fn: function(props) {
return [
props.option.logo_small_src
? [
_c("img", {
staticClass: "option__image",
attrs: {
src:
"/uploads/imgs/" +
props.option.logo_small_src
.resized_name,
alt: props.option.name
}
})
]
: _vm._e(),
_vm._v(" "),
_c(
"span",
{ staticClass: "option__title" },
[
_vm._v(
" " + _vm._s(props.option.name)
)
]
)
]
}
},
{
key: "option",
fn: function(props) {
return [
props.option.logo_small_src
? [
_c("img", {
staticClass: "option__image",
attrs: {
src:
"/uploads/imgs/" +
props.option.logo_small_src
.resized_name,
alt: props.option.name
}
})
]
: _vm._e(),
_vm._v(" "),
_c(
"span",
{ staticClass: "option__title" },
[
_vm._v(
" " + _vm._s(props.option.name)
)
]
)
]
}
}
]),
model: {
value: _vm.pack.provider_id,
callback: function($$v) {
_vm.$set(_vm.pack, "provider_id", $$v)
},
expression: "pack.provider_id"
}
})
],
1
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Service")
]),
_vm._v(" "),
_c("multiselect", {
attrs: {
"allow-empty": false,
"deselect-label": "",
"select-label": "",
options: _vm.pack.services,
"preserve-search": true,
label: "name",
"track-by": "name",
"preselect-first": false
},
scopedSlots: _vm._u([
{
key: "singleLabel",
fn: function(props) {
return [
_c(
"span",
{ staticClass: "option__title" },
[
_vm._v(
" " + _vm._s(props.option.name)
)
]
)
]
}
},
{
key: "option",
fn: function(props) {
return [
_c(
"span",
{ staticClass: "option__title" },
[
_vm._v(
" " + _vm._s(props.option.name)
)
]
)
]
}
}
]),
model: {
value: _vm.pack.service_id,
callback: function($$v) {
_vm.$set(_vm.pack, "service_id", $$v)
},
expression: "pack.service_id"
}
})
],
1
),
_vm._v(" "),
_vm.type != "sim"
? _c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Select Sim Package")
]),
_vm._v(" "),
_c("multiselect", {
attrs: {
"deselect-label": "",
"select-label": "",
options: _vm.sim_packs,
"preserve-search": true,
label: "name",
"track-by": "name",
"preselect-first": false
},
scopedSlots: _vm._u([
{
key: "tag",
fn: function(props) {
return [
_c("span", [
_vm._v(_vm._s(props.option.name))
]),
_vm._v(" "),
_c(
"span",
{
on: {
click: function($event) {
props.remove(props.option)
}
}
},
[_vm._v("x")]
)
]
}
}
]),
model: {
value: _vm.pack.sim_pack_id,
callback: function($$v) {
_vm.$set(_vm.pack, "sim_pack_id", $$v)
},
expression: "pack.sim_pack_id"
}
})
],
1
)
: _vm._e(),
_vm._v(" "),
_c(
"div",
{
staticClass: "col-md-6 col-lg-4 mb-3 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.name,
expression: "pack.name"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "title"
},
domProps: { value: _vm.pack.name },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
_vm.pack,
"name",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c("label", { attrs: { for: "title" } }, [
_vm._v("Title")
])
]
),
_vm._v(" "),
_c(
"div",
{
staticClass: "col-md-6 col-lg-4 mb-3 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.price,
expression: "pack.price"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "price"
},
domProps: { value: _vm.pack.price },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
_vm.pack,
"price",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c("label", { attrs: { for: "price" } }, [
_vm._v("Price")
])
]
),
_vm._v(" "),
_vm.pack.pack_type == 1 || _vm.pack.pack_type == 4
? _c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Data Type")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.data_type,
expression: "pack.data_type"
}
],
staticClass: "form-control",
attrs: { id: "data_type" },
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(
o
) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.$set(
_vm.pack,
"data_type",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c("option", { attrs: { value: "1" } }, [
_vm._v("All")
]),
_vm._v(" "),
_c("option", { attrs: { value: "0" } }, [
_vm._v("Social")
])
]
)
]
)
: _vm._e(),
_vm._v(" "),
_c("div", { staticClass: "col-md-6 col-lg-4 mb-3" }, [
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Offer Type")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.offer_type,
expression: "pack.offer_type"
}
],
staticClass: "form-control",
attrs: { id: "offer_type" },
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(o) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.$set(
_vm.pack,
"offer_type",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c("option", { attrs: { value: "1" } }, [
_vm._v("All")
]),
_vm._v(" "),
_c("option", { attrs: { value: "0" } }, [
_vm._v("Off Sim")
]),
_vm._v(" "),
_c("option", { attrs: { value: "2" } }, [
_vm._v("New Sim")
])
]
)
]),
_vm._v(" "),
_c(
"div",
{
staticClass: "col-md-6 col-lg-4 mb-3 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.to_take,
expression: "pack.to_take"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "to_take"
},
domProps: { value: _vm.pack.to_take },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
_vm.pack,
"to_take",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c("label", { attrs: { for: "to_take" } }, [
_vm._v("How to Take")
])
]
),
_vm._v(" "),
_c("div", { staticClass: "col-md-6 col-lg-4 mb-3" }, [
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Taking Way")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.taking_way,
expression: "pack.taking_way"
}
],
staticClass: "form-control",
attrs: { id: "taking_way" },
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(o) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.$set(
_vm.pack,
"taking_way",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c("option", { domProps: { value: null } }, [
_vm._v("All")
]),
_vm._v(" "),
_c("option", { attrs: { value: "1" } }, [
_vm._v("Recharge")
]),
_vm._v(" "),
_c("option", { attrs: { value: "2" } }, [
_vm._v("Dial")
])
]
)
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-4 mb-3 outline-group" },
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.order,
expression: "pack.order"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "number",
id: "order"
},
domProps: { value: _vm.pack.order },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
_vm.pack,
"order",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c("label", { attrs: { for: "order" } }, [
_vm._v("Serial")
])
]
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-8 mb-3 outline-group" },
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.pack.source,
expression: "pack.source"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "source"
},
domProps: { value: _vm.pack.source },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
_vm.pack,
"source",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c("label", { attrs: { for: "source" } }, [
_vm._v("Source From")
])
]
),
_vm._v(" "),
_c(
"div",
{ staticClass: "mt-number-list" },
[
_c("br"),
_vm._v(" "),
_vm._l(_vm.pack.pack_data, function(net, key) {
return _c("div", { staticClass: "mt-numbers" }, [
_c("div", { staticClass: "mt-title" }, [
_vm._v(
"\n Data and network\n "
)
]),
_vm._v(" "),
_c("div", { staticClass: "mt-control" }, [
_c(
"span",
{
staticClass: "btn btn-primary btn-sm",
on: {
click: function($event) {
_vm.addData(key)
}
}
},
[_c("i", { staticClass: "icon-plus" })]
),
_vm._v(" "),
_c(
"span",
{
staticClass: "btn btn-danger btn-sm",
on: {
click: function($event) {
_vm.removeData(key)
}
}
},
[_c("i", { staticClass: "icon-minus" })]
)
]),
_vm._v(" "),
_c("div", { staticClass: "form-row" }, [
_c(
"div",
{
staticClass:
"col-lg-4 mb-3 mt-40 mar-b-25"
},
[
_c("div", { staticClass: "form-row" }, [
_c(
"div",
{
staticClass: "col-6 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: net.data,
expression: "net.data"
}
],
staticClass:
"form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "data" + key
},
domProps: { value: net.data },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
net,
"data",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c(
"label",
{ attrs: { for: "data" + key } },
[_vm._v("Data")]
)
]
),
_vm._v(" "),
_c("div", { staticClass: "col-6" }, [
_c(
"label",
{ staticClass: "o-s-l" },
[_vm._v("Select")]
),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: net.data_type,
expression: "net.data_type"
}
],
staticClass: "form-control",
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call(
$event.target.options,
function(o) {
return o.selected
}
)
.map(function(o) {
var val =
"_value" in o
? o._value
: o.value
return val
})
_vm.$set(
net,
"data_type",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c(
"option",
{ attrs: { value: "" } },
[_vm._v("Select")]
),
_vm._v(" "),
_vm.pack.pack_type == 1 ||
_vm.pack.pack_type == 4
? [
_c(
"option",
{
attrs: { value: "gb" }
},
[_vm._v("GB")]
),
_vm._v(" "),
_c(
"option",
{
attrs: { value: "mb" }
},
[_vm._v("MB")]
)
]
: _vm._e(),
_vm._v(" "),
_vm.pack.pack_type == 2 ||
_vm.pack.pack_type == 4
? [
_c(
"option",
{
attrs: { value: "min" }
},
[_vm._v("Min")]
),
_vm._v(" "),
_c(
"option",
{
attrs: { value: "sec" }
},
[_vm._v("Sec")]
)
]
: _vm._e(),
_vm._v(" "),
_vm.pack.pack_type == 3 ||
_vm.pack.pack_type == 4
? [
_c(
"option",
{
attrs: { value: "sms" }
},
[_vm._v("SMS")]
)
]
: _vm._e(),
_vm._v(" "),
_vm.pack.pack_type == 5
? [
_c(
"option",
{ attrs: { value: "p" } },
[_vm._v("Paisha")]
)
]
: _vm._e()
],
2
)
])
])
]
),
_vm._v(" "),
_c(
"div",
{
staticClass:
"col-lg-4 mb-3 mt-40 mar-b-25"
},
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Network")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: net.network,
expression: "net.network"
}
],
staticClass: "form-control",
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call(
$event.target.options,
function(o) {
return o.selected
}
)
.map(function(o) {
var val =
"_value" in o
? o._value
: o.value
return val
})
_vm.$set(
net,
"network",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c(
"option",
{ attrs: { value: "0" } },
[_vm._v("All")]
),
_vm._v(" "),
_vm.pack.pack_type != 2 ||
_vm.pack.pack_type != 3
? [
_c(
"option",
{ attrs: { value: "4" } },
[_vm._v("4G")]
),
_vm._v(" "),
_c(
"option",
{ attrs: { value: "3" } },
[_vm._v("3G")]
),
_vm._v(" "),
_c(
"option",
{ attrs: { value: "2" } },
[_vm._v("2G")]
),
_vm._v(" "),
_c(
"option",
{ attrs: { value: "5" } },
[_vm._v("5G")]
)
]
: _vm._e(),
_vm._v(" "),
_vm.pack.pack_type != 1
? [
_c(
"option",
{ attrs: { value: "1" } },
[_vm._v("Onnet")]
),
_vm._v(" "),
_c(
"option",
{ attrs: { value: "2" } },
[_vm._v("Offnet")]
)
]
: _vm._e()
],
2
)
]
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-lg-4 mb-3 mt-40" },
[
_c("div", { staticClass: "form-row" }, [
_c("div", { staticClass: "col-6" }, [
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: net.validity,
expression: "net.validity"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "validity" + key
},
domProps: { value: net.validity },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
net,
"validity",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c(
"label",
{
attrs: { for: "validity" + key }
},
[_vm._v("Data Validity")]
)
]),
_vm._v(" "),
_c("div", { staticClass: "col-6" }, [
_c(
"label",
{ staticClass: "o-s-l" },
[_vm._v("Select")]
),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: net.validity_type,
expression:
"net.validity_type"
}
],
staticClass: "form-control",
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call(
$event.target.options,
function(o) {
return o.selected
}
)
.map(function(o) {
var val =
"_value" in o
? o._value
: o.value
return val
})
_vm.$set(
net,
"validity_type",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c(
"option",
{ attrs: { value: "" } },
[_vm._v("Select")]
),
_vm._v(" "),
_vm.pack.pack_type != 5
? [
_c(
"option",
{
attrs: { value: "days" }
},
[_vm._v("Days")]
),
_vm._v(" "),
_c(
"option",
{
attrs: {
value: "hours"
}
},
[_vm._v("Hours")]
),
_vm._v(" "),
_c(
"option",
{
attrs: { value: "mins" }
},
[_vm._v("Minutes")]
)
]
: [
_c(
"option",
{
attrs: { value: "sec" }
},
[_vm._v("Second")]
),
_vm._v(" "),
_c(
"option",
{
attrs: { value: "min" }
},
[_vm._v("Min")]
)
]
],
2
)
])
])
]
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("date-picker", {
attrs: {
config: _vm.timeOptions,
placeholder: "Period From"
},
model: {
value: net.period_from,
callback: function($$v) {
_vm.$set(net, "period_from", $$v)
},
expression: "net.period_from"
}
})
],
1
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("date-picker", {
attrs: {
config: _vm.timeOptions,
placeholder: "Period To"
},
model: {
value: net.period_to,
callback: function($$v) {
_vm.$set(net, "period_to", $$v)
},
expression: "net.period_to"
}
})
],
1
),
_vm._v(" "),
_c(
"div",
{
staticClass:
"col-md-6 col-lg-4 mb-3 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: net.comment,
expression: "net.comment"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "comment" + key
},
domProps: { value: net.comment },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
net,
"comment",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c(
"label",
{ attrs: { for: "comment" + key } },
[_vm._v("Comment")]
)
]
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-6 col-lg-4 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Is Bonus?")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: net.is_bonus,
expression: "net.is_bonus"
}
],
staticClass: "form-control",
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call(
$event.target.options,
function(o) {
return o.selected
}
)
.map(function(o) {
var val =
"_value" in o
? o._value
: o.value
return val
})
_vm.$set(
net,
"is_bonus",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c(
"option",
{ attrs: { value: "0" } },
[_vm._v("Normal")]
),
_vm._v(" "),
_c(
"option",
{ attrs: { value: "1" } },
[_vm._v("Bonus")]
)
]
)
]
),
_vm._v(" "),
_c(
"div",
{
staticClass:
"col-md-6 col-lg-4 mb-3 outline-group"
},
[
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: net.policy,
expression: "net.policy"
}
],
staticClass: "form-control outline",
attrs: {
placeholder: " ",
type: "text",
id: "policy" + key
},
domProps: { value: net.policy },
on: {
input: function($event) {
if ($event.target.composing) {
return
}
_vm.$set(
net,
"policy",
$event.target.value
)
}
}
}),
_vm._v(" "),
_c(
"label",
{ attrs: { for: "policy" + key } },
[_vm._v("Fair Uses Policy")]
)
]
)
])
])
})
],
2
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-md-12 mb-3 mt-3" },
[
_c("tinymce", {
attrs: {
id: "d1",
other_options: _vm.tinyOptions
},
model: {
value: _vm.pack.desc,
callback: function($$v) {
_vm.$set(_vm.pack, "desc", $$v)
},
expression: "pack.desc"
}
})
],
1
)
])
]),
_vm._v(" "),
_c("div", { staticClass: "modal-footer" }, [
_c(
"button",
{
staticClass: "btn btn-secondary",
attrs: { type: "button", "data-dismiss": "modal" }
},
[_vm._v("Close")]
),
_vm._v(" "),
_c(
"button",
{
staticClass: "btn btn-primary",
attrs: { type: "submit" }
},
[
_vm._v(
"\n " +
_vm._s(_vm.formType ? "Create" : "Update") +
"\n "
)
]
)
])
]
)
])
]
)
]
),
_vm._v(" "),
_c(
"div",
{
staticClass: "modal right fade",
attrs: {
id: "dataSearch",
tabindex: "-1",
role: "dialog",
"aria-hidden": "true"
}
},
[
_c(
"div",
{ staticClass: "modal-dialog", attrs: { role: "document" } },
[
_c("div", { staticClass: "modal-content" }, [
_vm._m(1),
_vm._v(" "),
_c(
"form",
{
on: {
submit: function($event) {
$event.preventDefault()
_vm.dataSearch()
}
}
},
[
_c("div", { staticClass: "modal-body" }, [
_c("div", { staticClass: "form-row" }, [
_c("div", { staticClass: "col-12 mb-3" }, [
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Show")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.perPage,
expression: "perPage"
}
],
staticClass: "form-control",
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(o) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.perPage = $event.target.multiple
? $$selectedVal
: $$selectedVal[0]
}
}
},
_vm._l(_vm.perPageOptions, function(option) {
return _c(
"option",
{ domProps: { value: option.value } },
[
_vm._v(
"\n " +
_vm._s(option.text) +
"\n "
)
]
)
})
)
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-12 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Select Provider")
]),
_vm._v(" "),
_c("multiselect", {
attrs: {
"deselect-label": "",
"select-label": "",
options: _vm.providers,
"preserve-search": true,
placeholder: "Select Provider",
label: "name",
"track-by": "name",
"preselect-first": true
},
scopedSlots: _vm._u([
{
key: "tag",
fn: function(props) {
return [
_c("span", [
_vm._v(_vm._s(props.option.name))
]),
_vm._v(" "),
_c(
"span",
{
on: {
click: function($event) {
props.remove(props.option)
}
}
},
[_vm._v("x")]
)
]
}
}
]),
model: {
value: _vm.packSearch.provider_id,
callback: function($$v) {
_vm.$set(_vm.packSearch, "provider_id", $$v)
},
expression: "packSearch.provider_id"
}
})
],
1
),
_vm._v(" "),
_vm.type != "sim"
? _c(
"div",
{ staticClass: "col-12 mb-3" },
[
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Select Sim Package")
]),
_vm._v(" "),
_c("multiselect", {
attrs: {
"deselect-label": "",
"select-label": "",
options: _vm.sim_packs,
"preserve-search": true,
label: "name",
"track-by": "name",
"preselect-first": false
},
scopedSlots: _vm._u([
{
key: "tag",
fn: function(props) {
return [
_c("span", [
_vm._v(_vm._s(props.option.name))
]),
_vm._v(" "),
_c(
"span",
{
on: {
click: function($event) {
props.remove(props.option)
}
}
},
[_vm._v("x")]
)
]
}
}
]),
model: {
value: _vm.packSearch.sim_pack_id,
callback: function($$v) {
_vm.$set(
_vm.packSearch,
"sim_pack_id",
$$v
)
},
expression: "packSearch.sim_pack_id"
}
})
],
1
)
: _vm._e(),
_vm._v(" "),
_c("div", { staticClass: "col-12 mb-3" }, [
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Select Status")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.packSearch.status,
expression: "packSearch.status"
}
],
staticClass: "form-control",
attrs: { id: "search-status" },
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(o) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.$set(
_vm.packSearch,
"status",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c("option", { attrs: { value: "" } }, [
_vm._v("Select Status")
]),
_vm._v(" "),
_c("option", { attrs: { value: "1" } }, [
_vm._v("Active")
]),
_vm._v(" "),
_c("option", { attrs: { value: "0" } }, [
_vm._v("Inactive")
])
]
)
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-12 mb-3" },
[
_c("date-picker", {
attrs: {
config: _vm.dateOptions,
placeholder: "Date From"
},
model: {
value: _vm.packSearch.date_from,
callback: function($$v) {
_vm.$set(_vm.packSearch, "date_from", $$v)
},
expression: "packSearch.date_from"
}
})
],
1
),
_vm._v(" "),
_c(
"div",
{ staticClass: "col-12 mb-3" },
[
_c("date-picker", {
attrs: {
config: _vm.dateOptions,
placeholder: "Date To"
},
model: {
value: _vm.packSearch.date_to,
callback: function($$v) {
_vm.$set(_vm.packSearch, "date_to", $$v)
},
expression: "packSearch.date_to"
}
})
],
1
),
_vm._v(" "),
_c("div", { staticClass: "col-12 mb-3" }, [
_c("label", { staticClass: "o-s-l" }, [
_vm._v("Order By")
]),
_vm._v(" "),
_c(
"select",
{
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.packSearch.order_by,
expression: "packSearch.order_by"
}
],
staticClass: "form-control",
attrs: { id: "search-order-by" },
on: {
change: function($event) {
var $$selectedVal = Array.prototype.filter
.call($event.target.options, function(o) {
return o.selected
})
.map(function(o) {
var val =
"_value" in o ? o._value : o.value
return val
})
_vm.$set(
_vm.packSearch,
"order_by",
$event.target.multiple
? $$selectedVal
: $$selectedVal[0]
)
}
}
},
[
_c("option", { attrs: { value: "" } }, [
_vm._v("Newer First")
]),
_vm._v(" "),
_c("option", { attrs: { value: "1" } }, [
_vm._v("Newer Last")
])
]
)
]),
_vm._v(" "),
_vm._m(2)
])
])
]
)
])
]
)
]
),
_vm._v(" "),
_c("div", { staticClass: "table-actions" }, [
_c(
"button",
{
staticClass: "btn btn-sm green",
attrs: {
type: "button",
"data-toggle": "modal",
"data-target": "#createModal"
},
on: {
click: function($event) {
_vm.newForm()
}
}
},
[
_c("i", { staticClass: "icon-plus-squared" }),
_vm._v(" " + _vm._s(_vm.pack_title()) + " Package\n ")
]
),
_vm._v(" "),
_vm.selected.length
? _c(
"button",
{
directives: [
{
name: "can",
rawName: "v-can",
value: "package-delete",
expression: "'package-delete'"
}
],
staticClass: "btn btn-sm btn-danger ",
on: {
click: function($event) {
_vm.deleteSelected()
}
}
},
[
_c("i", { staticClass: "icon-trash-empty" }),
_vm._v(" Delete Selected")
]
)
: _vm._e(),
_vm._v(" "),
_vm.selected.length
? _c(
"button",
{
staticClass: "btn btn-sm btn-info",
on: {
click: function($event) {
_vm.lastChecked()
}
}
},
[_vm._v(" Update Last Checked")]
)
: _vm._e(),
_vm._v(" "),
_vm._m(3)
]),
_vm._v(" "),
_c("div", { staticClass: "table-responsive" }, [
_c("table", { staticClass: "table table-striped bg-white" }, [
_c("thead", [
_c("tr", [
_c("th", { staticStyle: { width: "20px" } }, [
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.selectAll,
expression: "selectAll"
}
],
attrs: { type: "checkbox", id: "check-all" },
domProps: {
checked: Array.isArray(_vm.selectAll)
? _vm._i(_vm.selectAll, null) > -1
: _vm.selectAll
},
on: {
click: _vm.select,
change: function($event) {
var $$a = _vm.selectAll,
$$el = $event.target,
$$c = $$el.checked ? true : false
if (Array.isArray($$a)) {
var $$v = null,
$$i = _vm._i($$a, $$v)
if ($$el.checked) {
$$i < 0 && (_vm.selectAll = $$a.concat([$$v]))
} else {
$$i > -1 &&
(_vm.selectAll = $$a
.slice(0, $$i)
.concat($$a.slice($$i + 1)))
}
} else {
_vm.selectAll = $$c
}
}
}
})
]),
_vm._v(" "),
_vm._m(4),
_vm._v(" "),
_c("th", [_vm._v("Serial")]),
_vm._v(" "),
_c("th", [_vm._v("Actions")]),
_vm._v(" "),
_c("th", [_vm._v("Provider")]),
_vm._v(" "),
_c("th", [_vm._v("Service")]),
_vm._v(" "),
_vm.type != "sim" ? _c("th", [_vm._v("Package")]) : _vm._e(),
_vm._v(" "),
_c("th", [_vm._v("Status")]),
_vm._v(" "),
_c("th", [_vm._v("Data")]),
_vm._v(" "),
_c("th", [_vm._v("Price")]),
_vm._v(" "),
_c("th", [_vm._v("Validity")])
])
]),
_vm._v(" "),
_c(
"tbody",
[
_vm.preloader
? _c("tr", { staticStyle: { background: "none" } }, [
_c("td", { attrs: { colspan: "5" } }, [_c("preloader")], 1)
])
: _vm._e(),
_vm._v(" "),
_vm._l(_vm.packs.data, function(pack, index) {
return _c("tr", [
_c("td", [
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.selected,
expression: "selected"
}
],
attrs: { type: "checkbox", id: "checkbox-id-" + pack.id },
domProps: {
value: pack.id,
checked: Array.isArray(_vm.selected)
? _vm._i(_vm.selected, pack.id) > -1
: _vm.selected
},
on: {
change: function($event) {
var $$a = _vm.selected,
$$el = $event.target,
$$c = $$el.checked ? true : false
if (Array.isArray($$a)) {
var $$v = pack.id,
$$i = _vm._i($$a, $$v)
if ($$el.checked) {
$$i < 0 && (_vm.selected = $$a.concat([$$v]))
} else {
$$i > -1 &&
(_vm.selected = $$a
.slice(0, $$i)
.concat($$a.slice($$i + 1)))
}
} else {
_vm.selected = $$c
}
}
}
})
]),
_vm._v(" "),
_c("td", [
_c("label", { attrs: { for: "checkbox-id-" + pack.id } }, [
_vm._v(_vm._s(pack.name))
])
]),
_vm._v(" "),
_c("td", [_vm._v(_vm._s(pack.order))]),
_vm._v(" "),
_c("td", [
_c("div", { staticClass: "dropdown action-items" }, [
_vm._m(5, true),
_vm._v(" "),
_c(
"div",
{
staticClass: "dropdown-menu",
attrs: { "aria-labelledby": "actionItemButtons" }
},
[
_c(
"a",
{
directives: [
{
name: "can",
rawName: "v-can",
value: "package-edit",
expression: "'package-edit'"
}
],
staticClass: "dropdown-item",
attrs: {
href: "#",
"data-toggle": "modal",
"data-target": "#createModal"
},
on: {
click: function($event) {
$event.preventDefault()
_vm.editForm(pack)
}
}
},
[
_vm._v(
"\n Edit / View\n "
)
]
),
_vm._v(" "),
_c(
"a",
{
directives: [
{
name: "can",
rawName: "v-can",
value: "package-delete",
expression: "'package-delete'"
}
],
staticClass: "dropdown-item",
attrs: { href: "#" },
on: {
click: function($event) {
$event.preventDefault()
_vm.deleteEntry(pack.id, index)
}
}
},
[
_vm._v(
"\n Delete\n "
)
]
)
]
)
])
]),
_vm._v(" "),
_c("td", [_vm._v(_vm._s(pack.provider_id.name))]),
_vm._v(" "),
_c("td", [_vm._v(_vm._s(pack.service_id.name))]),
_vm._v(" "),
_vm.type != "sim"
? _c("td", [
_vm._v(
_vm._s(pack.sim_pack_id ? pack.sim_pack_id.name : "")
)
])
: _vm._e(),
_vm._v(" "),
_c(
"td",
[
pack.status
? [
_c(
"span",
{ staticClass: "bg-success active-status" },
[_vm._v("Active")]
)
]
: [
_c(
"span",
{ staticClass: "bg-danger active-status" },
[_vm._v("Inactive")]
)
]
],
2
),
_vm._v(" "),
_c(
"td",
{ staticClass: "taxonomies" },
[
pack.pack_data
? _vm._l(pack.pack_data, function(net) {
return _c(
"span",
[
_vm._v(
"\n " +
_vm._s(net.data + " " + net.data_type) +
" " +
_vm._s(
net.validity + " " + net.validity_type
) +
" \n "
),
pack.network == 2 && net.network == 1
? [
_vm._v(
"\n Onnet \n "
)
]
: _vm._e(),
_vm._v(" "),
pack.network == 2 && net.network == 2
? [
_vm._v(
"\n Offnet\n "
)
]
: _vm._e()
],
2
)
})
: _vm._e()
],
2
),
_vm._v(" "),
_c("td", [_vm._v(_vm._s(pack.price))]),
_vm._v(" "),
_c(
"td",
{ staticClass: "taxonomies" },
[
pack.pack_data
? _vm._l(pack.pack_data, function(net) {
return _c("span", [
_vm._v(
"\n " +
_vm._s(
net.validity + " " + net.validity_type
) +
" \n "
)
])
})
: _vm._e()
],
2
)
])
}),
_vm._v(" "),
_vm.noData
? [
_c("tr", {
staticClass: "bg-white",
domProps: { innerHTML: _vm._s(_vm.noData) }
})
]
: _vm._e()
],
2
),
_vm._v(" "),
_c("tfoot", [
_c("tr", [
_c("th", [
_c("input", {
directives: [
{
name: "model",
rawName: "v-model",
value: _vm.selectAll,
expression: "selectAll"
}
],
attrs: { type: "checkbox", id: "check-all-bottom" },
domProps: {
checked: Array.isArray(_vm.selectAll)
? _vm._i(_vm.selectAll, null) > -1
: _vm.selectAll
},
on: {
click: _vm.select,
change: function($event) {
var $$a = _vm.selectAll,
$$el = $event.target,
$$c = $$el.checked ? true : false
if (Array.isArray($$a)) {
var $$v = null,
$$i = _vm._i($$a, $$v)
if ($$el.checked) {
$$i < 0 && (_vm.selectAll = $$a.concat([$$v]))
} else {
$$i > -1 &&
(_vm.selectAll = $$a
.slice(0, $$i)
.concat($$a.slice($$i + 1)))
}
} else {
_vm.selectAll = $$c
}
}
}
})
]),
_vm._v(" "),
_vm._m(6),
_vm._v(" "),
_c("th", [_vm._v("Serial")]),
_vm._v(" "),
_c("th", [_vm._v("Actions")]),
_vm._v(" "),
_c("th", [_vm._v("Provider")]),
_vm._v(" "),
_c("th", [_vm._v("Service")]),
_vm._v(" "),
_vm.type != "sim" ? _c("th", [_vm._v("Package")]) : _vm._e(),
_vm._v(" "),
_c("th", [_vm._v("Status")]),
_vm._v(" "),
_c("th", [_vm._v("Data")]),
_vm._v(" "),
_c("th", [_vm._v("Price")]),
_vm._v(" "),
_c("th", [_vm._v("Validity")])
])
])
])
]),
_vm._v(" "),
_c(
"div",
{ staticClass: "table-actions" },
[
_c("pagination", {
attrs: { data: _vm.packs, limit: 2 },
on: { "pagination-change-page": _vm.getLists }
})
],
1
)
])
])
}
// Static render functions for this compiled Vue template — one per hoisted
// static subtree, referenced from the main render function as _vm._m(n).
// Generated by vue-loader/vue-template-compiler; do not edit by hand,
// regenerate from the .vue source instead.
var staticRenderFns = [
  // _m(0): "×" close button for a modal header.
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c(
      "button",
      {
        staticClass: "close",
        attrs: {
          type: "button",
          "data-dismiss": "modal",
          "aria-label": "Close"
        }
      },
      [_c("span", { attrs: { "aria-hidden": "true" } }, [_vm._v("×")])]
    )
  },
  // _m(1): header of the #dataSearch modal (title + close button).
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c("div", { staticClass: "modal-header" }, [
      _c("h5", { staticClass: "modal-title" }, [_vm._v("Search")]),
      _vm._v(" "),
      _c(
        "button",
        {
          staticClass: "close",
          attrs: {
            type: "button",
            "data-dismiss": "modal",
            "aria-label": "Close"
          }
        },
        [_c("span", { attrs: { "aria-hidden": "true" } }, [_vm._v("×")])]
      )
    ])
  },
  // _m(2): Search / Reset buttons at the bottom of the search form.
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c("div", { staticClass: "col-12 mb-3" }, [
      _c(
        "button",
        { staticClass: "btn btn-primary btn-sm", attrs: { type: "submit" } },
        [_vm._v("Search")]
      ),
      _vm._v(" "),
      _c(
        "button",
        {
          staticClass: "btn btn-dark float-right btn-sm",
          attrs: { type: "reset" }
        },
        [_vm._v("Reset")]
      )
    ])
  },
  // _m(3): toolbar button that opens the #dataSearch modal.
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c(
      "button",
      {
        staticClass: "btn btn-sm btn-info float-right",
        attrs: {
          type: "button",
          "data-toggle": "modal",
          "data-target": "#dataSearch"
        }
      },
      [
        _c("i", { staticClass: "icon-search" }),
        _vm._v(" "),
        _c("span", { staticClass: "d-none d-sm-inline" }, [_vm._v("Search")])
      ]
    )
  },
  // _m(4): "Title" header cell whose label targets the top select-all checkbox.
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c("th", [
      _c("label", { attrs: { for: "check-all" } }, [_vm._v("Title")])
    ])
  },
  // _m(5): per-row "…" dropdown toggle for the action menu (invoked as
  // _m(5, true) so each table row gets its own cached vnode copy).
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c(
      "button",
      {
        staticClass: "btn btn-default btn-sm dropdown-toggle",
        attrs: {
          type: "button",
          id: "actionItemButtons",
          "data-toggle": "dropdown",
          "aria-haspopup": "true",
          "aria-expanded": "false"
        }
      },
      [_c("i", { staticClass: "icon-ellipsis" })]
    )
  },
  // _m(6): "Title" footer cell whose label targets the bottom select-all checkbox.
  function() {
    var _vm = this
    var _h = _vm.$createElement
    var _c = _vm._self._c || _h
    return _c("th", [
      _c("label", { attrs: { for: "check-all-bottom" } }, [_vm._v("Title")])
    ])
  }
]
// Marker for vue-loader: the template was compiled with whitespace handling
// applied, so hot-reload tooling should not re-strip it.
render._withStripped = true
// Export in the shape vue-loader expects for a compiled template module.
module.exports = { render: render, staticRenderFns: staticRenderFns }
// Hot-module-replacement hook. Webpack substitutes the condition at build
// time; in production it is `false`, making this branch dead code.
if (false) {
  module.hot.accept()
  if (module.hot.data) {
    require("vue-hot-reload-api") .rerender("data-v-0af345ba", module.exports)
  }
}
/***/ })
});
|
from conans import ConanFile, CMake, tools
from sys import platform
import re
import os
class LibzmqConan(ConanFile):
    """Conan recipe that clones the ZeroMQ (libzmq) release tag and builds it with CMake."""

    name = "libzmq"
    version = "4.3.2"
    # NOTE(review): upstream libzmq 4.3.x is licensed LGPL-3.0 with a static
    # linking exception, not GPL-3.0-only — confirm the intended SPDX id.
    license = "GPL-3.0-only"
    url = "https://github.com/zeromq/libzmq.git"
    description = "The ZeroMQ lightweight messaging kernel is a library which extends the standard socket interfaces " \
                  "with features traditionally provided by specialised messaging middleware products. ZeroMQ sockets " \
                  "provide an abstraction of asynchronous message queues, multiple messaging patterns, message " \
                  "filtering (subscriptions), seamless access to multiple transport protocols and more."
    settings = "os", "compiler", "build_type", "arch"
    # shared: build the .so instead of the static archive.
    # libsodium: build CURVE security against libsodium instead of the bundled tweetnacl.
    options = {"shared": [True, False],
               "fPIC": [True, False],
               "libsodium": [True, False], }
    default_options = {"shared": True,
                       "fPIC": True,
                       "libsodium": False, }
    generators = "cmake"

    def requirements(self):
        """Require libsodium from the Conan remote only when the option is enabled."""
        if self.options.libsodium:
            self.requires("libsodium/1.0.18@conan/stable")

    def source(self):
        """Shallow-clone the ``v<version>`` tag of the upstream repository."""
        git = tools.Git()
        git.clone(self.url, "v%s" % self.version, shallow=True)

    def system_requirements(self):
        # No system packages are installed; the toolchain is assumed present.
        pass

    def _configure_cmake(self):
        """Return a configured CMake helper using ``cmake-build`` as the build folder."""
        cmake = CMake(self)
        cmake.definitions['WITH_LIBSODIUM'] = "ON" if self.options.libsodium else "OFF"
        cmake.configure(build_folder='cmake-build')
        return cmake

    def build(self):
        """Inject the Conan setup into the upstream CMakeLists, then build (and test)."""
        # Patch right after project() so conanbuildinfo.cmake is loaded before
        # any targets are declared.
        tools.replace_in_file("CMakeLists.txt", "project(ZeroMQ)",
                              '''project(ZeroMQ)
include(${CMAKE_CURRENT_SOURCE_DIR}/conanbuildinfo.cmake)
conan_basic_setup()''')
        env_build = self._configure_cmake()
        env_build.build()
        # NOTE(review): this runs the upstream test suite on every package
        # build, which is slow — confirm it is intentional.
        env_build.test()

    def package(self):
        """Copy public headers and the built library into the package folder."""
        self.copy("*.h", dst="include", src="include")
        if self.options.shared:
            self.copy("*.so*", dst="lib", keep_path=False)
        else:
            self.copy("libzmq.a", dst="lib", keep_path=False)

    def package_info(self):
        """Describe the packaged layout and link information for consumers."""
        self.cpp_info.name = "libzmq"
        # Ordered list of include paths
        self.cpp_info.includedirs = ['include']
        # The libs to link against
        # NOTE(review): Conan conventionally expects bare library names here
        # (e.g. "zmq"); full file names may only resolve with some generators.
        self.cpp_info.libs = [
            "libzmq.so"] if self.options.shared else ["libzmq.a"]
        # Directories where libraries can be found
        self.cpp_info.libdirs = ['lib']
        # Directories where resources, data, etc can be found
        self.cpp_info.resdirs = []
        # Directories where executables and shared libs can be found
        self.cpp_info.bindirs = []
        # Directories where sources can be found (debugging, reusing sources)
        self.cpp_info.srcdirs = []
        self.cpp_info.build_modules = []  # Build system utility module files
        self.cpp_info.defines = []  # preprocessor definitions
        self.cpp_info.cflags = []  # pure C flags
        self.cpp_info.cxxflags = []  # C++ compilation flags
        self.cpp_info.sharedlinkflags = []  # linker flags
        self.cpp_info.exelinkflags = []  # linker flags
        self.cpp_info.system_libs = []  # The system libs to link against
|
'''
To generate a standalone PNG file for a Bokeh application from a single
Python script, pass the script name to ``bokeh png`` on the command
line:
.. code-block:: sh
bokeh png app_script.py
The generated PNG will be saved in the current working directory with
the name ``app_script.png``.
It is also possible to run the same command with Jupyter notebooks:
.. code-block:: sh
bokeh png app_notebook.ipynb
This will generate a PNG file named ``app_notebook.png`` just like
with a python script.
Applications can also be created from directories. The directory should
contain a ``main.py`` (and any other helper modules that are required) as
well as any additional assets (e.g., theme files). Pass the directory name
to ``bokeh png`` to generate the PNG:
.. code-block:: sh
bokeh png app_dir
It is possible to generate PNG files for multiple applications at once:
.. code-block:: sh
bokeh png app_script.py app_dir
For all cases, it's required to explicitly add a Bokeh layout to
``bokeh.io.curdoc`` for it to appear in the output.
'''
from __future__ import absolute_import
import io
import sys
from ...io.export import get_screenshot_as_png, create_webdriver, terminate_webdriver
from ..util import set_single_plot_width_height
from .file_output import FileOutputSubcommand
class PNG(FileOutputSubcommand):
    ''' Subcommand to output applications as standalone PNG files.

    '''

    #: name for this subcommand
    name = "png"

    #: file extension for output generated by this :class:`~bokeh.command.subcommands.file_output.FileOutputSubcommand`
    extension = "png"

    help = "Create standalone PNG files for one or more applications"

    args = (
        FileOutputSubcommand.files_arg("PNG"),
        ('--height', dict(
            metavar='HEIGHT',
            type=int,
            help="The desired height of the exported layout obj only if it's a Plot instance",
            default=None,
        )),
        ('--width', dict(
            metavar='WIDTH',
            type=int,
            help="The desired width of the exported layout obj only if it's a Plot instance",
            default=None,
        )),
    ) + FileOutputSubcommand.other_args()

    def invoke(self, args):
        ''' Run the export, sharing one webdriver across all applications
        and making sure it is shut down afterwards.

        '''
        self.driver = create_webdriver()
        try:
            super(PNG, self).invoke(args)
        finally:
            # Always release the browser process, even if an export failed.
            terminate_webdriver(self.driver)

    def write_file(self, args, filename, doc):
        ''' Render ``doc`` to PNG bytes and write them to ``filename``
        (``-`` sends the bytes to stdout instead).

        '''
        payload = self.file_contents(args, doc)
        if filename == '-':
            sys.stdout.buffer.write(payload)
        else:
            with io.open(filename, "w+b") as outfile:
                outfile.write(payload)
        self.after_write_file(args, filename, doc)

    def file_contents(self, args, doc):
        ''' Return the PNG screenshot of ``doc`` as raw bytes.

        '''
        # Apply any --width/--height overrides before rendering.
        set_single_plot_width_height(doc, width=args.width, height=args.height)
        screenshot = get_screenshot_as_png(doc, driver=self.driver)
        buffer = io.BytesIO()
        screenshot.save(buffer, "png")
        return buffer.getvalue()
|
import React from "react";
import ReactDOM from "react-dom";
import QRCode from "react-qr-code";
import clipboard from "clipboard-polyfill";
import actions from "../actions/utils";
import { Link } from "react-router-dom";
import { NavLink } from "react-router-dom";
import CloseIcon from "../components/icons/CloseIcon";
export default class Receive extends React.Component {
constructor(props) {
super(props);
this.state = {
L: props.L.PAGES.RECEIVE,
account: props.account,
_is_address_copied: false
};
};
_copy_address = () => {
// Copy the address to the clipboard and trigger some shit
clipboard.writeText(this.state.account.address);
this.setState({_is_address_copied: true});
actions.trigger_sfx("copy.mp3", 1);
actions.trigger_vocal("address_copied.mp3", 1);
}
render() {
const { L, account, _is_address_copied } = this.state;
const QR_code_value = JSON.stringify({
protocol: "v.systems",
api: 1,
opc: "account",
address: account.address
});
let svg_copy_icon = !_is_address_copied ?
<svg viewBox="0 0 24 24">
<path fill="currentColor" d="M19,21H8V7H19M19,5H8A2,2 0 0,0 6,7V21A2,2 0 0,0 8,23H19A2,2 0 0,0 21,21V7A2,2 0 0,0 19,5M16,1H4A2,2 0 0,0 2,3V17H4V3H16V1Z" />
</svg>:
<svg viewBox="0 0 24 24">
<path fill="currentColor" d="M9,20.42L2.79,14.21L5.62,11.38L9,14.77L18.88,4.88L21.71,7.71L9,20.42Z" />
</svg>;
return (
<div class="body-content">
<div class="toolbar">
<div class="toolbar-inner">
<div class="toolbar-title">{L.RECEIVE}</div>
<NavLink to={`/`} class="nav-link">
<button class="circle toolbar-menu-button">
<CloseIcon/>
</button>
</NavLink>
</div>
</div>
<div class="qr-card-zone">
<div class="card-inset qr-card">
<QRCode value={QR_code_value} bgColor="#e6e7ee" fgColor="#31344b" size={216}/>
</div>
</div>
<div class="address-zone">
<input class="input-right-button" defaultValue={account.address}/>
<button class="input-right-button" onClick={this._copy_address}>
{svg_copy_icon}
</button>
</div>
<div class="receive-buttons">
<NavLink to={`/`}>
<button class="receive-ok-button rounded">OK</button>
</NavLink>
</div>
</div>
);
}
}
|
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=cyclic-import
"""Components to communicate tasks to RabbitMQ."""
import collections
import logging
from tornado import gen
from kiwipy import communications, Future
import plumpy
from aiida.common.extendeddicts import AttributeDict
__all__ = ('RemoteException', 'CommunicationTimeout', 'DeliveryFailed', 'ProcessLauncher', 'BROKER_DEFAULTS')

LOGGER = logging.getLogger(__name__)

# Re-export the exception types so callers depend on this module rather than
# on plumpy/kiwipy directly.
RemoteException = plumpy.RemoteException
DeliveryFailed = plumpy.DeliveryFailed
CommunicationTimeout = communications.TimeoutError # pylint: disable=invalid-name

# Base names of the RabbitMQ queue and exchanges; a profile-specific prefix is
# prepended by the `get_*_name` utilities below.
_LAUNCH_QUEUE = 'process.queue'
_MESSAGE_EXCHANGE = 'messages'
_TASK_EXCHANGE = 'tasks'

# Connection defaults used by `get_rmq_url` when the caller omits a value.
BROKER_DEFAULTS = AttributeDict({
    'protocol': 'amqp',
    'username': 'guest',
    'password': 'guest',
    'host': '127.0.0.1',
    'port': 5672,
    'virtual_host': '',
    'heartbeat': 600,
})

# Keyword arguments that `get_rmq_url` accepts as URL query parameters.
BROKER_VALID_PARAMETERS = [
    'heartbeat', # heartbeat timeout in seconds
    'cafile', # string containing path to ca certificate file
    'capath', # string containing path to ca certificates
    'cadata', # base64 encoded ca certificate data
    'keyfile', # string containing path to key file
    'certfile', # string containing path to certificate file
    'no_verify_ssl', # boolean disables certificates validation
]
def get_rmq_url(protocol=None, username=None, password=None, host=None, port=None, virtual_host=None, **kwargs):
    """Return the URL to connect to RabbitMQ.

    .. note::

        The default of the ``host`` is set to ``127.0.0.1`` instead of ``localhost`` because on some computers localhost
        resolves first to IPv6 with address ::1 and if RMQ is not running on IPv6 one gets an annoying warning. For more
        info see: https://github.com/aiidateam/aiida-core/issues/1142

    :param protocol: the protocol to use, `amqp` or `amqps`.
    :param username: the username for authentication.
    :param password: the password for authentication.
    :param host: the hostname of the RabbitMQ server.
    :param port: the port of the RabbitMQ server.
    :param virtual_host: the virtual host to connect to.
    :returns: the connection URL string.
    """
    from urllib.parse import urlencode, urlunparse

    unknown = set(kwargs.keys()).difference(BROKER_VALID_PARAMETERS)
    if unknown:
        raise ValueError(f"invalid URL parameters specified in the keyword arguments: {', '.join(unknown)}")

    # Always include a heartbeat so stale connections are detected.
    kwargs.setdefault('heartbeat', BROKER_DEFAULTS.heartbeat)

    netloc = '{}:{}@{}:{}'.format(
        username or BROKER_DEFAULTS.username,
        password or BROKER_DEFAULTS.password,
        host or BROKER_DEFAULTS.host,
        port or BROKER_DEFAULTS.port,
    )

    # The virtual host is optional but if it is specified it needs to start with a forward slash. If the virtual host
    # itself contains forward slashes, they need to be encoded.
    vhost = virtual_host or BROKER_DEFAULTS.virtual_host
    if vhost and not vhost.startswith('/'):
        vhost = f'/{vhost}'

    scheme = protocol or BROKER_DEFAULTS.protocol
    return urlunparse((scheme, netloc, vhost, '', urlencode(kwargs), ''))
def get_launch_queue_name(prefix=None):
    """Return the launch queue name, prepending ``prefix`` when one is given.

    :returns: launch queue name
    """
    if prefix is None:
        return _LAUNCH_QUEUE

    return f'{prefix}.{_LAUNCH_QUEUE}'
def get_message_exchange_name(prefix):
    """Return the message exchange name for a given prefix.

    :returns: message exchange name
    """
    return '{}.{}'.format(prefix, _MESSAGE_EXCHANGE)
def get_task_exchange_name(prefix):
    """Return the task exchange name for a given prefix.

    :returns: task exchange name
    """
    return '{}.{}'.format(prefix, _TASK_EXCHANGE)
def _store_inputs(inputs):
"""Try to store the values in the input dictionary.
For nested dictionaries, the values are stored by recursively.
"""
for node in inputs.values():
try:
node.store()
except AttributeError:
if isinstance(node, collections.Mapping):
_store_inputs(node)
class ProcessLauncher(plumpy.ProcessLauncher):
    """A sub class of `plumpy.ProcessLauncher` to launch a `Process`.

    It overrides the _continue method to make sure the node corresponding to the task can be loaded and
    that if it is already marked as terminated, it is not continued but the future is reconstructed and returned
    """

    @staticmethod
    def handle_continue_exception(node, exception, message):
        """Handle exception raised in `_continue` call.

        If the process state of the node has not yet been put to excepted, the exception was raised before the process
        instance could be reconstructed, for example when the process class could not be loaded, thereby circumventing
        the exception handling of the state machine. Raising this exception will then acknowledge the process task with
        RabbitMQ leaving an uncleaned node in the `CREATED` state for ever. Therefore we have to perform the node
        cleaning manually.

        :param node: the process node corresponding to the task that failed
        :param exception: the exception object
        :param message: string message to use for the log message
        """
        from aiida.engine import ProcessState

        if not node.is_excepted:
            # The state machine never got a chance to record the failure, so seal the node manually.
            node.logger.exception(message)
            node.set_exception(str(exception))
            node.set_process_state(ProcessState.EXCEPTED)
            node.seal()

    @gen.coroutine
    def _continue(self, communicator, pid, nowait, tag=None):
        """Continue the task.

        Note that the task may already have been completed, as indicated from the corresponding the node, in which
        case it is not continued, but the corresponding future is reconstructed and returned. This scenario may
        occur when the Process was already completed by another worker that however failed to send the acknowledgment.

        :param communicator: the communicator that called this method
        :param pid: the pid of the process to continue
        :param nowait: if True don't wait for the process to finish, just return the pid, otherwise wait and
            return the results
        :param tag: the tag of the checkpoint to continue from
        """
        from aiida.common import exceptions
        from aiida.engine.exceptions import PastException
        from aiida.orm import load_node, Data
        from aiida.orm.utils import serialize

        try:
            node = load_node(pk=pid)
        except (exceptions.MultipleObjectsError, exceptions.NotExistent):
            # In this case, the process node corresponding to the process id, cannot be resolved uniquely or does not
            # exist. The latter being the most common case, where someone deleted the node, before the process was
            # properly terminated. Since the node is never coming back and so the process will never be able to continue
            # we raise `Return` instead of `TaskRejected` because the latter would cause the task to be resent and start
            # to ping-pong between RabbitMQ and the daemon workers.
            LOGGER.exception('Cannot continue process<%d>', pid)
            raise gen.Return(False)

        if node.is_terminated:
            LOGGER.info('not continuing process<%d> which is already terminated with state %s', pid, node.process_state)

            # Rebuild the result future from the node's terminal state instead of re-running the process.
            future = Future()

            if node.is_finished:
                future.set_result({entry.link_label: entry.node for entry in node.get_outgoing(node_class=Data)})
            elif node.is_excepted:
                future.set_exception(PastException(node.exception))
            elif node.is_killed:
                future.set_exception(plumpy.KilledError())

            # `future.result()` re-raises a stored exception, if any, so failures propagate to the caller.
            raise gen.Return(future.result())

        try:
            result = yield super()._continue(communicator, pid, nowait, tag)
        except ImportError as exception:
            message = 'the class of the process could not be imported.'
            self.handle_continue_exception(node, exception, message)
            raise
        except Exception as exception:
            message = 'failed to recreate the process instance in order to continue it.'
            self.handle_continue_exception(node, exception, message)
            raise

        # Ensure that the result is serialized such that communication thread won't have to do database operations
        try:
            serialized = serialize.serialize(result)
        except Exception:
            LOGGER.exception('failed to serialize the result for process<%d>', pid)
            raise

        raise gen.Return(serialized)
|
//{{NO_DEPENDENCIES}}
// Microsoft Visual C++ generated include file.
// Used by fancyzones.rc
//////////////////////////////
// Non-localizable
// String constants embedded in the module's VERSIONINFO resource block.
#define FILE_DESCRIPTION "PowerToys FancyZones"
#define INTERNAL_NAME "FancyZones"
#define ORIGINAL_FILENAME "PowerToys.FancyZones.exe"
// Non-localizable
//////////////////////////////
|
var io = require('socket.io-client');
var ChatClient = require('./chat-client');
var Canvas = require('./canvas');
var global = require('./global');
var playerNameInput = document.getElementById('playerNameInput');
var socket;
var reason;
var debug = function(args) {
if (console && console.log) {
console.log(args);
}
};
// Flag touch devices so the UI can adapt (e.g. the chat box is removed on mobile).
if ( /Android|webOS|iPhone|iPad|iPod|BlackBerry/i.test(navigator.userAgent) ) {
    global.mobile = true;
}
// Hide the start menu, reveal the game area and join the game as `type`
// ('player' or 'spectate'), creating the socket connection on first use.
function startGame(type) {
    // Strip HTML tags from the chosen nick and cap it at 25 characters.
    global.playerName = playerNameInput.value.replace(/(<([^>]+)>)/ig, '').substring(0,25);
    global.playerType = type;

    global.screenWidth = window.innerWidth;
    global.screenHeight = window.innerHeight;

    document.getElementById('startMenuWrapper').style.maxHeight = '0px';
    document.getElementById('gameAreaWrapper').style.opacity = 1;
    if (!socket) {
        // The player type is sent to the server in the connection query string.
        socket = io({query:"type=" + type});
        setupSocket(socket);
    }
    if (!global.animLoopHandle)
        animloop();
    socket.emit('respawn');
    window.chat.socket = socket;
    window.chat.registerFunctions();
    window.canvas.socket = socket;
    global.socket = socket;
}
// Checks if the nick chosen contains valid alphanumeric characters (and underscores).
// Returns true when the nick is empty or consists only of word characters.
function validNick() {
    var regex = /^\w*$/;
    // Run the regex once and reuse the result. The previous code passed the
    // match result as a second argument to `debug`, which only accepts a
    // single argument, so the result was silently dropped from the log.
    var match = regex.exec(playerNameInput.value);
    debug('Regex Test: ' + match);
    return match !== null;
}
// Wire up the start-menu UI once the DOM is ready.
window.onload = function() {

    var btn = document.getElementById('startButton'),
        btnS = document.getElementById('spectateButton'),
        nickErrorText = document.querySelector('#startMenu .input-error');

    // Spectators join without nick validation.
    btnS.onclick = function () {
        startGame('spectate');
    };

    btn.onclick = function () {

        // Checks if the nick is valid.
        if (validNick()) {
            nickErrorText.style.opacity = 0;
            startGame('player');
        } else {
            nickErrorText.style.opacity = 1;
        }
    };

    var settingsMenu = document.getElementById('settingsButton');
    var settings = document.getElementById('settings');
    var instructions = document.getElementById('instructions');

    // Toggle the settings panel between open (300px) and closed (0px).
    settingsMenu.onclick = function () {
        if (settings.style.maxHeight == '300px') {
            settings.style.maxHeight = '0px';
        } else {
            settings.style.maxHeight = '300px';
        }
    };

    // Pressing Enter in the nick field behaves like clicking Play.
    playerNameInput.addEventListener('keypress', function (e) {
        var key = e.which || e.keyCode;

        if (key === global.KEY_ENTER) {
            if (validNick()) {
                nickErrorText.style.opacity = 0;
                startGame('player');
            } else {
                nickErrorText.style.opacity = 1;
            }
        }
    });
};
// TODO: Break out into GameControls.

// Render settings for food pellets.
var foodConfig = {
    border: 0,
};

// Render settings for player cells and their name labels.
var playerConfig = {
    border: 6,
    textColor: '#FFFFFF',
    textBorder: '#000000',
    textBorderSize: 3,
    defaultSize: 300
};

// Local player state; replaced by server data in the 'welcome' handler.
var player = {
    id: -1,
    x: global.screenWidth / 2,
    y: global.screenHeight / 2,
    screenWidth: global.screenWidth,
    screenHeight: global.screenHeight,
    target: {x: global.screenWidth / 2, y: global.screenHeight / 2}
};
global.player = player;

// World entities, refreshed on every 'serverTellPlayerMove' message.
var foods = [];
var viruses = [];
var fireFood = [];
var users = [];
var leaderboard = [];

// Mouse target the player's cell moves towards.
var target = {x: player.x, y: player.y};
global.target = target;

window.canvas = new Canvas();
window.chat = new ChatClient();

// NOTE(review): `settings` is only declared inside window.onload above, so it
// is not in scope here; confirm which object these toggle handlers are meant
// to reference.
var visibleBorderSetting = document.getElementById('visBord');
visibleBorderSetting.onchange = settings.toggleBorder;

var showMassSetting = document.getElementById('showMass');
showMassSetting.onchange = settings.toggleMass;

var continuitySetting = document.getElementById('continuity');
continuitySetting.onchange = settings.toggleContinuity;

var roundFoodSetting = document.getElementById('roundFood');
roundFoodSetting.onchange = settings.toggleRoundFood;

// Shared canvas element and its 2d context, used by all draw* helpers below.
var c = window.canvas.cv;
var graph = c.getContext('2d');

// On-screen buttons: event '1' = eject mass (feed), '2' = split.
$( "#feed" ).click(function() {
    socket.emit('1');
    window.canvas.reenviar = false;
});

$( "#split" ).click(function() {
    socket.emit('2');
    window.canvas.reenviar = false;
});
// socket stuff.
// Register all client-side handlers on the freshly created socket connection.
function setupSocket(socket) {
    // Handle ping.
    socket.on('pongcheck', function () {
        var latency = Date.now() - global.startPingTime;
        debug('Latency: ' + latency + 'ms');
        window.chat.addSystemLine('Ping: ' + latency + 'ms');
    });

    // Handle error.
    socket.on('connect_failed', function () {
        socket.close();
        global.disconnected = true;
    });

    socket.on('disconnect', function () {
        socket.close();
        global.disconnected = true;
    });

    // Handle connection.
    socket.on('welcome', function (playerSettings) {
        // Adopt the server-assigned player object, then enrich it with local view data.
        player = playerSettings;
        player.name = global.playerName;
        player.screenWidth = global.screenWidth;
        player.screenHeight = global.screenHeight;
        player.target = window.canvas.target;
        global.player = player;
        window.chat.player = player;
        socket.emit('gotit', player);
        global.gameStart = true;
        debug('Game started at: ' + global.gameStart);
        window.chat.addSystemLine('Connected to the game!');
        window.chat.addSystemLine('Type <b>-help</b> for a list of commands.');
        if (global.mobile) {
            // No chat UI on touch devices.
            document.getElementById('gameAreaWrapper').removeChild(document.getElementById('chatbox'));
        }
        c.focus();
    });

    socket.on('gameSetup', function(data) {
        global.gameWidth = data.gameWidth;
        global.gameHeight = data.gameHeight;
        resize();
    });

    socket.on('playerDied', function (data) {
        window.chat.addSystemLine('{GAME} - <b>' + (data.name.length < 1 ? 'An unnamed cell' : data.name) + '</b> was eaten.');
    });

    socket.on('playerDisconnect', function (data) {
        window.chat.addSystemLine('{GAME} - <b>' + (data.name.length < 1 ? 'An unnamed cell' : data.name) + '</b> disconnected.');
    });

    socket.on('playerJoin', function (data) {
        window.chat.addSystemLine('{GAME} - <b>' + (data.name.length < 1 ? 'An unnamed cell' : data.name) + '</b> joined.');
    });

    // Rebuild the leaderboard HTML whenever the server pushes an update.
    socket.on('leaderboard', function (data) {
        leaderboard = data.leaderboard;
        var status = '<span class="title">Leaderboard</span>';
        for (var i = 0; i < leaderboard.length; i++) {
            status += '<br />';
            // The local player's own row is highlighted with the "me" class.
            if (leaderboard[i].id == player.id){
                if(leaderboard[i].name.length !== 0)
                    status += '<span class="me">' + (i + 1) + '. ' + leaderboard[i].name + "</span>";
                else
                    status += '<span class="me">' + (i + 1) + ". An unnamed cell</span>";
            } else {
                if(leaderboard[i].name.length !== 0)
                    status += (i + 1) + '. ' + leaderboard[i].name;
                else
                    status += (i + 1) + '. An unnamed cell';
            }
        }
        //status += '<br />Players: ' + data.players;
        document.getElementById('status').innerHTML = status;
    });

    socket.on('serverMSG', function (data) {
        window.chat.addSystemLine(data);
    });

    // Chat.
    socket.on('serverSendPlayerChat', function (data) {
        window.chat.addChatLine(data.sender, data.message, false);
    });

    // Handle movement.
    socket.on('serverTellPlayerMove', function (userData, foodsList, massList, virusList) {
        // The entry without an id is the local player's own record.
        var playerData;
        for(var i =0; i< userData.length; i++) {
            if(typeof(userData[i].id) == "undefined") {
                playerData = userData[i];
                i = userData.length; // found it; exit the loop
            }
        }
        if(global.playerType == 'player') {
            // Offsets capture how far the player moved since the last update.
            var xoffset = player.x - playerData.x;
            var yoffset = player.y - playerData.y;

            player.x = playerData.x;
            player.y = playerData.y;
            player.hue = playerData.hue;
            player.massTotal = playerData.massTotal;
            player.cells = playerData.cells;
            player.xoffset = isNaN(xoffset) ? 0 : xoffset;
            player.yoffset = isNaN(yoffset) ? 0 : yoffset;
        }
        users = userData;
        foods = foodsList;
        viruses = virusList;
        fireFood = massList;
    });

    // Death.
    socket.on('RIP', function () {
        global.gameStart = false;
        global.died = true;
        // Show the death screen briefly, then return to the start menu.
        window.setTimeout(function() {
            document.getElementById('gameAreaWrapper').style.opacity = 0;
            document.getElementById('startMenuWrapper').style.maxHeight = '1000px';
            global.died = false;
            if (global.animLoopHandle) {
                window.cancelAnimationFrame(global.animLoopHandle);
                global.animLoopHandle = undefined;
            }
        }, 2500);
    });

    socket.on('kick', function (data) {
        global.gameStart = false;
        reason = data;
        global.kicked = true;
        socket.close();
    });

    socket.on('virusSplit', function (virusCell) {
        socket.emit('2', virusCell);
        // NOTE(review): `reenviar` is assigned as an implicit global here; the
        // feed/split button handlers use `window.canvas.reenviar` — confirm
        // which was intended.
        reenviar = false;
    });
}
// Trace an `sides`-gon approximating a circle at (centerX, centerY) and paint
// it with the context's current stroke and fill styles.
function drawCircle(centerX, centerY, radius, sides) {
    graph.beginPath();

    for (var step = 0; step < sides; step++) {
        var angle = (step / sides) * 2 * Math.PI;
        graph.lineTo(centerX + radius * Math.sin(angle),
                     centerY + radius * Math.cos(angle));
    }

    graph.closePath();
    graph.stroke();
    graph.fill();
}
// Draw a single food pellet, translated into viewport coordinates.
function drawFood(food) {
    var screenX = food.x - player.x + global.screenWidth / 2;
    var screenY = food.y - player.y + global.screenHeight / 2;

    graph.strokeStyle = 'hsl(' + food.hue + ', 100%, 45%)';
    graph.fillStyle = 'hsl(' + food.hue + ', 100%, 50%)';
    graph.lineWidth = foodConfig.border;

    drawCircle(screenX, screenY, food.radius, global.foodSides);
}
// Draw a single virus, translated into viewport coordinates.
function drawVirus(virus) {
    var screenX = virus.x - player.x + global.screenWidth / 2;
    var screenY = virus.y - player.y + global.screenHeight / 2;

    graph.strokeStyle = virus.stroke;
    graph.fillStyle = virus.fill;
    graph.lineWidth = virus.strokeWidth;

    drawCircle(screenX, screenY, virus.radius, global.virusSides);
}
// Draw a single ejected-mass blob, translated into viewport coordinates.
function drawFireFood(mass) {
    var screenX = mass.x - player.x + global.screenWidth / 2;
    var screenY = mass.y - player.y + global.screenHeight / 2;

    graph.strokeStyle = 'hsl(' + mass.hue + ', 100%, 45%)';
    graph.fillStyle = 'hsl(' + mass.hue + ', 100%, 50%)';
    graph.lineWidth = playerConfig.border+10;

    // Heavier blobs get more polygon sides so they stay round.
    drawCircle(screenX, screenY, mass.radius-5, 18 + (~~(mass.masa/5)));
}
// Draw every cell listed in `order` (pre-sorted small-to-large so big cells
// render on top) as a many-sided polygon, clamped to the board edges, with the
// owner's name and optionally its mass as labels.
function drawPlayers(order) {
    // Top-left corner of the viewport in world coordinates.
    var start = {
        x: player.x - (global.screenWidth / 2),
        y: player.y - (global.screenHeight / 2)
    };

    for(var z=0; z<order.length; z++)
    {
        var userCurrent = users[order[z].nCell];
        var cellCurrent = users[order[z].nCell].cells[order[z].nDiv];

        var x=0;
        var y=0;

        // Bigger cells get more polygon vertices so they stay round.
        var points = 500 + ~~(cellCurrent.mass/5);
        var increase = Math.PI * 2 / points;

        graph.strokeStyle = 'hsl(' + userCurrent.hue + ', 100%, 45%)';
        graph.fillStyle = 'hsl(' + userCurrent.hue + ', 100%, 50%)';
        graph.lineWidth = playerConfig.border;

        var xstore = [];
        var ystore = [];

        // NOTE(review): adding 0.0 is a no-op; `spin` only advances by `increase` below.
        global.spin += 0.0;

        // Cell centre in screen coordinates.
        var circle = {
            x: cellCurrent.x - start.x,
            y: cellCurrent.y - start.y
        };

        // Compute the polygon vertices, clamped so the cell never draws past
        // the board edges.
        for (var i = 0; i < points; i++) {

            x = cellCurrent.radius * Math.cos(global.spin) + circle.x;
            y = cellCurrent.radius * Math.sin(global.spin) + circle.y;
            if(typeof(userCurrent.id) == "undefined") {
                // Entry without an id is the local player's own record.
                x = valueInRange(-userCurrent.x + global.screenWidth / 2,
                                 global.gameWidth - userCurrent.x + global.screenWidth / 2, x);
                y = valueInRange(-userCurrent.y + global.screenHeight / 2,
                                 global.gameHeight - userCurrent.y + global.screenHeight / 2, y);
            } else {
                x = valueInRange(-cellCurrent.x - player.x + global.screenWidth / 2 + (cellCurrent.radius/3),
                                 global.gameWidth - cellCurrent.x + global.gameWidth - player.x + global.screenWidth / 2 - (cellCurrent.radius/3), x);
                y = valueInRange(-cellCurrent.y - player.y + global.screenHeight / 2 + (cellCurrent.radius/3),
                                 global.gameHeight - cellCurrent.y + global.gameHeight - player.y + global.screenHeight / 2 - (cellCurrent.radius/3) , y);
            }
            global.spin += increase;
            xstore[i] = x;
            ystore[i] = y;
        }
        /*if (wiggle >= player.radius/ 3) inc = -1;
        *if (wiggle <= player.radius / -3) inc = +1;
        *wiggle += inc;
        */
        // Connect the stored vertices into a single closed path and paint it.
        for (i = 0; i < points; ++i) {
            if (i === 0) {
                graph.beginPath();
                graph.moveTo(xstore[i], ystore[i]);
            } else if (i > 0 && i < points - 1) {
                graph.lineTo(xstore[i], ystore[i]);
            } else {
                graph.lineTo(xstore[i], ystore[i]);
                graph.lineTo(xstore[0], ystore[0]);
            }
        }
        graph.lineJoin = 'round';
        graph.lineCap = 'round';
        graph.fill();
        graph.stroke();

        // Label: own cell shows the local nick; others show the server-sent name.
        var nameCell = "";
        if(typeof(userCurrent.id) == "undefined")
            nameCell = player.name;
        else
            nameCell = userCurrent.name;

        var fontSize = Math.max(cellCurrent.radius / 3, 12);
        graph.lineWidth = playerConfig.textBorderSize;
        graph.fillStyle = playerConfig.textColor;
        graph.strokeStyle = playerConfig.textBorder;
        graph.miterLimit = 1;
        graph.lineJoin = 'round';
        graph.textAlign = 'center';
        graph.textBaseline = 'middle';
        graph.font = 'bold ' + fontSize + 'px sans-serif';

        if (global.toggleMassState === 0) {
            graph.strokeText(nameCell, circle.x, circle.y);
            graph.fillText(nameCell, circle.x, circle.y);
        } else {
            // Mass display enabled: draw the name, then the rounded mass below it.
            graph.strokeText(nameCell, circle.x, circle.y);
            graph.fillText(nameCell, circle.x, circle.y);
            graph.font = 'bold ' + Math.max(fontSize / 3 * 2, 10) + 'px sans-serif';
            if(nameCell.length === 0) fontSize = 0;
            graph.strokeText(Math.round(cellCurrent.mass), circle.x, circle.y+fontSize);
            graph.fillText(Math.round(cellCurrent.mass), circle.x, circle.y+fontSize);
        }
    }
}
function valueInRange(min, max, value) {
return Math.min(max, Math.max(min, value));
}
// Paint the faint background grid, offset by the player's position so the
// grid scrolls with the world.
function drawgrid() {
    var spacing = global.screenHeight / 18;

    graph.lineWidth = 1;
    graph.strokeStyle = global.lineColor;
    graph.globalAlpha = 0.15;
    graph.beginPath();

    // Vertical lines.
    for (var gx = global.xoffset - player.x; gx < global.screenWidth; gx += spacing) {
        graph.moveTo(gx, 0);
        graph.lineTo(gx, global.screenHeight);
    }

    // Horizontal lines (same spacing as the vertical ones).
    for (var gy = global.yoffset - player.y; gy < global.screenHeight; gy += spacing) {
        graph.moveTo(0, gy);
        graph.lineTo(global.screenWidth, gy);
    }

    graph.stroke();
    graph.globalAlpha = 1;
}
// Draw whichever world edges are currently inside the viewport.
function drawborder() {
    graph.lineWidth = 1;
    // NOTE(review): playerConfig defines no `borderColor`, so this assigns
    // undefined; every branch below overrides strokeStyle with global.lineColor.
    graph.strokeStyle = playerConfig.borderColor;

    // Left-vertical.
    if (player.x <= global.screenWidth/2) {
        graph.beginPath();
        // NOTE(review): `0 ? a : b` always evaluates to `b`; this looks like an
        // inverted ternary left over from a refactor — confirm the intended clamp.
        graph.moveTo(global.screenWidth/2 - player.x, 0 ? player.y > global.screenHeight/2 : global.screenHeight/2 - player.y);
        graph.lineTo(global.screenWidth/2 - player.x, global.gameHeight + global.screenHeight/2 - player.y);
        graph.strokeStyle = global.lineColor;
        graph.stroke();
    }

    // Top-horizontal.
    if (player.y <= global.screenHeight/2) {
        graph.beginPath();
        // NOTE(review): same inverted-looking `0 ? a : b` ternary as above.
        graph.moveTo(0 ? player.x > global.screenWidth/2 : global.screenWidth/2 - player.x, global.screenHeight/2 - player.y);
        graph.lineTo(global.gameWidth + global.screenWidth/2 - player.x, global.screenHeight/2 - player.y);
        graph.strokeStyle = global.lineColor;
        graph.stroke();
    }

    // Right-vertical.
    if (global.gameWidth - player.x <= global.screenWidth/2) {
        graph.beginPath();
        graph.moveTo(global.gameWidth + global.screenWidth/2 - player.x,
                     global.screenHeight/2 - player.y);
        graph.lineTo(global.gameWidth + global.screenWidth/2 - player.x,
                     global.gameHeight + global.screenHeight/2 - player.y);
        graph.strokeStyle = global.lineColor;
        graph.stroke();
    }

    // Bottom-horizontal.
    if (global.gameHeight - player.y <= global.screenHeight/2) {
        graph.beginPath();
        graph.moveTo(global.gameWidth + global.screenWidth/2 - player.x,
                     global.gameHeight + global.screenHeight/2 - player.y);
        graph.lineTo(global.screenWidth/2 - player.x,
                     global.gameHeight + global.screenHeight/2 - player.y);
        graph.strokeStyle = global.lineColor;
        graph.stroke();
    }
}
// Cross-browser requestAnimationFrame shim, falling back to a ~60fps timer.
window.requestAnimFrame = (function() {
    return window.requestAnimationFrame ||
        window.webkitRequestAnimationFrame ||
        window.mozRequestAnimationFrame ||
        window.msRequestAnimationFrame ||
        function( callback ) {
            window.setTimeout(callback, 1000 / 60);
        };
})();
// Resolve the vendor-specific cancelAnimationFrame implementation once.
// (The original IIFE declared an unused `handle` parameter that was never passed.)
window.cancelAnimFrame = (function () {
    return window.cancelAnimationFrame ||
           window.mozCancelAnimationFrame;
})();
// Main render loop: reschedules itself for the next animation frame, then
// renders the current frame. The handle allows the 'RIP' handler to stop it.
function animloop() {
    global.animLoopHandle = window.requestAnimFrame(animloop);
    gameLoop();
}
// Per-frame update: choose the correct screen (death / live game / game-over /
// disconnect) and render it onto the shared canvas context.
function gameLoop() {
    if (global.died) {
        // Death overlay, shown until the 'RIP' timeout resets the menu.
        graph.fillStyle = '#333333';
        graph.fillRect(0, 0, global.screenWidth, global.screenHeight);

        graph.textAlign = 'center';
        graph.fillStyle = '#FFFFFF';
        graph.font = 'bold 30px sans-serif';
        graph.fillText('You died!', global.screenWidth / 2, global.screenHeight / 2);
    }
    else if (!global.disconnected) {
        if (global.gameStart) {
            // Normal frame: background, grid, entities, border, players, heartbeat.
            graph.fillStyle = global.backgroundColor;
            graph.fillRect(0, 0, global.screenWidth, global.screenHeight);

            drawgrid();
            foods.forEach(drawFood);
            fireFood.forEach(drawFireFood);
            viruses.forEach(drawVirus);

            if (global.borderDraw) {
                drawborder();
            }

            // Flatten every user's cells and sort smallest-first so larger
            // cells are drawn on top.
            var orderMass = [];
            for(var i=0; i<users.length; i++) {
                for(var j=0; j<users[i].cells.length; j++) {
                    orderMass.push({
                        nCell: i,
                        nDiv: j,
                        mass: users[i].cells[j].mass
                    });
                }
            }
            orderMass.sort(function(obj1, obj2) {
                return obj1.mass - obj2.mass;
            });

            drawPlayers(orderMass);
            socket.emit('0', window.canvas.target); // playerSendTarget "Heartbeat".

        } else {
            graph.fillStyle = '#333333';
            graph.fillRect(0, 0, global.screenWidth, global.screenHeight);

            graph.textAlign = 'center';
            graph.fillStyle = '#FFFFFF';
            graph.font = 'bold 30px sans-serif';
            graph.fillText('Game Over!', global.screenWidth / 2, global.screenHeight / 2);
        }
    } else {
        // Disconnected: distinguish a kick (with optional reason) from a drop.
        graph.fillStyle = '#333333';
        graph.fillRect(0, 0, global.screenWidth, global.screenHeight);

        graph.textAlign = 'center';
        graph.fillStyle = '#FFFFFF';
        graph.font = 'bold 30px sans-serif';
        if (global.kicked) {
            if (reason !== '') {
                graph.fillText('You were kicked for:', global.screenWidth / 2, global.screenHeight / 2 - 20);
                graph.fillText(reason, global.screenWidth / 2, global.screenHeight / 2 + 20);
            }
            else {
                graph.fillText('You were kicked!', global.screenWidth / 2, global.screenHeight / 2);
            }
        }
        else {
            graph.fillText('Disconnected!', global.screenWidth / 2, global.screenHeight / 2);
        }
    }
}
window.addEventListener('resize', resize);

// Keep the canvas, player viewport and server in sync with the window size.
// Spectators view the whole game board instead of a window-sized viewport.
function resize() {
    if (!socket) return;

    player.screenWidth = c.width = global.screenWidth = global.playerType == 'player' ? window.innerWidth : global.gameWidth;
    player.screenHeight = c.height = global.screenHeight = global.playerType == 'player' ? window.innerHeight : global.gameHeight;

    if (global.playerType == 'spectate') {
        // Centre the spectator camera on the board.
        player.x = global.gameWidth / 2;
        player.y = global.gameHeight / 2;
    }

    socket.emit('windowResized', { screenWidth: global.screenWidth, screenHeight: global.screenHeight });
}
|
import unittest
import textwrap
import antlr3
import antlr3.tree
import testbase
import sys
class TestRewriteAST(testbase.ANTLRTest):
def parserClass(self, base):
    """Return a ``TParser`` subclass of ``base`` that records output, traces and errors in memory."""
    class TParser(base):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)

            self._errors = []   # messages collected via emitErrorMessage()
            self._output = ""   # text accumulated via capture()

        def capture(self, t):
            self._output += t

        def traceIn(self, ruleName, ruleIndex):
            # NOTE(review): `self.traces` is never initialized in this class;
            # presumably the harness sets it before enabling tracing — confirm.
            self.traces.append('>'+ruleName)

        def traceOut(self, ruleName, ruleIndex):
            self.traces.append('<'+ruleName)

        def emitErrorMessage(self, msg):
            # Collect errors instead of printing so tests can assert on them.
            self._errors.append(msg)

    return TParser
def lexerClass(self, base):
    """Return a ``TLexer`` subclass of ``base`` that records output and crashes on lexer errors."""
    class TLexer(base):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)

            self._output = ""   # text accumulated via capture()

        def capture(self, t):
            self._output += t

        def traceIn(self, ruleName, ruleIndex):
            # NOTE(review): `self.traces` is never initialized in this class;
            # presumably the harness sets it before enabling tracing — confirm.
            self.traces.append('>'+ruleName)

        def traceOut(self, ruleName, ruleIndex):
            self.traces.append('<'+ruleName)

        def recover(self, input, re):
            # no error recovery yet, just crash!
            raise

    return TLexer
def execParser(self, grammar, grammarEntry, input, expectErrors=False):
    """Compile ``grammar`` inline, run rule ``grammarEntry`` on ``input`` and return the AST as text.

    When ``expectErrors`` is True, parse errors do not fail the test and a
    ``(result, errors)`` tuple is returned instead of just the result string.
    """
    lexerCls, parserCls = self.compileInlineGrammar(grammar)

    cStream = antlr3.StringStream(input)
    lexer = lexerCls(cStream)
    tStream = antlr3.CommonTokenStream(lexer)
    parser = parserCls(tStream)
    r = getattr(parser, grammarEntry)()

    if not expectErrors:
        self.assertEqual(len(parser._errors), 0, parser._errors)

    # Concatenate the rule's explicit `result` attribute (if any) with the
    # string rendering of the rewritten tree.
    result = ""

    if r:
        if hasattr(r, 'result'):
            result += r.result

        if r.tree:
            result += r.tree.toStringTree()

    if not expectErrors:
        return result

    else:
        return result, parser._errors
def execTreeParser(self, grammar, grammarEntry, treeGrammar, treeEntry, input):
    """Parse ``input`` with ``grammar``, walk the result with ``treeGrammar``, and return the final tree as text."""
    lexerCls, parserCls = self.compileInlineGrammar(grammar)
    walkerCls = self.compileInlineGrammar(treeGrammar)

    cStream = antlr3.StringStream(input)
    lexer = lexerCls(cStream)
    tStream = antlr3.CommonTokenStream(lexer)
    parser = parserCls(tStream)
    r = getattr(parser, grammarEntry)()

    # Feed the parser's AST into the tree walker.
    nodes = antlr3.tree.CommonTreeNodeStream(r.tree)
    nodes.setTokenStream(tStream)
    walker = walkerCls(nodes)
    r = getattr(walker, treeEntry)()

    if r:
        return r.tree.toStringTree()
    return ""
def testDelete(self):
    # A rewrite to nothing ('-> ;') removes the matched tokens entirely.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID INT -> ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc 34")
    self.assertEqual("", found)

def testSingleToken(self):
    # Rewriting a single token to itself keeps it in the tree.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> ID;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("abc", found)

def testSingleTokenToNewNode(self):
    # ID["x"] creates a new ID node carrying the text "x".
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> ID["x"];
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("x", found)

def testSingleTokenToNewNodeRoot(self):
    # ^(ID["x"] INT) makes the new node the root with INT as a child.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> ^(ID["x"] INT);
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("(x INT)", found)

def testSingleTokenToNewNode2(self):
    # Allow creation of new nodes w/o args.
    grammar = textwrap.dedent(
        r'''
grammar TT;
options {language=Python3;output=AST;}
a : ID -> ID[ ];
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("ID", found)

def testSingleCharLiteral(self):
    # Char literals can be rewritten like named tokens.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : 'c' -> 'c';
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "c")
    self.assertEqual("c", found)

def testSingleStringLiteral(self):
    # String literals can be rewritten like named tokens.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : 'ick' -> 'ick';
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "ick")
    self.assertEqual("ick", found)

def testSingleRule(self):
    # Rule references can appear on the right-hand side of a rewrite.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : b -> b;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("abc", found)
def testReorderTokens(self):
    # A rewrite can reorder matched tokens.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID INT -> INT ID;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc 34")
    self.assertEqual("34 abc", found)

def testReorderTokenAndRule(self):
    # Tokens and rule results can be reordered together.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : b INT -> INT b;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc 34")
    self.assertEqual("34 abc", found)

def testTokenTree(self):
    # ^(INT ID) builds a tree with INT as root and ID as child.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID INT -> ^(INT ID);
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc 34")
    self.assertEqual("(34 abc)", found)

def testTokenTreeAfterOtherStuff(self):
    # A flat token may precede a tree in the same rewrite.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : 'void' ID INT -> 'void' ^(INT ID);
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "void abc 34")
    self.assertEqual("void (34 abc)", found)

def testNestedTokenTreeWithOuterLoop(self):
    # verify that ID and INT both iterate over outer index variable
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
tokens {DUH;}
a : ID INT ID INT -> ^( DUH ID ^( DUH INT) )+ ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a 1 b 2")
    self.assertEqual("(DUH a (DUH 1)) (DUH b (DUH 2))", found)
def testOptionalSingleToken(self):
    # 'ID?' in a rewrite emits the token when it was matched.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> ID? ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("abc", found)

def testClosureSingleToken(self):
    # 'ID*' in a rewrite emits every matched ID.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID ID -> ID* ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a b")
    self.assertEqual("a b", found)

def testPositiveClosureSingleToken(self):
    # 'ID+' behaves like 'ID*' when at least one ID was matched.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID ID -> ID+ ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a b")
    self.assertEqual("a b", found)

def testOptionalSingleRule(self):
    # 'b?' in a rewrite emits the rule result when it was matched.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : b -> b?;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "abc")
    self.assertEqual("abc", found)

def testClosureSingleRule(self):
    # 'b*' in a rewrite emits every matched rule result.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : b b -> b*;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a b")
    self.assertEqual("a b", found)

def testClosureOfLabel(self):
    # '$x*' iterates over a list label built with 'x+='.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : x+=b x+=b -> $x*;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a b")
    self.assertEqual("a b", found)

def testOptionalLabelNoListLabel(self):
    # '$x?' works with a plain (non-list) label.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : (x=ID)? -> $x?;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a")
    self.assertEqual("a", found)

def testPositiveClosureSingleRule(self):
    # 'b+' emits every matched rule result (at least one required).
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : b b -> b+;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')

    found = self.execParser(grammar, "a", "a b")
    self.assertEqual("a b", found)
def testSinglePredicateT(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> {True}? ID -> ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "abc")
self.assertEqual("abc", found)
def testSinglePredicateF(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID -> {False}? ID -> ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "abc")
self.assertEqual("", found)
def testMultiplePredicate(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID INT -> {False}? ID
-> {True}? INT
->
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a 2")
self.assertEqual("2", found)
def testMultiplePredicateTrees(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID INT -> {False}? ^(ID INT)
-> {True}? ^(INT ID)
-> ID
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a 2")
self.assertEqual("(2 a)", found)
def testSimpleTree(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : op INT -> ^(op INT);
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "-34")
self.assertEqual("(- 34)", found)
def testSimpleTree2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : op INT -> ^(INT op);
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "+ 34")
self.assertEqual("(34 +)", found)
def testNestedTrees(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : 'var' (ID ':' type ';')+ -> ^('var' ^(':' ID type)+) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "var a:int; b:float;")
self.assertEqual("(var (: a int) (: b float))", found)
def testImaginaryTokenCopy(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {VAR;}
a : ID (',' ID)*-> ^(VAR ID)+ ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a,b,c")
self.assertEqual("(VAR a) (VAR b) (VAR c)", found)
def testTokenUnreferencedOnLeftButDefined(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {VAR;}
a : b -> ID ;
b : ID ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("ID", found)
def testImaginaryTokenCopySetText(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {VAR;}
a : ID (',' ID)*-> ^(VAR["var"] ID)+ ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a,b,c")
self.assertEqual("(var a) (var b) (var c)", found)
def testImaginaryTokenNoCopyFromToken(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : lc='{' ID+ '}' -> ^(BLOCK[$lc] ID+) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "{a b c}")
self.assertEqual("({ a b c)", found)
def testImaginaryTokenNoCopyFromTokenSetText(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : lc='{' ID+ '}' -> ^(BLOCK[$lc,"block"] ID+) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "{a b c}")
self.assertEqual("(block a b c)", found)
def testMixedRewriteAndAutoAST(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : b b^ ; // 2nd b matches only an INT; can make it root
b : ID INT -> INT ID
| INT
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a 1 2")
self.assertEqual("(2 1 a)", found)
def testSubruleWithRewrite(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : b b ;
b : (ID INT -> INT ID | INT INT -> INT+ )
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a 1 2 3")
self.assertEqual("1 a 2 3", found)
def testSubruleWithRewrite2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {TYPE;}
a : b b ;
b : 'int'
( ID -> ^(TYPE 'int' ID)
| ID '=' INT -> ^(TYPE 'int' ID INT)
)
';'
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a; int b=3;")
self.assertEqual("(TYPE int a) (TYPE int b 3)", found)
def testNestedRewriteShutsOffAutoAST(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : b b ;
b : ID ( ID (last=ID -> $last)+ ) ';' // get last ID
| INT // should still get auto AST construction
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b c d; 42")
self.assertEqual("d 42", found)
def testRewriteActions(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : atom -> ^({self.adaptor.create(INT,"9")} atom) ;
atom : INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "3")
self.assertEqual("(9 3)", found)
def testRewriteActions2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : atom -> {self.adaptor.create(INT,"9")} atom ;
atom : INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "3")
self.assertEqual("9 3", found)
def testRefToOldValue(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : (atom -> atom) (op='+' r=atom -> ^($op $a $r) )* ;
atom : INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "3+4+5")
self.assertEqual("(+ (+ 3 4) 5)", found)
def testCopySemanticsForRules(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : atom -> ^(atom atom) ; // NOT CYCLE! (dup atom)
atom : INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "3")
self.assertEqual("(3 3)", found)
def testCopySemanticsForRules2(self):
# copy type as a root for each invocation of (...)+ in rewrite
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : type ID (',' ID)* ';' -> ^(type ID)+ ;
type : 'int' ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a,b,c;")
self.assertEqual("(int a) (int b) (int c)", found)
def testCopySemanticsForRules3(self):
# copy type *and* modifier even though it's optional
# for each invocation of (...)+ in rewrite
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : modifier? type ID (',' ID)* ';' -> ^(type modifier? ID)+ ;
type : 'int' ;
modifier : 'public' ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "public int a,b,c;")
self.assertEqual("(int public a) (int public b) (int public c)", found)
def testCopySemanticsForRules3Double(self):
# copy type *and* modifier even though it's optional
# for each invocation of (...)+ in rewrite
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : modifier? type ID (',' ID)* ';' -> ^(type modifier? ID)+ ^(type modifier? ID)+ ;
type : 'int' ;
modifier : 'public' ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "public int a,b,c;")
self.assertEqual("(int public a) (int public b) (int public c) (int public a) (int public b) (int public c)", found)
def testCopySemanticsForRules4(self):
# copy type *and* modifier even though it's optional
# for each invocation of (...)+ in rewrite
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {MOD;}
a : modifier? type ID (',' ID)* ';' -> ^(type ^(MOD modifier)? ID)+ ;
type : 'int' ;
modifier : 'public' ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "public int a,b,c;")
self.assertEqual("(int (MOD public) a) (int (MOD public) b) (int (MOD public) c)", found)
def testCopySemanticsLists(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {MOD;}
a : ID (',' ID)* ';' -> ID+ ID+ ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a,b,c;")
self.assertEqual("a b c a b c", found)
def testCopyRuleLabel(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=b -> $x $x;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("a a", found)
def testCopyRuleLabel2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=b -> ^($x $x);
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("(a a)", found)
def testQueueingOfTokens(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : 'int' ID (',' ID)* ';' -> ^('int' ID+) ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a,b,c;")
self.assertEqual("(int a b c)", found)
def testCopyOfTokens(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : 'int' ID ';' -> 'int' ID 'int' ID ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a;")
self.assertEqual("int a int a", found)
def testTokenCopyInLoop(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : 'int' ID (',' ID)* ';' -> ^('int' ID)+ ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a,b,c;")
self.assertEqual("(int a) (int b) (int c)", found)
def testTokenCopyInLoopAgainstTwoOthers(self):
# must smear 'int' copies across as root of multiple trees
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : 'int' ID ':' INT (',' ID ':' INT)* ';' -> ^('int' ID INT)+ ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "int a:1,b:2,c:3;")
self.assertEqual("(int a 1) (int b 2) (int c 3)", found)
def testListRefdOneAtATime(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID+ -> ID ID ID ; // works if 3 input IDs
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b c")
self.assertEqual("a b c", found)
def testSplitListWithLabels(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {VAR;}
a : first=ID others+=ID* -> $first VAR $others+ ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b c")
self.assertEqual("a VAR b c", found)
def testComplicatedMelange(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : A A b=B B b=B c+=C C c+=C D {s=$D.text} -> A+ B+ C+ D ;
type : 'int' | 'float' ;
A : 'a' ;
B : 'b' ;
C : 'c' ;
D : 'd' ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a a b b b c c c d")
self.assertEqual("a a b b b c c c d", found)
def testRuleLabel(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=b -> $x;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("a", found)
def testAmbiguousRule(self):
    """Recursive rule 'a' consumes leading IDs; the rewrite keeps only the INT."""
    source = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
a : ID a -> a | INT ;
ID : 'a'..'z'+ ;
INT: '0'..'9'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
    self.assertEqual("34", self.execParser(source, "a", "abc 34"))
def testRuleListLabel(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x+=b x+=b -> $x+;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("a b", found)
def testRuleListLabel2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x+=b x+=b -> $x $x*;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("a b", found)
def testOptional(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=b (y=b)? -> $x $y?;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("a", found)
def testOptional2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=ID (y=b)? -> $x $y?;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("a b", found)
def testOptional3(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x=ID (y=b)? -> ($x $y)?;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("a b", found)
def testOptional4(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x+=ID (y=b)? -> ($x $y)?;
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("a b", found)
def testOptional5(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : ID -> ID? ; // match an ID to optional ID
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a")
self.assertEqual("a", found)
def testArbitraryExprType(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : x+=b x+=b -> {CommonTree(None)};
b : ID ;
ID : 'a'..'z'+ ;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "a b")
self.assertEqual("", found)
def testSet(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a: (INT|ID)+ -> INT+ ID+ ;
INT: '0'..'9'+;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "2 a 34 de")
self.assertEqual("2 34 a de", found)
def testSet2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a: (INT|ID) -> INT? ID? ;
INT: '0'..'9'+;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "2")
self.assertEqual("2", found)
@testbase.broken("http://www.antlr.org:8888/browse/ANTLR-162",
antlr3.tree.RewriteEmptyStreamException)
def testSetWithLabel(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : x=(INT|ID) -> $x ;
INT: '0'..'9'+;
ID : 'a'..'z'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "2")
self.assertEqual("2", found)
def testRewriteAction(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
tokens { FLOAT; }
r
: INT -> {CommonTree(CommonToken(type=FLOAT, text=$INT.text+".0"))}
;
INT : '0'..'9'+;
WS: (' ' | '\n' | '\t')+ {$channel = HIDDEN};
''')
found = self.execParser(grammar, "r", "25")
self.assertEqual("25.0", found)
def testOptionalSubruleWithoutRealElements(self):
# copy type *and* modifier even though it's optional
# for each invocation of (...)+ in rewrite
grammar = textwrap.dedent(
r"""
grammar T;
options {language=Python3;output=AST;}
tokens {PARMS;}
modulo
: 'modulo' ID ('(' parms+ ')')? -> ^('modulo' ID ^(PARMS parms+)?)
;
parms : '#'|ID;
ID : ('a'..'z' | 'A'..'Z')+;
WS : (' '|'\n') {$channel=HIDDEN} ;
""")
found = self.execParser(grammar, "modulo", "modulo abc (x y #)")
self.assertEqual("(modulo abc (PARMS x y #))", found)
## C A R D I N A L I T Y I S S U E S
def testCardinality(self):
    # Two IDs vs three INTs: "(ID INT)+" cannot pair the token streams
    # evenly, so the rewrite engine must raise RewriteCardinalityException.
    grammar = textwrap.dedent(
        r'''
grammar T;
options {language=Python3;output=AST;}
tokens {BLOCK;}
a : ID ID INT INT INT -> (ID INT)+;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
    self.assertRaises(antlr3.tree.RewriteCardinalityException,
                      self.execParser, grammar, "a", "a b 3 4 5")
def testCardinality2(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID+ -> ID ID ID ; // only 2 input IDs
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
self.assertRaises(antlr3.tree.RewriteCardinalityException,
self.execParser, grammar, "a", "a b")
def testCardinality3(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID? INT -> ID INT ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
self.assertRaises(antlr3.tree.RewriteEmptyStreamException,
self.execParser, grammar, "a", "3")
def testLoopCardinality(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID? INT -> ID+ INT ;
op : '+'|'-' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
self.assertRaises(antlr3.tree.RewriteEarlyExitException,
self.execParser, grammar, "a", "3")
def testWildcard(self):
grammar = textwrap.dedent(
r'''
grammar T;
options {language=Python3;output=AST;}
a : ID c=. -> $c;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found = self.execParser(grammar, "a", "abc 34")
self.assertEqual("34", found)
# E R R O R S
def testExtraTokenInSimpleDecl(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
tokens {EXPR;}
decl : type ID '=' INT ';' -> ^(EXPR type ID INT) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "decl", "int 34 x=1;",
expectErrors=True)
self.assertEqual(["line 1:4 extraneous input '34' expecting ID"],
errors)
self.assertEqual("(EXPR int x 1)", found) # tree gets correct x and 1 tokens
#@testbase.broken("FIXME", AssertionError)
def testMissingIDInSimpleDecl(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
tokens {EXPR;}
decl : type ID '=' INT ';' -> ^(EXPR type ID INT) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "decl", "int =1;",
expectErrors=True)
self.assertEqual(["line 1:4 missing ID at '='"], errors)
self.assertEqual("(EXPR int <missing ID> 1)", found) # tree gets invented ID token
def testMissingSetInSimpleDecl(self):
    """'x' matches neither 'int' nor 'float'; the parser reports the
    mismatched input and inserts an error node for the missing type,
    while the rewrite still builds the EXPR tree around it."""
    source = textwrap.dedent(
        r'''
grammar foo;
options {language=Python3;output=AST;}
tokens {EXPR;}
decl : type ID '=' INT ';' -> ^(EXPR type ID INT) ;
type : 'int' | 'float' ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
    found, errors = self.execParser(source, "decl", "x=1;", expectErrors=True)
    self.assertEqual(["line 1:0 mismatched input 'x' expecting set None"], errors)
    # tree gets an error node where the type should have been
    self.assertEqual("(EXPR <error: x> x 1)", found)
def testMissingTokenGivesErrorNode(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
a : ID INT -> ID INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "a", "abc",
expectErrors=True)
self.assertEqual(["line 1:3 missing INT at '<EOF>'"], errors)
# doesn't do in-line recovery for sets (yet?)
self.assertEqual("abc <missing INT>", found)
def testExtraTokenGivesErrorNode(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
a : b c -> b c;
b : ID -> ID ;
c : INT -> INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "a", "abc ick 34",
expectErrors=True)
self.assertEqual(["line 1:4 extraneous input 'ick' expecting INT"],
errors)
self.assertEqual("abc 34", found)
#@testbase.broken("FIXME", AssertionError)
def testMissingFirstTokenGivesErrorNode(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
a : ID INT -> ID INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "a", "34", expectErrors=True)
self.assertEqual(["line 1:0 missing ID at '34'"], errors)
self.assertEqual("<missing ID> 34", found)
#@testbase.broken("FIXME", AssertionError)
def testMissingFirstTokenGivesErrorNode2(self):
grammar = textwrap.dedent(
r'''
grammar foo;
options {language=Python3;output=AST;}
a : b c -> b c;
b : ID -> ID ;
c : INT -> INT ;
ID : 'a'..'z'+ ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
found, errors = self.execParser(grammar, "a", "34", expectErrors=True)
# finds an error at the first token, 34, and re-syncs.
# re-synchronizing does not consume a token because 34 follows
# ref to rule b (start of c). It then matches 34 in c.
self.assertEqual(["line 1:0 missing ID at '34'"], errors)
self.assertEqual("<missing ID> 34", found)
def testNoViableAltGivesErrorNode(self):
    grammar = textwrap.dedent(
        r'''
grammar foo;
options {language=Python3;output=AST;}
a : b -> b | c -> c;
b : ID -> ID ;
c : INT -> INT ;
ID : 'a'..'z'+ ;
S : '*' ;
INT : '0'..'9'+;
WS : (' '|'\n') {$channel=HIDDEN} ;
''')
    found, errors = self.execParser(grammar, "a", "*", expectErrors=True)
    # Input '*' (token S) matches neither alternative of rule a, so the
    # parser reports "no viable alternative" and wraps the offending
    # token in an error node instead of building a normal AST.
    self.assertEqual(["line 1:0 no viable alternative at input '*'"],
                     errors);
    self.assertEqual("<unexpected: [@0,0:0='*',<S>,1:0], resync=*>",
                     found)
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: lambda_alias
version_added: 1.0.0
short_description: Creates, updates or deletes AWS Lambda function aliases
description:
- This module allows the management of AWS Lambda functions aliases via the Ansible
framework. It is idempotent and supports "Check" mode. Use module M(community.aws.lambda) to manage the lambda function
itself and M(community.aws.lambda_event) to manage event source mappings.
author: Pierre Jodouin (@pjodouin), Ryan Scott Brown (@ryansb)
options:
function_name:
description:
- The name of the function alias.
required: true
type: str
state:
description:
- Describes the desired state.
default: "present"
choices: ["present", "absent"]
type: str
name:
description:
- Name of the function alias.
required: true
aliases: ['alias_name']
type: str
description:
description:
- A short, user-defined function alias description.
type: str
function_version:
description:
- Version associated with the Lambda function alias.
A value of 0 (or omitted parameter) sets the alias to the $LATEST version.
aliases: ['version']
type: int
requirements:
- boto3
extends_documentation_fragment:
- amazon.aws.aws
- amazon.aws.ec2
'''
EXAMPLES = '''
---
# Simple example to create a lambda function and publish a version
- hosts: localhost
gather_facts: no
vars:
state: present
project_folder: /path/to/deployment/package
deployment_package: lambda.zip
account: 123456789012
production_version: 5
tasks:
- name: AWS Lambda Function
lambda:
state: "{{ state | default('present') }}"
name: myLambdaFunction
publish: True
description: lambda function description
code_s3_bucket: package-bucket
code_s3_key: "lambda/{{ deployment_package }}"
local_path: "{{ project_folder }}/{{ deployment_package }}"
runtime: python2.7
timeout: 5
handler: lambda.handler
memory_size: 128
role: "arn:aws:iam::{{ account }}:role/API2LambdaExecRole"
- name: Get information
lambda_info:
name: myLambdaFunction
register: lambda_info
- name: show results
ansible.builtin.debug:
msg: "{{ lambda_info['lambda_facts'] }}"
# The following will set the Dev alias to the latest version ($LATEST) since version is omitted (or = 0)
- name: "alias 'Dev' for function {{ lambda_info.lambda_facts.FunctionName }} "
community.aws.lambda_alias:
state: "{{ state | default('present') }}"
function_name: "{{ lambda_info.lambda_facts.FunctionName }}"
name: Dev
description: Development is $LATEST version
# The QA alias will only be created when a new version is published (i.e. not = '$LATEST')
- name: "alias 'QA' for function {{ lambda_info.lambda_facts.FunctionName }} "
community.aws.lambda_alias:
state: "{{ state | default('present') }}"
function_name: "{{ lambda_info.lambda_facts.FunctionName }}"
name: QA
version: "{{ lambda_info.lambda_facts.Version }}"
description: "QA is version {{ lambda_info.lambda_facts.Version }}"
when: lambda_info.lambda_facts.Version != "$LATEST"
# The Prod alias will have a fixed version based on a variable
- name: "alias 'Prod' for function {{ lambda_info.lambda_facts.FunctionName }} "
community.aws.lambda_alias:
state: "{{ state | default('present') }}"
function_name: "{{ lambda_info.lambda_facts.FunctionName }}"
name: Prod
version: "{{ production_version }}"
description: "Production is version {{ production_version }}"
'''
RETURN = '''
---
alias_arn:
description: Full ARN of the function, including the alias
returned: success
type: str
sample: arn:aws:lambda:us-west-2:123456789012:function:myFunction:dev
description:
description: A short description of the alias
returned: success
type: str
sample: The development stage for my hot new app
function_version:
description: The qualifier that the alias refers to
returned: success
type: str
sample: $LATEST
name:
description: The name of the alias assigned
returned: success
type: str
sample: dev
'''
import re
try:
import boto3
from botocore.exceptions import ClientError, ParamValidationError, MissingParametersError
except ImportError:
pass # Handled by AnsibleAWSModule
from ansible_collections.amazon.aws.plugins.module_utils.core import AnsibleAWSModule
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import boto3_conn
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import camel_dict_to_snake_dict
from ansible_collections.amazon.aws.plugins.module_utils.ec2 import get_aws_connection_info
class AWSConnection:
    """
    Create the connection object and client objects as required.
    """

    def __init__(self, ansible_obj, resources, boto3_=True):
        """
        Build one boto3 client per requested AWS service.

        :param ansible_obj: AnsibleAWSModule instance, used both for
            connection parameters and for error reporting via fail_json
        :param resources: list of boto3 service names to create clients
            for; an empty/None value defaults to ['lambda']
        :param boto3_: passed through to get_aws_connection_info as its
            boto3 flag
        """
        try:
            self.region, self.endpoint, aws_connect_kwargs = get_aws_connection_info(ansible_obj, boto3=boto3_)

            self.resource_client = dict()
            if not resources:
                resources = ['lambda']

            # an 'iam' client is always added so the account id can be
            # looked up below.
            # NOTE(review): this appends to the caller's list in place.
            resources.append('iam')

            for resource in resources:
                aws_connect_kwargs.update(dict(region=self.region,
                                               endpoint=self.endpoint,
                                               conn_type='client',
                                               resource=resource
                                               ))
                self.resource_client[resource] = boto3_conn(ansible_obj, **aws_connect_kwargs)

            # if region is not provided, then get default profile/session region
            if not self.region:
                self.region = self.resource_client['lambda'].meta.region_name

        except (ClientError, ParamValidationError, MissingParametersError) as e:
            ansible_obj.fail_json(msg="Unable to connect, authorize or access resource: {0}".format(e))

        # best-effort account-id lookup from the current user's ARN;
        # falls back to '' when the user/ARN cannot be read
        try:
            self.account_id = self.resource_client['iam'].get_user()['User']['Arn'].split(':')[4]
        except (ClientError, ValueError, KeyError, IndexError):
            self.account_id = ''

    def client(self, resource='lambda'):
        """Return the boto3 client created for *resource* (default 'lambda')."""
        return self.resource_client[resource]
def pc(key):
    """
    Convert a snake_case key into its PascalCase equivalent.

    For example, 'this_function_name' becomes 'ThisFunctionName'.

    :param key: snake_case parameter name
    :return: PascalCase string as expected by the boto3 API
    """
    return "".join(part.capitalize() for part in key.split('_'))
def set_api_params(module, module_params):
    """
    Build the boto3 API parameter dict from the given module parameters.

    Parameters whose value is falsy (None, '', 0, ...) are omitted, so
    only explicitly set options reach the API call.

    :param module: Ansible module reference
    :param module_params: iterable of snake_case parameter names
    :return: dict mapping PascalCase names to their values
    """
    return {pc(name): module.params.get(name)
            for name in module_params
            if module.params.get(name)}
def validate_params(module, aws):
    """
    Performs basic parameter validation.

    Fails the module run via fail_json for an invalid or over-long
    function name, and normalizes 'function_version' in place
    (0 becomes '$LATEST', anything else becomes its string form).

    :param module: Ansible module reference
    :param aws: AWS client connection (unused; kept for interface symmetry)
    :return:
    """

    function_name = module.params['function_name']

    # validate function name; \w covers alphanumerics and underscore,
    # and hyphens/colons are explicitly allowed (colons appear in
    # partial-ARN style names)
    if not re.search(r'^[\w\-:]+$', function_name):
        module.fail_json(
            msg='Function name {0} is invalid. Names must contain only alphanumeric characters, '
                'hyphens, underscores and colons.'.format(function_name)
        )
    if len(function_name) > 64:
        module.fail_json(msg='Function name "{0}" exceeds 64 character limit'.format(function_name))

    # if parameter 'function_version' is zero, set it to $LATEST, else convert it to a string
    if module.params['function_version'] == 0:
        module.params['function_version'] = '$LATEST'
    else:
        module.params['function_version'] = str(module.params['function_version'])

    return
def get_lambda_alias(module, aws):
    """
    Returns the lambda function alias if it exists.

    :param module: Ansible module reference
    :param aws: AWS client connection
    :return: alias facts dict from get_alias, or None when the alias
        does not exist
    """

    client = aws.client('lambda')

    # set API parameters
    api_params = set_api_params(module, ('function_name', 'name'))

    # check if alias exists and get facts
    try:
        results = client.get_alias(**api_params)
    except ClientError as e:
        # only ClientError carries the .response attribute holding the
        # service error code; a missing alias is not an error here
        if e.response['Error']['Code'] == 'ResourceNotFoundException':
            results = None
        else:
            module.fail_json(msg='Error retrieving function alias: {0}'.format(e))
    except (ParamValidationError, MissingParametersError) as e:
        # these botocore exceptions have no .response attribute -- the
        # original code would have died with AttributeError trying to
        # read the error code off them
        module.fail_json(msg='Error retrieving function alias: {0}'.format(e))

    return results
def lambda_alias(module, aws):
    """
    Adds, updates or deletes lambda function aliases.

    :param module: Ansible module reference
    :param aws: AWS client connection
    :return dict: 'changed' flag plus the alias facts returned by the AWS API
    """
    client = aws.client('lambda')
    results = dict()
    changed = False
    current_state = 'absent'
    state = module.params['state']

    facts = get_lambda_alias(module, aws)
    if facts:
        current_state = 'present'

    if state == 'present':
        if current_state == 'present':
            # check if alias has changed -- only version and description can change
            alias_params = ('function_version', 'description')
            for param in alias_params:
                if module.params.get(param) != facts.get(pc(param)):
                    changed = True
                    break

            if changed:
                api_params = set_api_params(module, ('function_name', 'name'))
                api_params.update(set_api_params(module, alias_params))

                if not module.check_mode:
                    try:
                        results = client.update_alias(**api_params)
                    except (ClientError, ParamValidationError, MissingParametersError) as e:
                        module.fail_json(msg='Error updating function alias: {0}'.format(e))

        else:
            # create new function alias
            api_params = set_api_params(module, ('function_name', 'name', 'function_version', 'description'))

            try:
                if not module.check_mode:
                    results = client.create_alias(**api_params)
                # changed is reported even in check mode: the alias would be created
                changed = True
            except (ClientError, ParamValidationError, MissingParametersError) as e:
                module.fail_json(msg='Error creating function alias: {0}'.format(e))

    else:  # state = 'absent'
        if current_state == 'present':
            # delete the function alias
            api_params = set_api_params(module, ('function_name', 'name'))

            try:
                if not module.check_mode:
                    results = client.delete_alias(**api_params)
                changed = True
            except (ClientError, ParamValidationError, MissingParametersError) as e:
                module.fail_json(msg='Error deleting function alias: {0}'.format(e))

    # BUG FIX: 'results or facts' was previously passed straight to dict();
    # when the alias never existed and state is 'absent', results is an empty
    # dict (falsy) and facts is None, so dict(None) raised TypeError.
    # Fall back to an empty dict in that case.
    return dict(changed=changed, **dict(results or facts or {}))
def main():
    """
    Main entry point.

    Builds the Ansible argument spec, validates parameters and applies the
    requested alias state, exiting with snake_case facts.

    :return dict: ansible facts
    """
    argument_spec = dict(
        state=dict(required=False, default='present', choices=['present', 'absent']),
        function_name=dict(required=True),
        name=dict(required=True, aliases=['alias_name']),
        # 0 is a sentinel meaning '$LATEST' (normalised by validate_params)
        function_version=dict(type='int', required=False, default=0, aliases=['version']),
        description=dict(required=False, default=None),
    )

    module = AnsibleAWSModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=[],
        required_together=[],
    )

    aws = AWSConnection(module, ['lambda'])

    # validates function_name and normalises function_version before any API call
    validate_params(module, aws)

    results = lambda_alias(module, aws)

    # boto3 returns PascalCase keys; convert them to snake_case for Ansible facts
    module.exit_json(**camel_dict_to_snake_dict(results))


if __name__ == '__main__':
    main()
|
(window["webpackJsonp"] = window["webpackJsonp"] || []).push([[85],{
/***/ "./node_modules/@ionic/core/dist/esm-es5/ion-toast-md.entry.js":
/*!*********************************************************************!*\
!*** ./node_modules/@ionic/core/dist/esm-es5/ion-toast-md.entry.js ***!
\*********************************************************************/
/*! exports provided: ion_toast */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
__webpack_require__.r(__webpack_exports__);
/* harmony export (binding) */ __webpack_require__.d(__webpack_exports__, "ion_toast", function() { return Toast; });
/* harmony import */ var tslib__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(/*! tslib */ "./node_modules/tslib/tslib.es6.js");
/* harmony import */ var _core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(/*! ./core-13ed1ad7.js */ "./node_modules/@ionic/core/dist/esm-es5/core-13ed1ad7.js");
/* harmony import */ var _config_bb99b659_js__WEBPACK_IMPORTED_MODULE_2__ = __webpack_require__(/*! ./config-bb99b659.js */ "./node_modules/@ionic/core/dist/esm-es5/config-bb99b659.js");
/* harmony import */ var _animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__ = __webpack_require__(/*! ./animation-5559213c.js */ "./node_modules/@ionic/core/dist/esm-es5/animation-5559213c.js");
/* harmony import */ var _overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__ = __webpack_require__(/*! ./overlays-10cf2041.js */ "./node_modules/@ionic/core/dist/esm-es5/overlays-10cf2041.js");
/* harmony import */ var _theme_353a032e_js__WEBPACK_IMPORTED_MODULE_5__ = __webpack_require__(/*! ./theme-353a032e.js */ "./node_modules/@ionic/core/dist/esm-es5/theme-353a032e.js");
/* harmony import */ var _index_cae2ca23_js__WEBPACK_IMPORTED_MODULE_6__ = __webpack_require__(/*! ./index-cae2ca23.js */ "./node_modules/@ionic/core/dist/esm-es5/index-cae2ca23.js");
/**
 * iOS Toast Enter Animation
 *
 * Slides the toast wrapper in from the top/bottom screen edge, or fades it
 * in (centred vertically in the host) when position is 'middle'.
 * NOTE(review): generated bundler output -- documented only, not restyled.
 */
var iosEnterAnimation = function (baseEl, position) {
    var baseAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var wrapperAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var hostEl = baseEl.host || baseEl;
    var wrapperEl = baseEl.querySelector('.toast-wrapper');
    // resting offsets respect the device safe areas (notch / home indicator)
    var bottom = "calc(-10px - var(--ion-safe-area-bottom, 0px))";
    var top = "calc(10px + var(--ion-safe-area-top, 0px))";
    wrapperAnimation.addElement(wrapperEl);
    switch (position) {
        case 'top':
            wrapperAnimation.fromTo('transform', 'translateY(-100%)', "translateY(" + top + ")");
            break;
        case 'middle':
            var topPosition = Math.floor(hostEl.clientHeight / 2 - wrapperEl.clientHeight / 2);
            wrapperEl.style.top = topPosition + "px";
            wrapperAnimation.fromTo('opacity', 0.01, 1);
            break;
        default:
            wrapperAnimation.fromTo('transform', 'translateY(100%)', "translateY(" + bottom + ")");
            break;
    }
    return baseAnimation
        .addElement(hostEl)
        .easing('cubic-bezier(.155,1.105,.295,1.12)')
        .duration(400)
        .addAnimation(wrapperAnimation);
};
/**
 * iOS Toast Leave Animation
 *
 * Mirror of the enter animation: slides the toast back off the top/bottom
 * edge, or fades it out when position is 'middle'.
 * NOTE(review): generated bundler output -- documented only, not restyled.
 */
var iosLeaveAnimation = function (baseEl, position) {
    var baseAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var wrapperAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var hostEl = baseEl.host || baseEl;
    var wrapperEl = baseEl.querySelector('.toast-wrapper');
    var bottom = "calc(-10px - var(--ion-safe-area-bottom, 0px))";
    var top = "calc(10px + var(--ion-safe-area-top, 0px))";
    wrapperAnimation.addElement(wrapperEl);
    switch (position) {
        case 'top':
            wrapperAnimation.fromTo('transform', "translateY(" + top + ")", 'translateY(-100%)');
            break;
        case 'middle':
            wrapperAnimation.fromTo('opacity', 0.99, 0);
            break;
        default:
            wrapperAnimation.fromTo('transform', "translateY(" + bottom + ")", 'translateY(100%)');
            break;
    }
    return baseAnimation
        .addElement(hostEl)
        .easing('cubic-bezier(.36,.66,.04,1)')
        .duration(300)
        .addAnimation(wrapperAnimation);
};
/**
 * MD Toast Enter Animation
 *
 * Material Design variant: the toast is positioned near the chosen edge
 * (or centred for 'middle') and always fades in rather than sliding.
 * NOTE(review): generated bundler output -- documented only, not restyled.
 */
var mdEnterAnimation = function (baseEl, position) {
    var baseAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var wrapperAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var hostEl = baseEl.host || baseEl;
    var wrapperEl = baseEl.querySelector('.toast-wrapper');
    // MD uses an 8px inset (vs 10px on iOS), still safe-area aware
    var bottom = "calc(8px + var(--ion-safe-area-bottom, 0px))";
    var top = "calc(8px + var(--ion-safe-area-top, 0px))";
    wrapperAnimation.addElement(wrapperEl);
    switch (position) {
        case 'top':
            wrapperEl.style.top = top;
            wrapperAnimation.fromTo('opacity', 0.01, 1);
            break;
        case 'middle':
            var topPosition = Math.floor(hostEl.clientHeight / 2 - wrapperEl.clientHeight / 2);
            wrapperEl.style.top = topPosition + "px";
            wrapperAnimation.fromTo('opacity', 0.01, 1);
            break;
        default:
            wrapperEl.style.bottom = bottom;
            wrapperAnimation.fromTo('opacity', 0.01, 1);
            break;
    }
    return baseAnimation
        .addElement(hostEl)
        .easing('cubic-bezier(.36,.66,.04,1)')
        .duration(400)
        .addAnimation(wrapperAnimation);
};
/**
 * MD Toast Leave Animation
 *
 * Simple fade-out; position-independent (hence no `position` parameter).
 * NOTE(review): generated bundler output -- documented only, not restyled.
 */
var mdLeaveAnimation = function (baseEl) {
    var baseAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var wrapperAnimation = Object(_animation_5559213c_js__WEBPACK_IMPORTED_MODULE_3__["c"])();
    var hostEl = baseEl.host || baseEl;
    var wrapperEl = baseEl.querySelector('.toast-wrapper');
    wrapperAnimation
        .addElement(wrapperEl)
        .fromTo('opacity', 0.99, 0);
    return baseAnimation
        .addElement(hostEl)
        .easing('cubic-bezier(.36,.66,.04,1)')
        .duration(300)
        .addAnimation(wrapperAnimation);
};
// ion-toast component -- transpiled ES5 output from Stencil/tslib.
// NOTE(review): generated bundler output; comments added for orientation
// only, code left byte-identical.
var Toast = /** @class */ (function () {
    function class_1(hostRef) {
        Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["r"])(this, hostRef);
        this.presented = false;
        this.mode = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["d"])(this);
        /**
         * How many milliseconds to wait before hiding the toast. By default, it will show
         * until `dismiss()` is called.
         */
        this.duration = 0;
        /**
         * If `true`, the keyboard will be automatically dismissed when the overlay is presented.
         */
        this.keyboardClose = false;
        /**
         * The position of the toast on the screen.
         */
        this.position = 'bottom';
        /**
         * @deprecated Use `buttons` instead. If `true`, the close button will be displayed.
         */
        this.showCloseButton = false;
        /**
         * If `true`, the toast will be translucent.
         * Only applies when the mode is `"ios"` and the device supports
         * [`backdrop-filter`](https://developer.mozilla.org/en-US/docs/Web/CSS/backdrop-filter#Browser_compatibility).
         */
        this.translucent = false;
        /**
         * If `true`, the toast will animate.
         */
        this.animated = true;
        // lifecycle events emitted around present/dismiss
        this.didPresent = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["c"])(this, "ionToastDidPresent", 7);
        this.willPresent = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["c"])(this, "ionToastWillPresent", 7);
        this.willDismiss = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["c"])(this, "ionToastWillDismiss", 7);
        this.didDismiss = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["c"])(this, "ionToastDidDismiss", 7);
    }
    /**
     * Present the toast overlay after it has been created.
     */
    class_1.prototype.present = function () {
        return tslib__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this, void 0, void 0, function () {
            var _this = this;
            return tslib__WEBPACK_IMPORTED_MODULE_0__["__generator"](this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["d"])(this, 'toastEnter', iosEnterAnimation, mdEnterAnimation, this.position)];
                    case 1:
                        _a.sent();
                        // schedule auto-dismiss when a positive duration was set
                        if (this.duration > 0) {
                            this.durationTimeout = setTimeout(function () { return _this.dismiss(undefined, 'timeout'); }, this.duration);
                        }
                        return [2 /*return*/];
                }
            });
        });
    };
    /**
     * Dismiss the toast overlay after it has been presented.
     *
     * @param data Any data to emit in the dismiss events.
     * @param role The role of the element that is dismissing the toast.
     * This can be useful in a button handler for determining which button was
     * clicked to dismiss the toast.
     * Some examples include: ``"cancel"`, `"destructive"`, "selected"`, and `"backdrop"`.
     */
    class_1.prototype.dismiss = function (data, role) {
        // cancel a pending auto-dismiss so it cannot fire after manual dismissal
        if (this.durationTimeout) {
            clearTimeout(this.durationTimeout);
        }
        return Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["e"])(this, data, role, 'toastLeave', iosLeaveAnimation, mdLeaveAnimation, this.position);
    };
    /**
     * Returns a promise that resolves when the toast did dismiss.
     */
    class_1.prototype.onDidDismiss = function () {
        return Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["f"])(this.el, 'ionToastDidDismiss');
    };
    /**
     * Returns a promise that resolves when the toast will dismiss.
     */
    class_1.prototype.onWillDismiss = function () {
        return Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["f"])(this.el, 'ionToastWillDismiss');
    };
    // Normalises `buttons` (plain strings become { text }) and appends the
    // legacy close button when the deprecated `showCloseButton` flag is set.
    class_1.prototype.getButtons = function () {
        var _this = this;
        var buttons = this.buttons
            ? this.buttons.map(function (b) {
                return (typeof b === 'string')
                    ? { text: b }
                    : b;
            })
            : [];
        // tslint:disable-next-line: deprecation
        if (this.showCloseButton) {
            buttons.push({
                // tslint:disable-next-line: deprecation
                text: this.closeButtonText || 'Close',
                handler: function () { return _this.dismiss(undefined, 'cancel'); }
            });
        }
        return buttons;
    };
    // Handles a toast button click: dismiss immediately for cancel-type
    // roles, otherwise run the button handler and dismiss unless it
    // returned false.
    class_1.prototype.buttonClick = function (button) {
        return tslib__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this, void 0, void 0, function () {
            var role, shouldDismiss;
            return tslib__WEBPACK_IMPORTED_MODULE_0__["__generator"](this, function (_a) {
                switch (_a.label) {
                    case 0:
                        role = button.role;
                        if (Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["i"])(role)) {
                            return [2 /*return*/, this.dismiss(undefined, role)];
                        }
                        return [4 /*yield*/, this.callButtonHandler(button)];
                    case 1:
                        shouldDismiss = _a.sent();
                        if (shouldDismiss) {
                            return [2 /*return*/, this.dismiss(undefined, button.role)];
                        }
                        return [2 /*return*/, Promise.resolve()];
                }
            });
        });
    };
    // Invokes the (possibly async) button handler; errors are logged and
    // treated as "dismiss anyway".
    class_1.prototype.callButtonHandler = function (button) {
        return tslib__WEBPACK_IMPORTED_MODULE_0__["__awaiter"](this, void 0, void 0, function () {
            var rtn, e_1;
            return tslib__WEBPACK_IMPORTED_MODULE_0__["__generator"](this, function (_a) {
                switch (_a.label) {
                    case 0:
                        if (!(button && button.handler)) return [3 /*break*/, 4];
                        _a.label = 1;
                    case 1:
                        _a.trys.push([1, 3, , 4]);
                        return [4 /*yield*/, Object(_overlays_10cf2041_js__WEBPACK_IMPORTED_MODULE_4__["s"])(button.handler)];
                    case 2:
                        rtn = _a.sent();
                        if (rtn === false) {
                            // if the return value of the handler is false then do not dismiss
                            return [2 /*return*/, false];
                        }
                        return [3 /*break*/, 4];
                    case 3:
                        e_1 = _a.sent();
                        console.error(e_1);
                        return [3 /*break*/, 4];
                    case 4: return [2 /*return*/, true];
                }
            });
        });
    };
    // Renders one button group ('start' or 'end' side) as virtual DOM nodes.
    class_1.prototype.renderButtons = function (buttons, side) {
        var _a;
        var _this = this;
        if (buttons.length === 0) {
            return;
        }
        var mode = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["d"])(this);
        var buttonGroupsClasses = (_a = {
                'toast-button-group': true
            },
            _a["toast-button-group-" + side] = true,
            _a);
        return (Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: buttonGroupsClasses }, buttons.map(function (b) { return Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("button", { type: "button", class: buttonClass(b), tabIndex: 0, onClick: function () { return _this.buttonClick(b); } }, Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: "toast-button-inner" }, b.icon &&
            Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-icon", { icon: b.icon, slot: b.text === undefined ? 'icon-only' : undefined, class: "toast-icon" }), b.text), mode === 'md' && Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("ion-ripple-effect", { type: b.icon !== undefined && b.text === undefined ? 'unbounded' : 'bounded' })); })));
    };
    class_1.prototype.render = function () {
        var _a, _b;
        var allButtons = this.getButtons();
        // buttons default to the 'end' side unless explicitly marked 'start'
        var startButtons = allButtons.filter(function (b) { return b.side === 'start'; });
        var endButtons = allButtons.filter(function (b) { return b.side !== 'start'; });
        var mode = Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["d"])(this);
        var wrapperClass = (_a = {
                'toast-wrapper': true
            },
            _a["toast-" + this.position] = true,
            _a);
        return (Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["H"], { style: {
                zIndex: "" + (60000 + this.overlayIndex),
            }, class: Object.assign((_b = {}, _b[mode] = true, _b), Object(_theme_353a032e_js__WEBPACK_IMPORTED_MODULE_5__["c"])(this.color), Object(_theme_353a032e_js__WEBPACK_IMPORTED_MODULE_5__["g"])(this.cssClass), { 'toast-translucent': this.translucent }) }, Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: wrapperClass }, Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: "toast-container" }, this.renderButtons(startButtons, 'start'), Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: "toast-content" }, this.header !== undefined &&
            Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: "toast-header" }, this.header), this.message !== undefined &&
            Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["h"])("div", { class: "toast-message", innerHTML: Object(_index_cae2ca23_js__WEBPACK_IMPORTED_MODULE_6__["s"])(this.message) })), this.renderButtons(endButtons, 'end')))));
    };
    Object.defineProperty(class_1.prototype, "el", {
        get: function () { return Object(_core_13ed1ad7_js__WEBPACK_IMPORTED_MODULE_1__["e"])(this); },
        enumerable: true,
        configurable: true
    });
    // Component CSS (md theme) injected by Stencil at runtime.
    Object.defineProperty(class_1, "style", {
        get: function () { return ":host{--border-width:0;--border-style:none;--border-color:initial;--box-shadow:none;--min-width:auto;--width:auto;--min-height:auto;--height:auto;--max-height:auto;left:0;top:0;display:block;position:absolute;width:100%;height:100%;color:var(--color);font-family:var(--ion-font-family,inherit);contain:strict;z-index:1000;pointer-events:none}:host-context([dir=rtl]){left:unset;right:unset;right:0}:host(.overlay-hidden){display:none}:host(.ion-color){--button-color:inherit;color:var(--ion-color-contrast)}:host(.ion-color) .toast-wrapper{background:var(--ion-color-base)}.toast-wrapper{border-radius:var(--border-radius);left:var(--start);right:var(--end);width:var(--width);min-width:var(--min-width);max-width:var(--max-width);height:var(--height);min-height:var(--min-height);max-height:var(--max-height);border-width:var(--border-width);border-style:var(--border-style);border-color:var(--border-color);background:var(--background);-webkit-box-shadow:var(--box-shadow);box-shadow:var(--box-shadow)}:host-context([dir=rtl]) .toast-wrapper,[dir=rtl] .toast-wrapper{left:unset;right:unset;left:var(--end);right:var(--start)}.toast-container{-ms-flex-align:center;align-items:center;pointer-events:auto;contain:content}.toast-container,.toast-content{display:-ms-flexbox;display:flex}.toast-content{-ms-flex:1;flex:1;-ms-flex-direction:column;flex-direction:column;-ms-flex-pack:center;justify-content:center}.toast-message{-ms-flex:1;flex:1;white-space:pre-wrap}.toast-button-group{display:-ms-flexbox;display:flex}.toast-button{border:0;outline:none;color:var(--button-color);z-index:0}.toast-icon{font-size:1.4em}.toast-button-inner{display:-ms-flexbox;display:flex;-ms-flex-align:center;align-items:center}\@media (any-hover:hover){.toast-button:hover{cursor:pointer}}:host{--background:var(--ion-color-step-800,#333);--border-radius:4px;--box-shadow:0 3px 5px -1px rgba(0,0,0,0.2),0 6px 10px 0 rgba(0,0,0,0.14),0 1px 18px 0 rgba(0,0,0,0.12);--button-color:var(--ion-color-primary,#3880ff);--color:var(--ion-color-step-50,#f2f2f2);--max-width:700px;--start:8px;--end:8px;font-size:14px}.toast-wrapper{margin-left:auto;margin-right:auto;margin-top:auto;margin-bottom:auto;display:block;position:absolute;opacity:.01;z-index:10}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-wrapper{margin-left:unset;margin-right:unset;-webkit-margin-start:auto;margin-inline-start:auto;-webkit-margin-end:auto;margin-inline-end:auto}}.toast-content{padding-left:16px;padding-right:16px;padding-top:14px;padding-bottom:14px}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-content{padding-left:unset;padding-right:unset;-webkit-padding-start:16px;padding-inline-start:16px;-webkit-padding-end:16px;padding-inline-end:16px}}.toast-header{margin-bottom:2px;font-weight:500}.toast-header,.toast-message{line-height:20px}.toast-button-group-start{margin-left:8px}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-button-group-start{margin-left:unset;-webkit-margin-start:8px;margin-inline-start:8px}}.toast-button-group-end{margin-right:8px}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-button-group-end{margin-right:unset;-webkit-margin-end:8px;margin-inline-end:8px}}.toast-button{padding-left:15px;padding-right:15px;padding-top:10px;padding-bottom:10px;position:relative;background-color:transparent;font-family:var(--ion-font-family);font-size:14px;font-weight:500;letter-spacing:.84px;text-transform:uppercase;overflow:hidden}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-button{padding-left:unset;padding-right:unset;-webkit-padding-start:15px;padding-inline-start:15px;-webkit-padding-end:15px;padding-inline-end:15px}}.toast-button-cancel{color:var(--ion-color-step-100,#e6e6e6)}.toast-button-icon-only{border-radius:50%;padding-left:9px;padding-right:9px;padding-top:9px;padding-bottom:9px;width:36px;height:36px}\@supports ((-webkit-margin-start:0) or (margin-inline-start:0)) or (-webkit-margin-start:0){.toast-button-icon-only{padding-left:unset;padding-right:unset;-webkit-padding-start:9px;padding-inline-start:9px;-webkit-padding-end:9px;padding-inline-end:9px}}\@media (any-hover:hover){.toast-button:hover{background-color:rgba(var(--ion-color-primary-rgb,56,128,255),.08)}.toast-button-cancel:hover{background-color:rgba(var(--ion-background-color-rgb,255,255,255),.08)}}"; },
        enumerable: true,
        configurable: true
    });
    return class_1;
}());
// Builds the CSS class map for a toast button: base classes, an icon-only
// modifier, a per-role modifier, focus/ripple hooks, plus any user classes.
var buttonClass = function (button) {
    var _a;
    return Object.assign((_a = { 'toast-button': true, 'toast-button-icon-only': button.icon !== undefined && button.text === undefined }, _a["toast-button-" + button.role] = button.role !== undefined, _a['ion-focusable'] = true, _a['ion-activatable'] = true, _a), Object(_theme_353a032e_js__WEBPACK_IMPORTED_MODULE_5__["g"])(button.cssClass));
};
/***/ })
}]);
//# sourceMappingURL=85.js.map
|
// Game-wide state. wins/losses persist across rounds; goal, score and
// crystals are re-initialised by reset()/game() for every round.
var wins = 0;
var losses = 0;
var goal = ""; // random target in [19, 120] once a round starts
var score = 0;
var crystals = []; // the four hidden crystal point values for this round
$(document).ready(function() {
    // Full-screen translucent backdrop shown behind the instructions popup.
    // (Snippet adapted from the internet; commented with my understanding.)
    $('<div id="overlay">').css({ // creating overlay div
        "width" : "100%"
        , "height" : "100%" // both lines are to cover entire window
        , "background" : "#000" // set black background
        , "position" : "fixed" // keeps the overlay in place while scrolling
        , "top" : "0"
        , "left" : "0" // reset any inherent margins from the browser
        , "zIndex" : "50" // bring div to forefront of page
        , "MsFilter" : "progid:DXImageTransform.Microsoft.Alpha(Opacity=60)"
        , "filter" : "alpha(opacity=60)"
        , "MozOpacity" : 0.6
        , "KhtmlOpacity" : 0.6
        , "opacity" : 0.6 // the five entries above all set the same 60% opacity; browsers pick the one they understand
    }).appendTo(document.body); // add this created overlay to the body of the webpage

    // show the instructions, and tear both popup and overlay down on Close
    $("#instructPopup").show();
    $('#close').click(function(){
        $("#instructPopup").remove();
        $("#overlay").remove();
    });
function game (){
goal =
Math.floor(Math.random() * (120 - 19 + 1)) + 19;
$("#crysGoal").html(goal);
for (var i = 0; i < 4; i++) {
do {var value = Math.floor(Math.random() * (12)) + 1;}
while(crystals.indexOf(value) !== -1 )
crystals.push(value);
}
} game(goal, crystals);
function reset (){
goal = "";
score = 0;
crystals = [];
$(".scoreCntr").html(0);
}
//This function compounds on top of itself after each game. I have no idea how to stop it. I've tried using return in various places. I've tried giving it its own function. I've tried including it in my reset (using return). I don't even know what to search to get help. Google wasn't giving me relevant issues this time around.
function check (){
if (score === goal) {
var audio = new Audio("../week-4-game/assets/images/oh-yeah.mp3");
audio.play();
alert("Yay!");
wins++;
$("span#wins").empty().html(wins);
reset ();
game ();
}
if (score > goal) {
losses++;
$("span#losses").empty().html(losses);
alert("YOU'VE LOST THE GAME!!! <O>____<O>");
reset();
game();
}
}
if (score < goal) {
$(".crystals").click(function userClick (){
if ($(this).is("#crys1")){
console.log(crystals[0]);
score += crystals[0];
$(".scoreCntr").html(score);
check();
};
if ($(this).is("#crys2")){
console.log(crystals[1]);
score += crystals[1];
$(".scoreCntr").html(score);
check();
};
if ($(this).is("#crys3")){
console.log(crystals[2]);
score += crystals[2];
$(".scoreCntr").html(score);
check();
};
if ($(this).is("#crys4")){
console.log(crystals[3]);
score += crystals[3];
$(".scoreCntr").html(score);
check();
};
});
}
});
|
var app = require('./app');
var config = require('./config');
// BUG FIX: chalk was referenced below but never required, so the
// EADDRINUSE handler itself crashed with a ReferenceError.
var chalk = require('chalk');

// Fail fast with a readable message when the configured port is taken.
app.on("error", function (e) {
    if (e.code == "EADDRINUSE") {
        console.log(chalk.red.bold(" Error in Starting Server : ") + "Port number " + chalk.grey.bold(app.get('port')) + " is in Use, Please change the port number in " + chalk.grey.bold("config.js"));
        process.exit(0);
    }
});

// Honour the PORT environment variable, falling back to config.port.
var server_port = process.env.PORT || config.port;
app.listen(server_port, function () {
    // BUG FIX: log the port actually bound (previously config.port, which is
    // wrong whenever the PORT environment variable overrides it)
    console.log("Started ui-ux at http://localhost:" + server_port);
});
|
/**
* \file WznmQCtpAPar.h
* API code for table TblWznmQCtpAPar (declarations)
* \copyright (C) 2016-2020 MPSI Technologies GmbH
* \author Alexander Wirthmueller (auto-generation)
* \date created: 5 Dec 2020
*/
// IP header --- ABOVE
#ifndef WZNMQCTPAPAR_H
#define WZNMQCTPAPAR_H
#include <sbecore/Xmlio.h>
/**
	* WznmQCtpAPar
	*
	* Single record of the query table TblWznmQCtpAPar.
	* NOTE(review): member semantics inferred from the naming conventions of
	* this auto-generated API -- confirm against the generator templates.
	*/
class WznmQCtpAPar {

public:
	WznmQCtpAPar(const Sbecore::uint jnum = 0, const std::string x1SrefKKey = "", const std::string titX1SrefKKey = "", const std::string Val = "");

public:
	Sbecore::uint jnum;          // record number within the query result
	std::string x1SrefKKey;      // x1-axis key (sref into a key table -- TODO confirm)
	std::string titX1SrefKKey;   // display title corresponding to x1SrefKKey
	std::string Val;             // parameter value

public:
	// Populate this record from the XML content under basexpath; returns
	// true when the expected tag structure was found.
	bool readXML(xmlXPathContext* docctx, std::string basexpath = "", bool addbasetag = false);
};
/**
	* ListWznmQCtpAPar
	*
	* Container for WznmQCtpAPar records. The presence of a copy constructor,
	* assignment operator, destructor and clear() indicates the vector holds
	* owned heap pointers -- TODO confirm in the implementation file.
	*/
class ListWznmQCtpAPar {

public:
	ListWznmQCtpAPar();
	ListWznmQCtpAPar(const ListWznmQCtpAPar& src);
	ListWznmQCtpAPar& operator=(const ListWznmQCtpAPar& src);
	~ListWznmQCtpAPar();

	// Release all records held in nodes.
	void clear();

public:
	std::vector<WznmQCtpAPar*> nodes;

public:
	// Populate the list from the XML content under basexpath; returns true
	// when the expected tag structure was found.
	bool readXML(xmlXPathContext* docctx, std::string basexpath = "", bool addbasetag = false);
};
#endif
|
#pragma once
// Copyright 2015 HcNet Development Foundation and contributors. Licensed
// under the Apache License, Version 2.0. See the COPYING file at the root
// of this distribution or at http://www.apache.org/licenses/LICENSE-2.0
#include "crypto/ByteSlice.h"
#include "crypto/SHA.h"
#include "util/Fs.h"
#include "util/Logging.h"
#include "xdrpp/marshal.h"
#include <fstream>
#include <string>
#include <vector>
namespace HcNet
{
/**
 * Helper for loading a sequence of XDR objects from a file one at a time,
 * rather than all at once.
 *
 * Records are framed with a 4-byte big-endian length prefix whose high
 * 'continuation' bit is masked off on read (XDR record marking).
 */
class XDRInputFileStream
{
    std::ifstream mIn;      // underlying file handle (opened in binary mode)
    std::vector<char> mBuf; // reusable record buffer, grown on demand
    size_t mSizeLimit;      // max accepted record size; 0 disables the check
    size_t mSize;           // total file size, captured at open()

  public:
    XDRInputFileStream(unsigned int sizeLimit = 0) : mSizeLimit{sizeLimit}
    {
    }

    void
    close()
    {
        mIn.close();
    }

    // Opens `filename` for binary reading; logs and throws on failure.
    void
    open(std::string const& filename)
    {
        mIn.open(filename, std::ifstream::binary);
        if (!mIn)
        {
            std::string msg("failed to open XDR file: ");
            msg += filename;
            msg += ", reason: ";
            msg += std::to_string(errno);
            CLOG(ERROR, "Fs") << msg;
            throw std::runtime_error(msg);
        }

        mSize = fs::size(mIn);
    }

    operator bool() const
    {
        return mIn.good();
    }

    size_t
    size() const
    {
        return mSize;
    }

    // Current read offset within the file.
    size_t
    pos()
    {
        assert(!mIn.fail());

        return mIn.tellg();
    }

    // Reads the next record into `out`. Returns false on clean EOF or when
    // the record exceeds mSizeLimit; throws on a truncated payload.
    template <typename T>
    bool
    readOne(T& out)
    {
        char szBuf[4];
        if (!mIn.read(szBuf, 4))
        {
            return false;
        }

        // Read 4 bytes of size, big-endian, with XDR 'continuation' bit cleared
        // (high bit of high byte).
        uint32_t sz = 0;
        sz |= static_cast<uint8_t>(szBuf[0] & '\x7f');
        sz <<= 8;
        sz |= static_cast<uint8_t>(szBuf[1]);
        sz <<= 8;
        sz |= static_cast<uint8_t>(szBuf[2]);
        sz <<= 8;
        sz |= static_cast<uint8_t>(szBuf[3]);

        if (mSizeLimit != 0 && sz > mSizeLimit)
        {
            return false;
        }
        if (sz > mBuf.size())
        {
            mBuf.resize(sz);
        }
        if (!mIn.read(mBuf.data(), sz))
        {
            throw xdr::xdr_runtime_error("malformed XDR file");
        }
        xdr::xdr_get g(mBuf.data(), mBuf.data() + sz);
        xdr::xdr_argpack_archive(g, out);
        return true;
    }
};
/**
 * Counterpart to XDRInputFileStream: appends XDR objects to a file one at a
 * time, each prefixed with a 4-byte big-endian size whose high
 * 'continuation' bit is set (XDR record marking).
 */
class XDROutputFileStream
{
    std::ofstream mOut;     // underlying file handle (binary, truncated on open)
    std::vector<char> mBuf; // reusable serialization buffer (header + payload)

  public:
    void
    close()
    {
        mOut.close();
    }

    // Opens `filename` for binary writing, truncating any existing content;
    // logs and throws on failure.
    void
    open(std::string const& filename)
    {
        mOut.open(filename, std::ofstream::binary | std::ofstream::trunc);
        if (!mOut)
        {
            std::string msg("failed to open XDR file: ");
            msg += filename;
            msg += ", reason: ";
            msg += std::to_string(errno);
            CLOG(FATAL, "Fs") << msg;
            throw std::runtime_error(msg);
        }
    }

    operator bool() const
    {
        return mOut.good();
    }

    // Serializes `t` and writes one framed record. Optionally folds the
    // written bytes into `hasher` and adds the byte count to `bytesPut`.
    // Returns false when the underlying stream write fails.
    template <typename T>
    bool
    writeOne(T const& t, SHA256* hasher = nullptr, size_t* bytesPut = nullptr)
    {
        uint32_t sz = (uint32_t)xdr::xdr_size(t);
        // record sizes must fit in 31 bits so the continuation bit is free
        assert(sz < 0x80000000);

        if (mBuf.size() < sz + 4)
        {
            mBuf.resize(sz + 4);
        }

        // Write 4 bytes of size, big-endian, with XDR 'continuation' bit set on
        // high bit of high byte.
        mBuf[0] = static_cast<char>((sz >> 24) & 0xFF) | '\x80';
        mBuf[1] = static_cast<char>((sz >> 16) & 0xFF);
        mBuf[2] = static_cast<char>((sz >> 8) & 0xFF);
        mBuf[3] = static_cast<char>(sz & 0xFF);

        xdr::xdr_put p(mBuf.data() + 4, mBuf.data() + 4 + sz);
        xdr_argpack_archive(p, t);

        if (!mOut.write(mBuf.data(), sz + 4))
        {
            return false;
        }
        if (hasher)
        {
            hasher->add(ByteSlice(mBuf.data(), sz + 4));
        }
        if (bytesPut)
        {
            *bytesPut += (sz + 4);
        }
        return true;
    }
};
}
|
from django.contrib.auth.models import AbstractUser
from django.db.models import CharField
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
class User(AbstractUser):
    """
    Default custom user model for {{cookiecutter.project_name}}.
    If adding fields that need to be filled at user signup,
    check forms.SignupForm and forms.SocialSignupForms accordingly.
    """

    #: First and last name do not cover name patterns around the globe
    name = CharField(_("Name of User"), blank=True, max_length=255)
    # AbstractUser's split name fields are disabled in favour of `name`
    first_name = None  # type: ignore
    last_name = None  # type: ignore

    def get_absolute_url(self) -> str:
        """Get url for user's detail view.

        Returns:
            str: URL for user detail.
        """
        return reverse("users:detail", kwargs={"username": self.username})
|
dead_code_1: {
options = {
dead_code: true
};
input: {
function f() {
a();
b();
x = 10;
return;
if (x) {
y();
}
}
}
expect: {
function f() {
a();
b();
x = 10;
return;
}
}
}
dead_code_2_should_warn: {
options = {
dead_code: true
};
input: {
function f() {
g();
x = 10;
throw new Error("foo");
// completely discarding the `if` would introduce some
// bugs. UglifyJS v1 doesn't deal with this issue; in v2
// we copy any declarations to the upper scope.
if (x) {
y();
var x;
function g(){};
// but nested declarations should not be kept.
(function(){
var q;
function y(){};
})();
}
}
f();
}
expect: {
function f() {
g();
x = 10;
throw new Error("foo");
var x;
function g(){};
}
f();
}
expect_stdout: true
node_version: "<=4"
}
dead_code_2_should_warn_strict: {
options = {
dead_code: true
};
input: {
"use strict";
function f() {
g();
x = 10;
throw new Error("foo");
// completely discarding the `if` would introduce some
// bugs. UglifyJS v1 doesn't deal with this issue; in v2
// we copy any declarations to the upper scope.
if (x) {
y();
var x;
function g(){};
// but nested declarations should not be kept.
(function(){
var q;
function y(){};
})();
}
}
f();
}
expect: {
"use strict";
function f() {
g();
x = 10;
throw new Error("foo");
var x;
}
f();
}
expect_stdout: true
node_version: ">=4"
}
dead_code_constant_boolean_should_warn_more: {
options = {
dead_code : true,
loops : true,
booleans : true,
conditionals : true,
evaluate : true,
side_effects : true,
};
input: {
while (!((foo && bar) || (x + "0"))) {
console.log("unreachable");
var foo;
function bar() {}
}
for (var x = 10, y; x && (y || x) && (!typeof x); ++x) {
asdf();
foo();
var moo;
}
bar();
}
expect: {
var foo;
function bar() {}
// nothing for the while
// as for the for, it should keep:
var x = 10, y;
var moo;
bar();
}
expect_stdout: true
node_version: "<=4"
}
dead_code_constant_boolean_should_warn_more_strict: {
options = {
dead_code : true,
loops : true,
booleans : true,
conditionals : true,
evaluate : true,
side_effects : true,
};
input: {
"use strict";
while (!((foo && bar) || (x + "0"))) {
console.log("unreachable");
var foo;
function bar() {}
}
for (var x = 10, y; x && (y || x) && (!typeof x); ++x) {
asdf();
foo();
var moo;
}
bar();
}
expect: {
"use strict";
var foo;
// nothing for the while
// as for the for, it should keep:
var x = 10, y;
var moo;
bar();
}
expect_stdout: true
node_version: ">=4"
}
try_catch_finally: {
options = {
conditionals: true,
dead_code: true,
evaluate: true,
}
input: {
var a = 1;
!function() {
try {
if (false) throw x;
} catch (a) {
var a = 2;
console.log("FAIL");
} finally {
a = 3;
console.log("PASS");
}
}();
try {
console.log(a);
} finally {
}
}
expect: {
var a = 1;
!function() {
var a;
a = 3;
console.log("PASS");
}();
try {
console.log(a);
} finally {
}
}
expect_stdout: [
"PASS",
"1",
]
}
accessor: {
    options = {
        side_effects: true,
    }
    input: {
        // An object literal used as a bare expression statement: building it
        // (including the get/set accessor pair) has no observable effect.
        ({
            get a() {},
            set a(v){
                this.b = 2;
            },
            b: 1
        });
    }
    // `side_effects` drops the whole statement, leaving nothing.
    expect: {}
}
issue_2233_1: {
    // With pure_getters:"strict" plus `unsafe`, property reads on known
    // side-effect-free built-in globals can be discarded entirely.
    options = {
        pure_getters: "strict",
        side_effects: true,
        unsafe: true,
    }
    input: {
        Array.isArray;
        Boolean;
        console.log;
        Error.name;
        Function.length;
        Math.random;
        Number.isNaN;
        RegExp;
        Object.defineProperty;
        String.fromCharCode;
    }
    // Every statement is a pure expression, so the output is empty.
    expect: {}
    expect_stdout: true
}
issue_2233_2: {
options = {
pure_getters: "strict",
reduce_vars: true,
side_effects: true,
unsafe: true,
unused: true,
}
input: {
var RegExp;
Array.isArray;
RegExp;
UndeclaredGlobal;
function foo() {
var Number;
AnotherUndeclaredGlobal;
Math.sin;
Number.isNaN;
}
}
expect: {
var RegExp;
UndeclaredGlobal;
function foo() {
var Number;
AnotherUndeclaredGlobal;
Number.isNaN;
}
}
}
issue_2233_3: {
options = {
pure_getters: "strict",
reduce_vars: true,
side_effects: true,
toplevel: true,
unsafe: true,
unused: true,
}
input: {
var RegExp;
Array.isArray;
RegExp;
UndeclaredGlobal;
function foo() {
var Number;
AnotherUndeclaredGlobal;
Math.sin;
Number.isNaN;
}
}
expect: {
UndeclaredGlobal;
}
}
|
(function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory(require("jquery"));
else if(typeof define === 'function' && define.amd)
define(["jquery"], factory);
else if(typeof exports === 'object')
exports["tooltips"] = factory(require("jquery"));
else
root["tooltips"] = factory(root["jQuery"]);
})(this, function(__WEBPACK_EXTERNAL_MODULE_2__) {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ({
/***/ 0:
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(27);
/***/ }),
/***/ 2:
/***/ (function(module, exports) {
module.exports = __WEBPACK_EXTERNAL_MODULE_2__;
/***/ }),
/***/ 27:
/***/ (function(module, exports, __webpack_require__) {
'use strict';
/**
* tooltips提示
* by tommyshao <jinhong.shao@frontpay.cn>
* 2016-07-19
*
* Reference uikit.tooltips.js
*
* API:
* ------
* $(element).tooltips(option);
*/
var $ = __webpack_require__(2);
var toggle = '[data-toggle="tooltips"]';
// Constructor
// ---------
// `el`     — element the tooltip is attached to.
// `option` — per-instance overrides; merged over Tooltips.DEFAULTS and the
//            element's data-* attributes (data-* wins over DEFAULTS,
//            `option` wins over both).
var Tooltips = function Tooltips(el, option) {
    this.$el = $(el);
    this.options = $.extend({}, Tooltips.DEFAULTS, this.$el.data(), option);
    this.$tooltip = null;      // tooltip DOM node, lazily created in init()
    this.content = '';         // current tooltip HTML
    this.tooltipdelay = null;  // show-delay timer handle (setTimeout)
    this.checkdelay = null;    // visibility-poll handle (setInterval)
    this.init();
};
Tooltips.VERSION = '{{VERSION}}';
// 动画过渡时间
Tooltips.TRANSITION_DURATION = 150;
Tooltips.DEFAULTS = {
"offset": 9,
"pos": "top",
"animation": true,
"delay": 0,
"cls": "",
"active": "active",
"url": "",
"content": function content(elem, title) {
title = elem.attr('title');
if (title) {
elem.data('cached-title', title).removeAttr('title');
}
return elem.data('cached-title');
}
};
// Public Method
// --------------
Tooltips.prototype.init = function () {
var $this = this;
if (!$this.$tooltip) {
$this.$tooltip = $('<div class="tooltips"><div class="tooltips-inner"></div><span class="tips-arrow-border"></span><span class="tips-arrow"></span></div>').appendTo('body');
}
$this.$el.on({
focus: function focus() {
$this.show();
},
blur: function blur() {
$this.hide();
},
mouseenter: function mouseenter() {
$this.show();
},
mouseleave: function mouseleave() {
$this.hide();
}
});
};
Tooltips.prototype.__getPosition = function () {
var $this = this,
pos = $.extend({}, this.$el.offset(), { width: this.$el[0].offsetWidth, height: this.$el[0].offsetHeight }),
width = this.$tooltip[0].offsetWidth,
height = this.$tooltip[0].offsetHeight,
offset = typeof this.options.offset === "function" ? this.options.offset.call(this.$el) : this.options.offset,
position = typeof this.options.pos === "function" ? this.options.pos.call(this.$el) : this.options.pos,
tmppos = position.split('-'),
tcss = {
"display": "none",
"visibility": "visible",
"top": pos.top + pos.height + height,
"left": pos.left
};
var variants = {
"bottom": { top: pos.top + pos.height + offset, left: pos.left + pos.width / 2 - width / 2 },
"top": { top: pos.top - height - offset, left: pos.left + pos.width / 2 - width / 2 },
"left": { top: pos.top + pos.height / 2 - height / 2, left: pos.left - width - offset },
"right": { top: pos.top + pos.height / 2 - height / 2, left: pos.left + pos.width + offset }
};
$.extend(tcss, variants[tmppos[0]]);
if (tmppos.length == 2) tcss.left = tmppos[1] == "left" ? pos.left : pos.left + pos.width - width;
var boundary = this.checkBoundary(tcss.left, tcss.top, width, height);
if (boundary) {
switch (boundary) {
case "x":
if (tmppos.length == 2) {
position = tmppos[0] + '-' + (tcss.left < 0 ? 'left' : 'right');
} else {
position = tcss.left < 0 ? 'right' : 'left';
}
break;
case "y":
if (tmppos.length == 2) {
position = (tcss.top < 0 ? "bottom" : "top") + "-" + tmppos[1];
} else {
position = tcss.top < 0 ? "bottom" : "top";
}
break;
case "xy":
if (tmppos.length == 2) {
position = (tcss.top < 0 ? "bottom" : "top") + "-" + (tcss.left < 0 ? "left" : "right");
} else {
position = tcss.left < 0 ? "right" : "left";
}
break;
}
tmppos = position.split('-');
$.extend(tcss, variants[tmppos[0]]);
if (tmppos.length == 2) tcss.left = tmppos[1] == "left" ? pos.left : pos.left + pos.width - width;
}
tcss.left -= $("body").position().left;
return [tcss, position];
};
Tooltips.prototype.__setPosition = function () {
var myPosition = this.__getPosition();
this.$tooltip.css(myPosition[0]).attr("class", ['tooltips', 'tooltips-' + myPosition[1], this.options.cls].join(' '));
if (this.options.animation) {
this.$tooltip.css({
opacity: 0,
display: "block"
}).addClass(this.options.active).animate({ opacity: 1 }, parseInt(this.options.animation, 10) || 400);
} else {
this.$tooltip.show().addClass(this.options.active);
}
};
Tooltips.prototype.show = function () {
var $this = this;
this.content = typeof this.options.content === "function" ? this.options.content(this.$el) : this.options.content;
if (this.tooltipdelay) clearTimeout(this.tooltipdelay);
if (this.checkdelay) clearTimeout(this.checkdelay);
if (!this.content.length) return;
this.$tooltip.stop().css({ "top": -2000, "visibility": "hidden" }).removeClass(this.options.active).show();
this.$tooltip.find(".tooltips-inner").html(this.content);
// 异步
if (this.options.url !== '') {
this.$tooltip.find(".tooltips-inner").load(this.options.url, function () {
// 设置位置
$this.__setPosition();
});
}
this.tooltipdelay = setTimeout(function () {
// 设置位置
$this.__setPosition();
$this.tooltipdelay = false;
$this.checkdelay = setInterval(function () {
if (!$this.$el.is(':visible')) $this.hide();
}, 150);
}, parseInt(this.options.delay, 10) || 0);
};
// Hide the tooltip, honouring the configured fade animation.
Tooltips.prototype.hide = function () {
    // Keep the tooltip visible while its input element still has focus.
    if (this.$el.is('input') && this.$el[0] === document.activeElement) return;
    if (this.tooltipdelay) clearTimeout(this.tooltipdelay);
    // NOTE(review): checkdelay is a setInterval id; clearTimeout works per the
    // HTML spec (shared timer table) but clearInterval would read clearer.
    if (this.checkdelay) clearTimeout(this.checkdelay);
    this.$tooltip.stop();
    if (this.options.animation) {
        var $this = this;
        this.$tooltip.fadeOut(parseInt(this.options.animation, 10) || 400, function () {
            $this.$tooltip.removeClass($this.options.active);
        });
    } else {
        this.$tooltip.hide().removeClass(this.options.active);
    }
};
// Report which viewport axes the tooltip box would overflow for the intended
// placement: "" (fits), "x", "y" or "xy".
Tooltips.prototype.checkBoundary = function (left, top, width, height) {
    var axis = "";
    // Horizontal overflow: off the left edge, or past the right viewport edge.
    if (left < 0 || left - $(document).scrollLeft() + width > $(window).width()) {
        axis += "x";
    }
    // Vertical overflow: off the top edge, or past the bottom viewport edge.
    if (top < 0 || top - $(document).scrollTop() + height > $(window).height()) {
        axis += "y";
    }
    return axis;
};
// Plugin definition
// ----------
// Standard jQuery plugin bridge: create one Tooltips instance per element
// (cached under the 'bp.tooltips' data key); when `option` is a string it
// names a method to invoke on the cached instance (e.g. 'show', 'hide').
function Plugin(option) {
    return $(this).each(function () {
        var $this = $(this);
        var data = $this.data('bp.tooltips');
        if (!data) $this.data('bp.tooltips', data = new Tooltips(this, option));
        if (typeof option == 'string') data[option]();
    });
}
// jQuery 插件扩展
// -------------
$.fn.tooltips = Plugin;
$.fn.tooltips.Constructor = Tooltips;
// 元素插件绑定
// ----------
var handler = function handler() {
$(this).tooltips('show');
};
$(function () {
$(document).on('mouseenter.bp.tooltips focus.bp.tooltips', toggle, handler);
});
module.exports = Tooltips;
/***/ })
/******/ })
});
;
//# sourceMappingURL=tooltips.js.map
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri May 10 18:01:57 2019
@author: nakul
"""
import random
import numpy as np
import math
import copy
import matplotlib.pyplot as plt
# Module-level configuration and obstacle-map construction (runs at import).
show_animation = True  # animate the search tree while planning (read by main())
ox,oy = [],[]  # obstacle cell coordinates, filled by the loop below
#explore_x,explore_y = [],[]
# Occupancy grid: obstacle[x][y] == 1 marks an occupied cell (1110 x 1010 map).
obstacle = np.zeros(shape=(1110,1010))
m = 0    # clearance margin added around every obstacle (0 = exact boundaries)
res = 1  # grid resolution divisor; all dimensions below are scaled by 1/res
# NOTE(review): this pure-Python double loop visits ~1.1M cells and is slow;
# vectorising the tests with numpy would be a large start-up speed-up.
for x in range (1110):
    for y in range(1010):
        #Equations for circles
        # c1..c4: circular obstacles of radius 40.5/res (+margin); a point is
        # inside a circle when its expression is <= 0.
        c1 = (x - round(390/res))**2 + (y - round(960/res))**2 - ((40.5/res) + m)**2
        c2 = (x - round(438 / res)) ** 2 + (y - round(736 / res)) ** 2 - ((40.5 / res) + m) ** 2
        c3 = (x - round(438 / res)) ** 2 + (y - round(274 / res)) ** 2 - ((40.5 / res) + m) ** 2
        c4 = (x - round(390 / res)) ** 2 + (y - round(45 / res)) ** 2 - ((40.5 / res) + m) ** 2
        #Equation for table
        # t1..t4: half-plane tests bounding the rectangular middle of the
        # capsule-shaped table; inside when all four are <= 0.
        t1 = -y + (750.1/res) - m
        t2 = y - (910/res) - m
        t3 = -x + (149.89/res)
        t4 = x - (309.79/res)
        #Table left circle
        t_l = (x - (149.89/res))**2 + (y - (830.5/res))**2 - ((79.89/res) + m)**2
        #Table right circle
        t_r = (x - (309.79/res))**2 + (y - (830.5/res))**2 - ((79.89/res) + m)**2
        # Rectangle-1 .. Rectangle-13: each rectangle is four half-plane tests;
        # a point is inside when all four fN are <= 0.
        # Rectangle-1
        f1 = -y + (0/res) - m
        f2 = y - (35/res) - m
        f3 = -x + (685/res) - m
        f4 = x - (1110/res) - m
        #Rectangle -2
        f5 = -y + (35 / res) - m
        f6 = y - (111 / res) - m
        f7 = -x + (927 / res) - m
        f8 = x - (1110 / res) - m
        #Rectangle-3
        f9 = -y + (35 / res) - m
        f10 = y - (93 / res) - m
        f11 = -x + (779 / res) - m
        f12 = x - (896 / res) - m
        #Rectangle-4
        f13 = -y + (35 / res) - m
        f14 = y - (187 / res) - m
        f15 = -x + (474 / res) - m
        f16 = x - (748 / res) - m
        #Rectangle-5
        f17 = -y + (919/res) - m
        f18 = y - (1010/res) - m
        f19 = -x + (983/res) - m
        f20 = x - (1026/res) - m
        #Rectangle-6
        f20_1 = -y + (827/res) - m
        f21 = y - (1010/res) - m
        f22 = -x + (832/res) - m
        f23 = x - (918/res) - m
        #Rectangle-7
        f24 = -y + (621/res) - m
        f25 = y - (697/res) - m
        f26 = -x + (744/res) - m
        f27 = x - (1110/res) - m
        #Rectangle-8
        f28 = -y + (449/res) - m
        f29 = y - (566/res) - m
        f30 = -x + (1052/res) - m
        f31 = x - (1110/res) - m
        #Rectangle-9
        f32 = -y + (363/res) - m
        f33 = y - (449/res) - m
        f34 = -x + (1019/res) - m
        f35 = x - (1110/res) - m
        #Rectangle-10
        f36 = -y + (178.75/res) - m
        f37 = y - (295.75/res) - m
        f38 = -x + (1052/res) - m
        f39 = x - (1110/res) - m
        #Rectangle-11
        f40 = -y + (315/res) - m
        f41 = y - (498/res) - m
        f42 = -x + (438/res) - m
        f43 = x - (529/res) - m
        #Rectangle-12
        f44 = -y + (265/res) - m
        f45 = y - (341/res) - m
        f46 = -x + (529/res) - m
        f47 = x - (712/res) - m
        #Rectangle-13
        f48 = -y + (267/res) - m
        f49 = y - (384/res) - m
        f50 = -x + (784.5/res) - m
        f51 = x - (936.5/res) - m
        # b1..b4: one-cell wall around the map border.
        #Equation for boundary 1:
        b1 = y - 1 - m
        #Equation for boundary 2:
        b2 = x - 1 - m
        #Equation for boundary 3:
        b3 = y - (1010 - 1 - m)
        #Equation for boundary 4:
        b4 = x - (1110 - 1 - m)
        # Mark the cell occupied if it lies inside any obstacle or on the border.
        if (c1<=0 or c2<=0 or c3<=0 or c4<=0 or (t1<=0 and t2<=0 and t3<=0 and t4<=0) or t_l<=0 or t_r<=0 or (f1 <= 0 and f2 <= 0 and f3 <= 0 and f4 <= 0) or (f5 <= 0 and f6 <= 0 and f7 <= 0 and f8 <= 0) or (f9 <= 0 and f10 <= 0 and f11 <= 0 and f12 <= 0) or (f13 <= 0 and f14 <= 0 and f15 <= 0 and f16 <= 0) or (f17 <= 0 and f18 <= 0 and f19 <= 0 and f20 <= 0) or (f20_1 <= 0 and f21 <= 0 and f22 <= 0 and f23 <= 0) or (f24 <= 0 and f25 <= 0 and f26 <= 0 and f27 <= 0) or (f28 <= 0 and f29 <= 0 and f30 <= 0 and f31 <= 0) or (f32 <= 0 and f33 <= 0 and f34 <= 0 and f35 <= 0) or (f36 <= 0 and f37 <= 0 and f38 <= 0 and f39 <= 0) or (f40 <= 0 and f41 <= 0 and f42 <= 0 and f43 <= 0) or (f44 <= 0 and f45 <= 0 and f46 <= 0 and f47 <= 0) or (f48 <= 0 and f49 <= 0 and f50 <= 0 and f51 <= 0) or b1<0 or b2<0 or b3>=0 or b4>=0):
            obstacle[x][y] = 1
            ox.append(x)
            oy.append(y)
# (N, 2) array of obstacle cell coordinates consumed by the planner.
obstacleList = np.vstack((ox,oy)).T
# Draw the obstacle map once up front.
plt.plot(ox,oy,"ko")
plt.axis([-10, 1210, -10, 1110])
plt.grid(True)
class InformedRRTStar():
    """Informed RRT* path planner over a 2D obstacle point set.

    Samples uniformly until a first path reaches the goal, then restricts
    sampling to the prolate hyperspheroid ("informed" subset) whose
    transverse diameter equals the best path length found so far.

    Parameters
    ----------
    start, goal : [x, y] start / goal coordinates.
    obstacleList : iterable of (ox, oy) obstacle points.
    randArea : [min, max] uniform sampling range used for both axes.
    expandDis : steering step length.
    goalSampleRate : sampling bias threshold toward the goal (see
        sampleFreeSpace for how it is compared).
    maxIter : number of sampling iterations.
    """

    def __init__(self, start, goal,
                 obstacleList, randArea,
                 expandDis=20, goalSampleRate=10, maxIter=200):
        self.start = Node(start[0], start[1])
        self.goal = Node(goal[0], goal[1])
        self.minrand = randArea[0]
        self.maxrand = randArea[1]
        self.expandDis = expandDis
        self.goalSampleRate = goalSampleRate
        self.maxIter = maxIter
        self.obstacleList = obstacleList

    def InformedRRTStarSearch(self, animation=True):
        """Run the search; return the best path [[x, y], ...] or None."""
        self.nodeList = [self.start]
        # Best path length found so far; infinite until the first solution.
        cBest = float('inf')
        solutionSet = set()
        path = None
        # Sampling-ellipse geometry: cMin is the straight-line start-goal
        # distance, xCenter its midpoint, a1 the unit start->goal direction.
        cMin = math.sqrt(pow(self.start.x - self.goal.x, 2)
                         + pow(self.start.y - self.goal.y, 2))
        xCenter = np.array([[(self.start.x + self.goal.x) / 2.0],
                            [(self.start.y + self.goal.y) / 2.0], [0]])
        a1 = np.array([[(self.goal.x - self.start.x) / cMin],
                       [(self.goal.y - self.start.y) / cMin], [0]])
        # Extract plain floats: passing size-1 arrays to math.atan2 relies on
        # implicit array->scalar conversion, deprecated in modern numpy.
        etheta = math.atan2(a1[1, 0], a1[0, 0])
        # Rotation-to-world-frame matrix C via SVD of the outer product a1*e1^T.
        id1_t = np.array([1.0, 0.0, 0.0]).reshape(1, 3)
        M = a1 @ id1_t
        U, S, Vh = np.linalg.svd(M, 1, 1)
        C = np.dot(np.dot(U, np.diag(
            [1.0, 1.0, np.linalg.det(U) * np.linalg.det(np.transpose(Vh))])), Vh)
        for i in range(self.maxIter):
            # Sample space is defined by cBest: uniform over the free space
            # until a solution exists, then the informed ellipse.
            rnd = self.informed_sample(cBest, cMin, xCenter, C)
            nind = self.getNearestListIndex(self.nodeList, rnd)
            nearestNode = self.nodeList[nind]
            # Steer from the nearest node toward the sample.
            theta = math.atan2(rnd[1] - nearestNode.y, rnd[0] - nearestNode.x)
            newNode = self.getNewNode(theta, nind, nearestNode)
            d = self.lineCost(nearestNode, newNode)
            isCollision = self.__CollisionCheck(newNode, self.obstacleList)
            isCollisionEx = self.check_collision_extend(nearestNode, theta, d)
            if isCollision and isCollisionEx:
                nearInds = self.findNearNodes(newNode)
                newNode = self.chooseParent(newNode, nearInds)
                self.nodeList.append(newNode)
                self.rewire(newNode, nearInds)
                if self.isNearGoal(newNode):
                    solutionSet.add(newNode)
                    lastIndex = len(self.nodeList) - 1
                    tempPath = self.getFinalCourse(lastIndex)
                    tempPathLen = self.getPathLen(tempPath)
                    # BUGFIX: compare against cBest (the best length so far).
                    # The original compared against a `pathLen` variable that
                    # was never updated, so any later solution -- even a
                    # longer one -- replaced the stored path and could grow
                    # cBest, enlarging the informed sampling ellipse.
                    if tempPathLen < cBest:
                        path = tempPath
                        cBest = tempPathLen
            if animation:
                self.drawGraph(xCenter=xCenter,
                               cBest=cBest, cMin=cMin,
                               etheta=etheta, rnd=rnd)
        return path

    def chooseParent(self, newNode, nearInds):
        """Re-parent newNode to the near node giving the lowest cost-to-come."""
        if len(nearInds) == 0:
            return newNode
        dList = []
        for i in nearInds:
            dx = newNode.x - self.nodeList[i].x
            dy = newNode.y - self.nodeList[i].y
            d = math.sqrt(dx ** 2 + dy ** 2)
            theta = math.atan2(dy, dx)
            if self.check_collision_extend(self.nodeList[i], theta, d):
                dList.append(self.nodeList[i].cost + d)
            else:
                dList.append(float('inf'))
        minCost = min(dList)
        minInd = nearInds[dList.index(minCost)]
        if minCost == float('inf'):
            # Every candidate connection collides; keep the original parent.
            print("mincost is inf")
            return newNode
        newNode.cost = minCost
        newNode.parent = minInd
        return newNode

    def findNearNodes(self, newNode):
        """Return indices of nodes within the shrinking RRT* rewiring radius."""
        nnode = len(self.nodeList)
        r = 50.0 * math.sqrt((math.log(nnode) / nnode))
        dlist = [(node.x - newNode.x) ** 2
                 + (node.y - newNode.y) ** 2 for node in self.nodeList]
        # BUGFIX: the original used `dlist.index(d)`, which returns the FIRST
        # node with a matching distance -- equidistant nodes produced
        # duplicate/wrong indices. enumerate() yields each node's own index.
        return [i for i, d in enumerate(dlist) if d <= r ** 2]

    def informed_sample(self, cMax, cMin, xCenter, C):
        """Sample a point: from the informed ellipse if a path exists
        (cMax finite), otherwise uniformly from the free space."""
        if cMax < float('inf'):
            # Ellipse radii: transverse cMax/2, conjugate sqrt(cMax^2-cMin^2)/2.
            r = [cMax / 2.0,
                 math.sqrt(cMax**2 - cMin**2) / 2.0,
                 math.sqrt(cMax**2 - cMin**2) / 2.0]
            L = np.diag(r)
            xBall = self.sampleUnitBall()
            rnd = np.dot(np.dot(C, L), xBall) + xCenter
            rnd = [rnd[(0, 0)], rnd[(1, 0)]]
        else:
            rnd = self.sampleFreeSpace()
        return rnd

    def sampleUnitBall(self):
        """Uniformly sample a point in the unit disk (z component is 0)."""
        a = random.random()
        b = random.random()
        if b < a:
            a, b = b, a
        sample = (b * math.cos(2 * math.pi * a / b),
                  b * math.sin(2 * math.pi * a / b))
        return np.array([[sample[0]], [sample[1]], [0]])

    def sampleFreeSpace(self):
        """Uniform sample over randArea, occasionally returning the goal.

        NOTE(review): the draw is randint(0, 1110) against goalSampleRate,
        so goalSampleRate behaves as a threshold out of 1110, not a percent
        -- confirm intended bias.
        """
        if random.randint(0, 1110) > self.goalSampleRate:
            rnd = [random.uniform(self.minrand, self.maxrand),
                   random.uniform(self.minrand, self.maxrand)]
        else:
            rnd = [self.goal.x, self.goal.y]
        return rnd

    def getPathLen(self, path):
        """Total Euclidean length of a [[x, y], ...] path (0 for <2 points)."""
        pathLen = 0
        for i in range(1, len(path)):
            node1_x = path[i][0]
            node1_y = path[i][1]
            node2_x = path[i - 1][0]
            node2_y = path[i - 1][1]
            pathLen += math.sqrt((node1_x - node2_x)
                                 ** 2 + (node1_y - node2_y)**2)
        return pathLen

    def lineCost(self, node1, node2):
        """Euclidean distance between two nodes."""
        return math.sqrt((node1.x - node2.x)**2 + (node1.y - node2.y)**2)

    def getNearestListIndex(self, nodes, rnd):
        """Index of the node nearest to sample rnd = [x, y]."""
        dList = [(node.x - rnd[0])**2
                 + (node.y - rnd[1])**2 for node in nodes]
        minIndex = dList.index(min(dList))
        return minIndex

    def __CollisionCheck(self, newNode, obstacleList):
        """True when newNode keeps a squared distance > 10 from every
        obstacle point; False on collision."""
        for (ox, oy) in obstacleList:
            dx = ox - newNode.x
            dy = oy - newNode.y
            d = dx * dx + dy * dy
            if d <= 10:
                return False  # collision
        return True  # safe

    def getNewNode(self, theta, nind, nearestNode):
        """Steer one expandDis step from nearestNode along heading theta."""
        newNode = copy.deepcopy(nearestNode)
        newNode.x += round(self.expandDis * math.cos(theta))
        newNode.y += round(self.expandDis * math.sin(theta))
        newNode.cost += self.expandDis
        newNode.parent = nind
        return newNode

    def isNearGoal(self, node):
        """True when node is within one steering step of the goal."""
        d = self.lineCost(node, self.goal)
        if d < self.expandDis:
            return True
        return False

    def rewire(self, newNode, nearInds):
        """Re-parent near nodes through newNode when that lowers their cost."""
        nnode = len(self.nodeList)
        for i in nearInds:
            nearNode = self.nodeList[i]
            d = math.sqrt((nearNode.x - newNode.x)**2
                          + (nearNode.y - newNode.y)**2)
            scost = newNode.cost + d
            if nearNode.cost > scost:
                theta = math.atan2(newNode.y - nearNode.y,
                                   newNode.x - nearNode.x)
                if self.check_collision_extend(nearNode, theta, d):
                    # newNode was appended last, so its index is nnode - 1.
                    nearNode.parent = nnode - 1
                    nearNode.cost = scost

    def check_collision_extend(self, nearNode, theta, d):
        """Collision-check the segment from nearNode along theta for length d,
        sampled every expandDis; True when all samples are collision-free."""
        tmpNode = copy.deepcopy(nearNode)
        for i in range(int(d / self.expandDis)):
            tmpNode.x += self.expandDis * math.cos(theta)
            tmpNode.y += self.expandDis * math.sin(theta)
            if not self.__CollisionCheck(tmpNode, self.obstacleList):
                return False
        return True

    def getFinalCourse(self, lastIndex):
        """Walk parent links from nodeList[lastIndex] back to the start,
        returning the path goal-first: [[goal], ..., [start]]."""
        path = [[self.goal.x, self.goal.y]]
        while self.nodeList[lastIndex].parent is not None:
            node = self.nodeList[lastIndex]
            path.append([node.x, node.y])
            lastIndex = node.parent
        path.append([self.start.x, self.start.y])
        return path

    def drawGraph(self, xCenter=None, cBest=None, cMin=None, etheta=None, rnd=None):
        """Draw the current tree, start/goal marks and (when a solution
        exists) the informed sampling ellipse."""
        if rnd is not None:
            if cBest != float('inf'):
                self.plot_ellipse(xCenter, cBest, cMin, etheta)
        for node in self.nodeList:
            if node.parent is not None:
                # BUGFIX: the original guard `node.x or node.y is not None`
                # parsed as `node.x or (node.y is not None)` -- check both
                # coordinates explicitly.
                if node.x is not None and node.y is not None:
                    plt.plot([node.x, self.nodeList[node.parent].x], [
                        node.y, self.nodeList[node.parent].y], "-g")
        plt.plot(self.start.x, self.start.y, "xr")
        plt.plot(self.goal.x, self.goal.y, "xr")
        plt.axis([-10, 1210, -10, 1110])
        plt.grid(True)
        plt.pause(0.01)

    def plot_ellipse(self, xCenter, cBest, cMin, etheta):  # pragma: no cover
        """Plot the informed sampling ellipse centred at xCenter."""
        a = math.sqrt(cBest**2 - cMin**2) / 2.0
        b = cBest / 2.0
        angle = math.pi / 2.0 - etheta
        cx = xCenter[0, 0]
        cy = xCenter[1, 0]
        t = np.arange(0, 2 * math.pi + 0.1, 0.1)
        x = [a * math.cos(it) for it in t]
        y = [b * math.sin(it) for it in t]
        R = np.array([[math.cos(angle), math.sin(angle)],
                      [-math.sin(angle), math.cos(angle)]])
        fx = R @ np.array([x, y])
        px = np.array(fx[0, :] + cx).flatten()
        py = np.array(fx[1, :] + cy).flatten()
        plt.plot(cx, cy, "xc")
        plt.plot(px, py, "--c")
class Node():
    """A single search-tree vertex: 2D position, cost-to-come and the index
    of its parent in the planner's node list (None for the root)."""

    def __init__(self, x, y):
        self.x, self.y = x, y
        self.cost = 0.0     # accumulated path cost from the start node
        self.parent = None  # parent index into nodeList; None = tree root
def main():
    """Build the planner, run the informed RRT* search and plot the result.

    Relies on module-level globals: ``obstacleList`` (obstacle coordinates
    rasterised at import time) and ``show_animation``.
    """
    print("Start informed rrt star planning")
    # =============================================================================
    # # create obstacles
    # obstacleList = [
    # (5, 5, 0.5),
    # (9, 6, 1),
    # (7, 5, 1),
    # (1, 5, 1),
    # (3, 6, 1),
    # (7, 9, 1)
    # ]
    # =============================================================================
    # Set params
    rrt = InformedRRTStar(start=[50, 50], goal=[200, 200],
                          randArea=[0, 1110], obstacleList=obstacleList)
    path = rrt.InformedRRTStarSearch(animation=show_animation)
    print("Done!!")
    # Plot path
    # NOTE(review): if no path was found within maxIter, `path` is None and
    # the comprehensions below raise TypeError -- confirm intended behaviour.
    if show_animation:
        rrt.drawGraph()
        plt.plot([x for (x, y) in path], [y for (x, y) in path], '-r')
        plt.grid(True)
        plt.pause(0.01)
        plt.show()


if __name__ == '__main__':
    main()
|
const { request } = require('../utils');
module.exports = (query = {}) => {
const api = `/56/networks/pancakeswap/assets/`;
return request(api, query);
};
|
define([
'../Core/arraySlice',
'../Core/BoundingSphere',
'../Core/Cartesian3',
'../Core/Cartesian4',
'../Core/Math',
'../Core/Check',
'../Core/Color',
'../Core/combine',
'../Core/ComponentDatatype',
'../Core/defaultValue',
'../Core/defined',
'../Core/defineProperties',
'../Core/destroyObject',
'../Core/FeatureDetection',
'../Core/getStringFromTypedArray',
'../Core/Matrix4',
'../Core/oneTimeWarning',
'../Core/OrthographicFrustum',
'../Core/Plane',
'../Core/PrimitiveType',
'../Core/RuntimeError',
'../Core/Transforms',
'../Renderer/Buffer',
'../Renderer/BufferUsage',
'../Renderer/DrawCommand',
'../Renderer/Pass',
'../Renderer/RenderState',
'../Renderer/ShaderProgram',
'../Renderer/ShaderSource',
'../Renderer/VertexArray',
'../ThirdParty/when',
'./BlendingState',
'./Cesium3DTileBatchTable',
'./Cesium3DTileFeature',
'./Cesium3DTileFeatureTable',
'./DracoLoader',
'./getClipAndStyleCode',
'./getClippingFunction',
'./SceneMode',
'./ShadowMode'
], function(
arraySlice,
BoundingSphere,
Cartesian3,
Cartesian4,
CesiumMath,
Check,
Color,
combine,
ComponentDatatype,
defaultValue,
defined,
defineProperties,
destroyObject,
FeatureDetection,
getStringFromTypedArray,
Matrix4,
oneTimeWarning,
OrthographicFrustum,
Plane,
PrimitiveType,
RuntimeError,
Transforms,
Buffer,
BufferUsage,
DrawCommand,
Pass,
RenderState,
ShaderProgram,
ShaderSource,
VertexArray,
when,
BlendingState,
Cesium3DTileBatchTable,
Cesium3DTileFeature,
Cesium3DTileFeatureTable,
DracoLoader,
getClipAndStyleCode,
getClippingFunction,
SceneMode,
ShadowMode) {
'use strict';
// Bail out if the browser doesn't support typed arrays, to prevent the setup function
// from failing, since we won't be able to create a WebGL context anyway.
if (!FeatureDetection.supportsTypedArrays()) {
return {};
}
var DecodingState = {
NEEDS_DECODE : 0,
DECODING : 1,
READY : 2,
FAILED : 3
};
/**
* Represents the contents of a
* {@link https://github.com/AnalyticalGraphicsInc/3d-tiles/tree/master/specification/TileFormats/PointCloud|Point Cloud}
* tile. Used internally by {@link PointCloud3DTileContent} and {@link TimeDynamicPointCloud}.
*
* @alias PointCloud
* @constructor
*
* @see PointCloud3DTileContent
* @see TimeDynamicPointCloud
*
* @private
*/
function PointCloud(options) {
//>>includeStart('debug', pragmas.debug);
Check.typeOf.object('options', options);
Check.typeOf.object('options.arrayBuffer', options.arrayBuffer);
//>>includeEnd('debug');
// Hold onto the payload until the render resources are created
this._parsedContent = undefined;
this._drawCommand = undefined;
this._isTranslucent = false;
this._styleTranslucent = false;
this._constantColor = Color.clone(Color.DARKGRAY);
this._highlightColor = Color.clone(Color.WHITE);
this._pointSize = 1.0;
this._rtcCenter = undefined;
this._quantizedVolumeScale = undefined;
this._quantizedVolumeOffset = undefined;
// These values are used to regenerate the shader when the style changes
this._styleableShaderAttributes = undefined;
this._isQuantized = false;
this._isOctEncoded16P = false;
this._isRGB565 = false;
this._hasColors = false;
this._hasNormals = false;
this._hasBatchIds = false;
// Draco
this._decodingState = DecodingState.READY;
this._dequantizeInShader = true;
this._isQuantizedDraco = false;
this._isOctEncodedDraco = false;
this._quantizedRange = 0.0;
this._octEncodedRange = 0.0;
// Use per-point normals to hide back-facing points.
this.backFaceCulling = false;
this._backFaceCulling = false;
// Whether to enable normal shading
this.normalShading = true;
this._normalShading = true;
this._opaqueRenderState = undefined;
this._translucentRenderState = undefined;
this._mode = undefined;
this._ready = false;
this._readyPromise = when.defer();
this._pointsLength = 0;
this._geometryByteLength = 0;
this._vertexShaderLoaded = options.vertexShaderLoaded;
this._fragmentShaderLoaded = options.fragmentShaderLoaded;
this._uniformMapLoaded = options.uniformMapLoaded;
this._batchTableLoaded = options.batchTableLoaded;
this._pickIdLoaded = options.pickIdLoaded;
this._opaquePass = defaultValue(options.opaquePass, Pass.OPAQUE);
this._cull = defaultValue(options.cull, true);
this.style = undefined;
this._style = undefined;
this.styleDirty = false;
this.modelMatrix = Matrix4.clone(Matrix4.IDENTITY);
this._modelMatrix = Matrix4.clone(Matrix4.IDENTITY);
this.time = 0.0; // For styling
this.shadows = ShadowMode.ENABLED;
this._boundingSphere = undefined;
this.clippingPlanes = undefined;
this.isClipped = false;
this.clippingPlanesDirty = false;
// If defined, use this matrix to position the clipping planes instead of the modelMatrix.
// This is so that when point clouds are part of a tileset they all get clipped relative
// to the root tile.
this.clippingPlaneOffsetMatrix = undefined;
this.attenuation = false;
this._attenuation = false;
// Options for geometric error based attenuation
this.geometricError = 0.0;
this.geometricErrorScale = 1.0;
this.maximumAttenuation = this._pointSize;
initialize(this, options);
}
defineProperties(PointCloud.prototype, {
pointsLength : {
get : function() {
return this._pointsLength;
}
},
geometryByteLength : {
get : function() {
return this._geometryByteLength;
}
},
ready : {
get : function() {
return this._ready;
}
},
readyPromise : {
get : function() {
return this._readyPromise.promise;
}
},
color : {
get : function() {
return Color.clone(this._highlightColor);
},
set : function(value) {
this._highlightColor = Color.clone(value, this._highlightColor);
}
},
boundingSphere : {
get : function() {
if (defined(this._drawCommand)) {
return this._drawCommand.boundingVolume;
}
},
set : function(value) {
this._boundingSphere = BoundingSphere.clone(value);
}
}
});
var sizeOfUint32 = Uint32Array.BYTES_PER_ELEMENT;
function initialize(pointCloud, options) {
var arrayBuffer = options.arrayBuffer;
var byteOffset = defaultValue(options.byteOffset, 0);
var uint8Array = new Uint8Array(arrayBuffer);
var view = new DataView(arrayBuffer);
byteOffset += sizeOfUint32; // Skip magic
var version = view.getUint32(byteOffset, true);
if (version !== 1) {
throw new RuntimeError('Only Point Cloud tile version 1 is supported. Version ' + version + ' is not.');
}
byteOffset += sizeOfUint32;
// Skip byteLength
byteOffset += sizeOfUint32;
var featureTableJsonByteLength = view.getUint32(byteOffset, true);
if (featureTableJsonByteLength === 0) {
throw new RuntimeError('Feature table must have a byte length greater than zero');
}
byteOffset += sizeOfUint32;
var featureTableBinaryByteLength = view.getUint32(byteOffset, true);
byteOffset += sizeOfUint32;
var batchTableJsonByteLength = view.getUint32(byteOffset, true);
byteOffset += sizeOfUint32;
var batchTableBinaryByteLength = view.getUint32(byteOffset, true);
byteOffset += sizeOfUint32;
var featureTableString = getStringFromTypedArray(uint8Array, byteOffset, featureTableJsonByteLength);
var featureTableJson = JSON.parse(featureTableString);
byteOffset += featureTableJsonByteLength;
var featureTableBinary = new Uint8Array(arrayBuffer, byteOffset, featureTableBinaryByteLength);
byteOffset += featureTableBinaryByteLength;
// Get the batch table JSON and binary
var batchTableJson;
var batchTableBinary;
if (batchTableJsonByteLength > 0) {
// Has a batch table JSON
var batchTableString = getStringFromTypedArray(uint8Array, byteOffset, batchTableJsonByteLength);
batchTableJson = JSON.parse(batchTableString);
byteOffset += batchTableJsonByteLength;
if (batchTableBinaryByteLength > 0) {
// Has a batch table binary
batchTableBinary = new Uint8Array(arrayBuffer, byteOffset, batchTableBinaryByteLength);
byteOffset += batchTableBinaryByteLength;
}
}
var featureTable = new Cesium3DTileFeatureTable(featureTableJson, featureTableBinary);
var pointsLength = featureTable.getGlobalProperty('POINTS_LENGTH');
featureTable.featuresLength = pointsLength;
if (!defined(pointsLength)) {
throw new RuntimeError('Feature table global property: POINTS_LENGTH must be defined');
}
var rtcCenter = featureTable.getGlobalProperty('RTC_CENTER', ComponentDatatype.FLOAT, 3);
if (defined(rtcCenter)) {
pointCloud._rtcCenter = Cartesian3.unpack(rtcCenter);
}
var positions;
var colors;
var normals;
var batchIds;
var hasPositions = false;
var hasColors = false;
var hasNormals = false;
var hasBatchIds = false;
var isQuantized = false;
var isTranslucent = false;
var isRGB565 = false;
var isOctEncoded16P = false;
var dracoBuffer;
var dracoFeatureTableProperties;
var dracoBatchTableProperties;
var featureTableDraco = defined(featureTableJson.extensions) ? featureTableJson.extensions['3DTILES_draco_point_compression'] : undefined;
var batchTableDraco = (defined(batchTableJson) && defined(batchTableJson.extensions)) ? batchTableJson.extensions['3DTILES_draco_point_compression'] : undefined;
if (defined(batchTableDraco)) {
dracoBatchTableProperties = batchTableDraco.properties;
}
if (defined(featureTableDraco)) {
dracoFeatureTableProperties = featureTableDraco.properties;
var dracoByteOffset = featureTableDraco.byteOffset;
var dracoByteLength = featureTableDraco.byteLength;
if (!defined(dracoFeatureTableProperties) || !defined(dracoByteOffset) || !defined(dracoByteLength)) {
throw new RuntimeError('Draco properties, byteOffset, and byteLength must be defined');
}
dracoBuffer = arraySlice(featureTableBinary, dracoByteOffset, dracoByteOffset + dracoByteLength);
hasPositions = defined(dracoFeatureTableProperties.POSITION);
hasColors = defined(dracoFeatureTableProperties.RGB) || defined(dracoFeatureTableProperties.RGBA);
hasNormals = defined(dracoFeatureTableProperties.NORMAL);
hasBatchIds = defined(dracoFeatureTableProperties.BATCH_ID);
isTranslucent = defined(dracoFeatureTableProperties.RGBA);
pointCloud._decodingState = DecodingState.NEEDS_DECODE;
}
var draco;
if (defined(dracoBuffer)) {
draco = {
buffer : dracoBuffer,
featureTableProperties : dracoFeatureTableProperties,
batchTableProperties : dracoBatchTableProperties,
properties : combine(dracoFeatureTableProperties, dracoBatchTableProperties),
dequantizeInShader : pointCloud._dequantizeInShader
};
}
if (!hasPositions) {
if (defined(featureTableJson.POSITION)) {
positions = featureTable.getPropertyArray('POSITION', ComponentDatatype.FLOAT, 3);
hasPositions = true;
} else if (defined(featureTableJson.POSITION_QUANTIZED)) {
positions = featureTable.getPropertyArray('POSITION_QUANTIZED', ComponentDatatype.UNSIGNED_SHORT, 3);
isQuantized = true;
hasPositions = true;
var quantizedVolumeScale = featureTable.getGlobalProperty('QUANTIZED_VOLUME_SCALE', ComponentDatatype.FLOAT, 3);
if (!defined(quantizedVolumeScale)) {
throw new RuntimeError('Global property: QUANTIZED_VOLUME_SCALE must be defined for quantized positions.');
}
pointCloud._quantizedVolumeScale = Cartesian3.unpack(quantizedVolumeScale);
pointCloud._quantizedRange = (1 << 16) - 1;
var quantizedVolumeOffset = featureTable.getGlobalProperty('QUANTIZED_VOLUME_OFFSET', ComponentDatatype.FLOAT, 3);
if (!defined(quantizedVolumeOffset)) {
throw new RuntimeError('Global property: QUANTIZED_VOLUME_OFFSET must be defined for quantized positions.');
}
pointCloud._quantizedVolumeOffset = Cartesian3.unpack(quantizedVolumeOffset);
}
}
if (!hasColors) {
if (defined(featureTableJson.RGBA)) {
colors = featureTable.getPropertyArray('RGBA', ComponentDatatype.UNSIGNED_BYTE, 4);
isTranslucent = true;
hasColors = true;
} else if (defined(featureTableJson.RGB)) {
colors = featureTable.getPropertyArray('RGB', ComponentDatatype.UNSIGNED_BYTE, 3);
hasColors = true;
} else if (defined(featureTableJson.RGB565)) {
colors = featureTable.getPropertyArray('RGB565', ComponentDatatype.UNSIGNED_SHORT, 1);
isRGB565 = true;
hasColors = true;
}
}
if (!hasNormals) {
if (defined(featureTableJson.NORMAL)) {
normals = featureTable.getPropertyArray('NORMAL', ComponentDatatype.FLOAT, 3);
hasNormals = true;
} else if (defined(featureTableJson.NORMAL_OCT16P)) {
normals = featureTable.getPropertyArray('NORMAL_OCT16P', ComponentDatatype.UNSIGNED_BYTE, 2);
isOctEncoded16P = true;
hasNormals = true;
}
}
if (!hasBatchIds) {
if (defined(featureTableJson.BATCH_ID)) {
batchIds = featureTable.getPropertyArray('BATCH_ID', ComponentDatatype.UNSIGNED_SHORT, 1);
hasBatchIds = true;
}
}
if (!hasPositions) {
throw new RuntimeError('Either POSITION or POSITION_QUANTIZED must be defined.');
}
if (defined(featureTableJson.CONSTANT_RGBA)) {
var constantRGBA = featureTable.getGlobalProperty('CONSTANT_RGBA', ComponentDatatype.UNSIGNED_BYTE, 4);
pointCloud._constantColor = Color.fromBytes(constantRGBA[0], constantRGBA[1], constantRGBA[2], constantRGBA[3], pointCloud._constantColor);
}
if (hasBatchIds) {
var batchLength = featureTable.getGlobalProperty('BATCH_LENGTH');
if (!defined(batchLength)) {
throw new RuntimeError('Global property: BATCH_LENGTH must be defined when BATCH_ID is defined.');
}
if (defined(batchTableBinary)) {
// Copy the batchTableBinary section and let the underlying ArrayBuffer be freed
batchTableBinary = new Uint8Array(batchTableBinary);
}
if (defined(pointCloud._batchTableLoaded)) {
pointCloud._batchTableLoaded(batchLength, batchTableJson, batchTableBinary);
}
}
// If points are not batched and there are per-point properties, use these properties for styling purposes
var styleableProperties;
if (!hasBatchIds && defined(batchTableBinary)) {
styleableProperties = Cesium3DTileBatchTable.getBinaryProperties(pointsLength, batchTableJson, batchTableBinary);
}
pointCloud._parsedContent = {
positions : positions,
colors : colors,
normals : normals,
batchIds : batchIds,
styleableProperties : styleableProperties,
draco : draco
};
pointCloud._pointsLength = pointsLength;
pointCloud._isQuantized = isQuantized;
pointCloud._isOctEncoded16P = isOctEncoded16P;
pointCloud._isRGB565 = isRGB565;
pointCloud._isTranslucent = isTranslucent;
pointCloud._hasColors = hasColors;
pointCloud._hasNormals = hasNormals;
pointCloud._hasBatchIds = hasBatchIds;
}
// Scratch objects reused by computeApproximateBoundingSphereFromPositions to
// avoid allocating per call.
var scratchMin = new Cartesian3();
var scratchMax = new Cartesian3();
var scratchPosition = new Cartesian3();
// Lazily built, seeded table of random samples shared by all point clouds
// (see getRandomValues).
var randomValues;
function getRandomValues(samplesLength) {
    // Lazily build the module-level table of pseudo-random samples. The fixed
    // seed keeps the values identical across runs and across callers.
    if (defined(randomValues)) {
        return randomValues;
    }
    CesiumMath.setRandomNumberSeed(0);
    var values = new Array(samplesLength);
    for (var i = 0; i < samplesLength; ++i) {
        values[i] = CesiumMath.nextRandomNumber();
    }
    randomValues = values;
    return randomValues;
}
// Computes an approximate bounding sphere from a random sample of at most 20
// points instead of scanning every position. The radius is padded by EPSILON2
// so a degenerate (e.g. single-point) cloud still gets a non-zero radius.
function computeApproximateBoundingSphereFromPositions(positions) {
    var maximumSamplesLength = 20;
    var pointsLength = positions.length / 3;
    var samplesLength = Math.min(pointsLength, maximumSamplesLength);
    // Always request the full-size table so the cached array built on the
    // first call is long enough for every later call.
    var randomValues = getRandomValues(maximumSamplesLength);
    var maxValue = Number.MAX_VALUE;
    var minValue = -Number.MAX_VALUE;
    var min = Cartesian3.fromElements(maxValue, maxValue, maxValue, scratchMin);
    var max = Cartesian3.fromElements(minValue, minValue, minValue, scratchMax);
    for (var i = 0; i < samplesLength; ++i) {
        var index = Math.floor(randomValues[i] * pointsLength);
        var position = Cartesian3.unpack(positions, index * 3, scratchPosition);
        Cartesian3.minimumByComponent(min, position, min);
        Cartesian3.maximumByComponent(max, position, max);
    }
    var boundingSphere = BoundingSphere.fromCornerPoints(min, max);
    boundingSphere.radius += CesiumMath.EPSILON2; // To avoid radius of zero
    return boundingSphere;
}
// Converts a typed array to one whose component datatype is a valid WebGL
// vertex attribute type, warning once when a lossy cast is performed.
// `name` is optional and used only in the warning text. The original code
// referenced an undeclared `name` identifier here (undefined — or the global
// `window.name` — at runtime); accepting it as a defaulted parameter fixes
// that while remaining backward-compatible with existing single-argument
// callers.
function prepareVertexAttribute(typedArray, name) {
    // WebGL does not support UNSIGNED_INT, INT, or DOUBLE vertex attributes. Convert these to FLOAT.
    var componentDatatype = ComponentDatatype.fromTypedArray(typedArray);
    if (componentDatatype === ComponentDatatype.INT || componentDatatype === ComponentDatatype.UNSIGNED_INT || componentDatatype === ComponentDatatype.DOUBLE) {
        oneTimeWarning('Cast pnts property to floats', 'Point cloud property "' + name + '" will be casted to a float array because INT, UNSIGNED_INT, and DOUBLE are not valid WebGL vertex attribute types. Some precision may be lost.');
        return new Float32Array(typedArray);
    }
    return typedArray;
}
// Scratch objects reused by the uniform callbacks in createUniformMap.
var scratchPointSizeAndTimeAndGeometricErrorAndDepthMultiplier = new Cartesian4();
var scratchQuantizedVolumeScaleAndOctEncodedRange = new Cartesian4();
var scratchColor = new Color();

// Fixed vertex attribute locations; styleable property attributes are
// appended starting at numberOfAttributes (see createResources).
var positionLocation = 0;
var colorLocation = 1;
var normalLocation = 2;
var batchIdLocation = 3;
var numberOfAttributes = 4;

var scratchClippingPlaneMatrix = new Matrix4();
// Creates the GPU resources for the point cloud from the parsed content:
// vertex buffers (positions, colors, normals, batch IDs, styleable
// properties), the vertex array, the opaque/translucent render states, and
// the draw command (shader program and uniform map are filled in later by
// createShaders/createUniformMap). Also accumulates _geometryByteLength and,
// when culling, computes _boundingSphere.
function createResources(pointCloud, frameState) {
    var context = frameState.context;
    var parsedContent = pointCloud._parsedContent;
    var pointsLength = pointCloud._pointsLength;
    var positions = parsedContent.positions;
    var colors = parsedContent.colors;
    var normals = parsedContent.normals;
    var batchIds = parsedContent.batchIds;
    var styleableProperties = parsedContent.styleableProperties;
    var hasStyleableProperties = defined(styleableProperties);
    var isQuantized = pointCloud._isQuantized;
    var isQuantizedDraco = pointCloud._isQuantizedDraco;
    var isOctEncoded16P = pointCloud._isOctEncoded16P;
    var isOctEncodedDraco = pointCloud._isOctEncodedDraco;
    var quantizedRange = pointCloud._quantizedRange;
    var octEncodedRange = pointCloud._octEncodedRange;
    var isRGB565 = pointCloud._isRGB565;
    var isTranslucent = pointCloud._isTranslucent;
    var hasColors = pointCloud._hasColors;
    var hasNormals = pointCloud._hasNormals;
    var hasBatchIds = pointCloud._hasBatchIds;

    var componentsPerAttribute;
    var componentDatatype;

    var styleableVertexAttributes = [];
    var styleableShaderAttributes = {};
    pointCloud._styleableShaderAttributes = styleableShaderAttributes;

    if (hasStyleableProperties) {
        // Each styleable property gets its own vertex buffer, at a shader
        // location after the four fixed attributes.
        var attributeLocation = numberOfAttributes;

        for (var name in styleableProperties) {
            if (styleableProperties.hasOwnProperty(name)) {
                var property = styleableProperties[name];
                var typedArray = prepareVertexAttribute(property.typedArray);
                componentsPerAttribute = property.componentCount;
                componentDatatype = ComponentDatatype.fromTypedArray(typedArray);

                var vertexBuffer = Buffer.createVertexBuffer({
                    context : context,
                    typedArray : typedArray,
                    usage : BufferUsage.STATIC_DRAW
                });

                pointCloud._geometryByteLength += vertexBuffer.sizeInBytes;

                var vertexAttribute = {
                    index : attributeLocation,
                    vertexBuffer : vertexBuffer,
                    componentsPerAttribute : componentsPerAttribute,
                    componentDatatype : componentDatatype,
                    normalize : false,
                    offsetInBytes : 0,
                    strideInBytes : 0
                };

                styleableVertexAttributes.push(vertexAttribute);
                styleableShaderAttributes[name] = {
                    location : attributeLocation,
                    componentCount : componentsPerAttribute
                };
                ++attributeLocation;
            }
        }
    }

    var positionsVertexBuffer = Buffer.createVertexBuffer({
        context : context,
        typedArray : positions,
        usage : BufferUsage.STATIC_DRAW
    });
    pointCloud._geometryByteLength += positionsVertexBuffer.sizeInBytes;

    var colorsVertexBuffer;
    if (hasColors) {
        colorsVertexBuffer = Buffer.createVertexBuffer({
            context : context,
            typedArray : colors,
            usage : BufferUsage.STATIC_DRAW
        });
        pointCloud._geometryByteLength += colorsVertexBuffer.sizeInBytes;
    }

    var normalsVertexBuffer;
    if (hasNormals) {
        normalsVertexBuffer = Buffer.createVertexBuffer({
            context : context,
            typedArray : normals,
            usage : BufferUsage.STATIC_DRAW
        });
        pointCloud._geometryByteLength += normalsVertexBuffer.sizeInBytes;
    }

    var batchIdsVertexBuffer;
    if (hasBatchIds) {
        batchIds = prepareVertexAttribute(batchIds);
        batchIdsVertexBuffer = Buffer.createVertexBuffer({
            context : context,
            typedArray : batchIds,
            usage : BufferUsage.STATIC_DRAW
        });
        pointCloud._geometryByteLength += batchIdsVertexBuffer.sizeInBytes;
    }

    var attributes = [];

    // Positions: quantized encodings stay in their integer form and are
    // dequantized in the vertex shader.
    if (isQuantized) {
        componentDatatype = ComponentDatatype.UNSIGNED_SHORT;
    } else if (isQuantizedDraco) {
        componentDatatype = (quantizedRange <= 255) ? ComponentDatatype.UNSIGNED_BYTE : ComponentDatatype.UNSIGNED_SHORT;
    } else {
        componentDatatype = ComponentDatatype.FLOAT;
    }

    attributes.push({
        index : positionLocation,
        vertexBuffer : positionsVertexBuffer,
        componentsPerAttribute : 3,
        componentDatatype : componentDatatype,
        normalize : false,
        offsetInBytes : 0,
        strideInBytes : 0
    });

    if (pointCloud._cull) {
        if (isQuantized || isQuantizedDraco) {
            // Quantized positions all lie within the quantized volume.
            pointCloud._boundingSphere = BoundingSphere.fromCornerPoints(Cartesian3.ZERO, pointCloud._quantizedVolumeScale);
        } else {
            pointCloud._boundingSphere = computeApproximateBoundingSphereFromPositions(positions);
        }
    }

    if (hasColors) {
        if (isRGB565) {
            // Packed 5-6-5 color in a single unsigned short; unpacked in the shader.
            attributes.push({
                index : colorLocation,
                vertexBuffer : colorsVertexBuffer,
                componentsPerAttribute : 1,
                componentDatatype : ComponentDatatype.UNSIGNED_SHORT,
                normalize : false,
                offsetInBytes : 0,
                strideInBytes : 0
            });
        } else {
            var colorComponentsPerAttribute = isTranslucent ? 4 : 3;
            attributes.push({
                index : colorLocation,
                vertexBuffer : colorsVertexBuffer,
                componentsPerAttribute : colorComponentsPerAttribute,
                componentDatatype : ComponentDatatype.UNSIGNED_BYTE,
                normalize : true, // bytes normalized to [0, 1] in the shader
                offsetInBytes : 0,
                strideInBytes : 0
            });
        }
    }

    if (hasNormals) {
        if (isOctEncoded16P) {
            componentsPerAttribute = 2;
            componentDatatype = ComponentDatatype.UNSIGNED_BYTE;
        } else if (isOctEncodedDraco) {
            componentsPerAttribute = 2;
            componentDatatype = (octEncodedRange <= 255) ? ComponentDatatype.UNSIGNED_BYTE : ComponentDatatype.UNSIGNED_SHORT;
        } else {
            componentsPerAttribute = 3;
            componentDatatype = ComponentDatatype.FLOAT;
        }
        attributes.push({
            index : normalLocation,
            vertexBuffer : normalsVertexBuffer,
            componentsPerAttribute : componentsPerAttribute,
            componentDatatype : componentDatatype,
            normalize : false,
            offsetInBytes : 0,
            strideInBytes : 0
        });
    }

    if (hasBatchIds) {
        attributes.push({
            index : batchIdLocation,
            vertexBuffer : batchIdsVertexBuffer,
            componentsPerAttribute : 1,
            componentDatatype : ComponentDatatype.fromTypedArray(batchIds),
            normalize : false,
            offsetInBytes : 0,
            strideInBytes : 0
        });
    }

    if (hasStyleableProperties) {
        attributes = attributes.concat(styleableVertexAttributes);
    }

    var vertexArray = new VertexArray({
        context : context,
        attributes : attributes
    });

    pointCloud._opaqueRenderState = RenderState.fromCache({
        depthTest : {
            enabled : true
        }
    });

    pointCloud._translucentRenderState = RenderState.fromCache({
        depthTest : {
            enabled : true
        },
        depthMask : false,
        blending : BlendingState.ALPHA_BLEND
    });

    pointCloud._drawCommand = new DrawCommand({
        boundingVolume : new BoundingSphere(),
        cull : pointCloud._cull,
        modelMatrix : new Matrix4(),
        primitiveType : PrimitiveType.POINTS,
        vertexArray : vertexArray,
        count : pointsLength,
        shaderProgram : undefined, // Updated in createShaders
        uniformMap : undefined, // Updated in createShaders
        renderState : isTranslucent ? pointCloud._translucentRenderState : pointCloud._opaqueRenderState,
        pass : isTranslucent ? Pass.TRANSLUCENT : pointCloud._opaquePass,
        owner : pointCloud,
        castShadows : false,
        receiveShadows : false,
        pickId : pointCloud._pickIdLoaded()
    });
}
// Builds the uniform map for the draw command: point size / time / attenuation
// packed in one vec4, highlight and constant colors, clipping-plane uniforms,
// and (when quantized or Draco oct-encoded) the dequantization uniforms.
// Assigns the result to pointCloud._drawCommand.uniformMap.
function createUniformMap(pointCloud, frameState) {
    var context = frameState.context;
    var isQuantized = pointCloud._isQuantized;
    var isQuantizedDraco = pointCloud._isQuantizedDraco;
    var isOctEncodedDraco = pointCloud._isOctEncodedDraco;

    var uniformMap = {
        u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier : function() {
            var scratch = scratchPointSizeAndTimeAndGeometricErrorAndDepthMultiplier;
            scratch.x = pointCloud._attenuation ? pointCloud.maximumAttenuation : pointCloud._pointSize;
            scratch.y = pointCloud.time;

            // z and w are only written (and only read by the shader) when
            // attenuation is enabled.
            if (pointCloud._attenuation) {
                var frustum = frameState.camera.frustum;
                var depthMultiplier;
                // Attenuation is maximumAttenuation in 2D/ortho
                if (frameState.mode === SceneMode.SCENE2D || frustum instanceof OrthographicFrustum) {
                    depthMultiplier = Number.POSITIVE_INFINITY;
                } else {
                    depthMultiplier = context.drawingBufferHeight / frameState.camera.frustum.sseDenominator;
                }

                scratch.z = pointCloud.geometricError * pointCloud.geometricErrorScale;
                scratch.w = depthMultiplier;
            }

            return scratch;
        },
        u_highlightColor : function() {
            return pointCloud._highlightColor;
        },
        u_constantColor : function() {
            return pointCloud._constantColor;
        },
        u_clippingPlanes : function() {
            var clippingPlanes = pointCloud.clippingPlanes;
            var isClipped = pointCloud.isClipped;
            // Fall back to the default texture so the sampler is always bound.
            return isClipped ? clippingPlanes.texture : context.defaultTexture;
        },
        u_clippingPlanesEdgeStyle : function() {
            var clippingPlanes = pointCloud.clippingPlanes;
            if (!defined(clippingPlanes)) {
                return Color.TRANSPARENT;
            }

            // Edge width is smuggled through the color's alpha channel.
            var style = Color.clone(clippingPlanes.edgeColor, scratchColor);
            style.alpha = clippingPlanes.edgeWidth;
            return style;
        },
        u_clippingPlanesMatrix : function() {
            var clippingPlanes = pointCloud.clippingPlanes;
            if (!defined(clippingPlanes)) {
                return Matrix4.IDENTITY;
            }
            var clippingPlaneOffsetMatrix = defaultValue(pointCloud.clippingPlaneOffsetMatrix, pointCloud._modelMatrix);
            Matrix4.multiply(context.uniformState.view3D, clippingPlaneOffsetMatrix, scratchClippingPlaneMatrix);
            return Matrix4.multiply(scratchClippingPlaneMatrix, clippingPlanes.modelMatrix, scratchClippingPlaneMatrix);
        }
    };

    if (isQuantized || isQuantizedDraco || isOctEncodedDraco) {
        uniformMap = combine(uniformMap, {
            u_quantizedVolumeScaleAndOctEncodedRange : function() {
                var scratch = scratchQuantizedVolumeScaleAndOctEncodedRange;
                if (defined(pointCloud._quantizedVolumeScale)) {
                    var scale = Cartesian3.clone(pointCloud._quantizedVolumeScale, scratch);
                    Cartesian3.divideByScalar(scale, pointCloud._quantizedRange, scratch);
                }
                scratch.w = pointCloud._octEncodedRange;
                return scratch;
            }
        });
    }

    // Allow the owner (e.g. a tileset) to wrap or extend the uniform map.
    if (defined(pointCloud._uniformMapLoaded)) {
        uniformMap = pointCloud._uniformMapLoaded(uniformMap);
    }

    pointCloud._drawCommand.uniformMap = uniformMap;
}
// Style properties that map to built-in shader variables rather than to
// per-point styleable vertex attributes (see modifyStyleFunction).
var defaultProperties = ['POSITION', 'COLOR', 'NORMAL', 'POSITION_ABSOLUTE'];
function getStyleableProperties(source, properties) {
    // Scan the generated style shader source for czm_tiles3d_style_<name>
    // references and append each unique <name> to the caller-supplied array.
    var regex = /czm_tiles3d_style_(\w+)/g;
    var matches;
    while ((matches = regex.exec(source)) !== null) {
        var propertyName = matches[1];
        if (properties.indexOf(propertyName) === -1) {
            properties.push(propertyName);
        }
    }
}
function getVertexAttribute(vertexArray, index) {
    // Linear search for the attribute bound to shader location `index`;
    // returns undefined when no attribute uses that location.
    var count = vertexArray.numberOfAttributes;
    var i = 0;
    while (i < count) {
        var candidate = vertexArray.getAttribute(i);
        if (candidate.index === index) {
            return candidate;
        }
        ++i;
    }
}
function modifyStyleFunction(source) {
    // Rewrite czm_tiles3d_style_<DEFAULTPROPERTY> references to the plain
    // lower-case shader variable names (position, color, normal, ...).
    defaultProperties.forEach(function(property) {
        var styleName = 'czm_tiles3d_style_' + property;
        var replaceName = property.toLowerCase();
        source = source.replace(new RegExp(styleName + '(\\W)', 'g'), replaceName + '$1');
    });

    // Widen the function signature so the style function receives the point's
    // position, absolute position, color, and normal.
    return source.replace('()', '(vec3 position, vec3 position_absolute, vec4 color, vec3 normal)');
}
// Generates and compiles the vertex and fragment shaders for the point cloud,
// weaving in optional style functions (color/show/pointSize), quantization
// decoding, attenuation, normal shading, back-face culling, and clipping
// planes. Also enables/disables vertex attributes to match the style's
// property usage, rebuilds the uniform map, and replaces the draw command's
// shader program. Throws RuntimeError on style/shader problems.
function createShaders(pointCloud, frameState, style) {
    var i;
    var name;
    var attribute;

    var context = frameState.context;
    var hasStyle = defined(style);
    var isQuantized = pointCloud._isQuantized;
    var isQuantizedDraco = pointCloud._isQuantizedDraco;
    var isOctEncoded16P = pointCloud._isOctEncoded16P;
    var isOctEncodedDraco = pointCloud._isOctEncodedDraco;
    var isRGB565 = pointCloud._isRGB565;
    var isTranslucent = pointCloud._isTranslucent;
    var hasColors = pointCloud._hasColors;
    var hasNormals = pointCloud._hasNormals;
    var hasBatchIds = pointCloud._hasBatchIds;
    var backFaceCulling = pointCloud._backFaceCulling;
    var normalShading = pointCloud._normalShading;
    var vertexArray = pointCloud._drawCommand.vertexArray;
    var clippingPlanes = pointCloud.clippingPlanes;
    var attenuation = pointCloud._attenuation;

    var colorStyleFunction;
    var showStyleFunction;
    var pointSizeStyleFunction;
    var styleTranslucent = isTranslucent;

    if (hasStyle) {
        // shaderState.translucent is set by the style compiler when the color
        // expression can produce alpha < 1.
        var shaderState = {
            translucent : false
        };
        colorStyleFunction = style.getColorShaderFunction('getColorFromStyle', 'czm_tiles3d_style_', shaderState);
        showStyleFunction = style.getShowShaderFunction('getShowFromStyle', 'czm_tiles3d_style_', shaderState);
        pointSizeStyleFunction = style.getPointSizeShaderFunction('getPointSizeFromStyle', 'czm_tiles3d_style_', shaderState);
        if (defined(colorStyleFunction) && shaderState.translucent) {
            styleTranslucent = true;
        }
    }

    pointCloud._styleTranslucent = styleTranslucent;

    var hasColorStyle = defined(colorStyleFunction);
    var hasShowStyle = defined(showStyleFunction);
    var hasPointSizeStyle = defined(pointSizeStyleFunction);
    var hasClippedContent = pointCloud.isClipped;

    // Get the properties in use by the style
    var styleableProperties = [];

    if (hasColorStyle) {
        getStyleableProperties(colorStyleFunction, styleableProperties);
        colorStyleFunction = modifyStyleFunction(colorStyleFunction);
    }
    if (hasShowStyle) {
        getStyleableProperties(showStyleFunction, styleableProperties);
        showStyleFunction = modifyStyleFunction(showStyleFunction);
    }
    if (hasPointSizeStyle) {
        getStyleableProperties(pointSizeStyleFunction, styleableProperties);
        pointSizeStyleFunction = modifyStyleFunction(pointSizeStyleFunction);
    }

    var usesColorSemantic = (styleableProperties.indexOf('COLOR') >= 0);
    var usesNormalSemantic = (styleableProperties.indexOf('NORMAL') >= 0);

    // Split default properties from user properties
    var userProperties = styleableProperties.filter(function(property) { return defaultProperties.indexOf(property) === -1; });

    if (usesNormalSemantic && !hasNormals) {
        throw new RuntimeError('Style references the NORMAL semantic but the point cloud does not have normals');
    }

    // Disable vertex attributes that aren't used in the style, enable attributes that are
    var styleableShaderAttributes = pointCloud._styleableShaderAttributes;
    for (name in styleableShaderAttributes) {
        if (styleableShaderAttributes.hasOwnProperty(name)) {
            attribute = styleableShaderAttributes[name];
            var enabled = (userProperties.indexOf(name) >= 0);
            var vertexAttribute = getVertexAttribute(vertexArray, attribute.location);
            vertexAttribute.enabled = enabled;
        }
    }

    var usesColors = hasColors && (!hasColorStyle || usesColorSemantic);
    if (hasColors) {
        // Disable the color vertex attribute if the color style does not reference the color semantic
        var colorVertexAttribute = getVertexAttribute(vertexArray, colorLocation);
        colorVertexAttribute.enabled = usesColors;
    }

    var usesNormals = hasNormals && (normalShading || backFaceCulling || usesNormalSemantic);
    if (hasNormals) {
        // Disable the normal vertex attribute if normals are not used
        var normalVertexAttribute = getVertexAttribute(vertexArray, normalLocation);
        normalVertexAttribute.enabled = usesNormals;
    }

    var attributeLocations = {
        a_position : positionLocation
    };
    if (usesColors) {
        attributeLocations.a_color = colorLocation;
    }
    if (usesNormals) {
        attributeLocations.a_normal = normalLocation;
    }
    if (hasBatchIds) {
        attributeLocations.a_batchId = batchIdLocation;
    }

    // Declare one attribute per user property referenced by the style.
    var attributeDeclarations = '';

    var length = userProperties.length;
    for (i = 0; i < length; ++i) {
        name = userProperties[i];
        attribute = styleableShaderAttributes[name];
        if (!defined(attribute)) {
            throw new RuntimeError('Style references a property "' + name + '" that does not exist or is not styleable.');
        }

        var componentCount = attribute.componentCount;
        var attributeName = 'czm_tiles3d_style_' + name;
        var attributeType;
        if (componentCount === 1) {
            attributeType = 'float';
        } else {
            attributeType = 'vec' + componentCount;
        }

        attributeDeclarations += 'attribute ' + attributeType + ' ' + attributeName + '; \n';
        attributeLocations[attributeName] = attribute.location;
    }

    createUniformMap(pointCloud, frameState);

    // --- Vertex shader assembly ---
    var vs = 'attribute vec3 a_position; \n' +
             'varying vec4 v_color; \n' +
             'uniform vec4 u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier; \n' +
             'uniform vec4 u_constantColor; \n' +
             'uniform vec4 u_highlightColor; \n';

    // Globals unpacked from the packed uniform inside main().
    vs += 'float u_pointSize; \n' +
          'float u_time; \n';

    if (attenuation) {
        vs += 'float u_geometricError; \n' +
              'float u_depthMultiplier; \n';
    }

    vs += attributeDeclarations;

    if (usesColors) {
        if (isTranslucent) {
            vs += 'attribute vec4 a_color; \n';
        } else if (isRGB565) {
            // Constants for unpacking RGB565 in the shader.
            vs += 'attribute float a_color; \n' +
                  'const float SHIFT_RIGHT_11 = 1.0 / 2048.0; \n' +
                  'const float SHIFT_RIGHT_5 = 1.0 / 32.0; \n' +
                  'const float SHIFT_LEFT_11 = 2048.0; \n' +
                  'const float SHIFT_LEFT_5 = 32.0; \n' +
                  'const float NORMALIZE_6 = 1.0 / 64.0; \n' +
                  'const float NORMALIZE_5 = 1.0 / 32.0; \n';
        } else {
            vs += 'attribute vec3 a_color; \n';
        }
    }
    if (usesNormals) {
        if (isOctEncoded16P || isOctEncodedDraco) {
            vs += 'attribute vec2 a_normal; \n';
        } else {
            vs += 'attribute vec3 a_normal; \n';
        }
    }

    if (hasBatchIds) {
        vs += 'attribute float a_batchId; \n';
    }

    if (isQuantized || isQuantizedDraco || isOctEncodedDraco) {
        vs += 'uniform vec4 u_quantizedVolumeScaleAndOctEncodedRange; \n';
    }

    if (hasColorStyle) {
        vs += colorStyleFunction;
    }

    if (hasShowStyle) {
        vs += showStyleFunction;
    }

    if (hasPointSizeStyle) {
        vs += pointSizeStyleFunction;
    }

    vs += 'void main() \n' +
          '{ \n' +
          '    u_pointSize = u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier.x; \n' +
          '    u_time = u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier.y; \n';

    if (attenuation) {
        vs += '    u_geometricError = u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier.z; \n' +
              '    u_depthMultiplier = u_pointSizeAndTimeAndGeometricErrorAndDepthMultiplier.w; \n';
    }

    if (usesColors) {
        if (isTranslucent) {
            vs += '    vec4 color = a_color; \n';
        } else if (isRGB565) {
            vs += '    float compressed = a_color; \n' +
                  '    float r = floor(compressed * SHIFT_RIGHT_11); \n' +
                  '    compressed -= r * SHIFT_LEFT_11; \n' +
                  '    float g = floor(compressed * SHIFT_RIGHT_5); \n' +
                  '    compressed -= g * SHIFT_LEFT_5; \n' +
                  '    float b = compressed; \n' +
                  '    vec3 rgb = vec3(r * NORMALIZE_5, g * NORMALIZE_6, b * NORMALIZE_5); \n' +
                  '    vec4 color = vec4(rgb, 1.0); \n';
        } else {
            vs += '    vec4 color = vec4(a_color, 1.0); \n';
        }
    } else {
        vs += '    vec4 color = u_constantColor; \n';
    }

    if (isQuantized || isQuantizedDraco) {
        // Dequantize: scale integer position back into the quantized volume.
        vs += '    vec3 position = a_position * u_quantizedVolumeScaleAndOctEncodedRange.xyz; \n';
    } else {
        vs += '    vec3 position = a_position; \n';
    }
    vs += '    vec3 position_absolute = vec3(czm_model * vec4(position, 1.0)); \n';

    if (usesNormals) {
        if (isOctEncoded16P) {
            vs += '    vec3 normal = czm_octDecode(a_normal); \n';
        } else if (isOctEncodedDraco) {
            // Draco oct-encoding decodes to zxy order
            vs += '    vec3 normal = czm_octDecode(a_normal, u_quantizedVolumeScaleAndOctEncodedRange.w).zxy; \n';
        } else {
            vs += '    vec3 normal = a_normal; \n';
        }
    } else {
        vs += '    vec3 normal = vec3(1.0); \n';
    }

    if (hasColorStyle) {
        vs += '    color = getColorFromStyle(position, position_absolute, color, normal); \n';
    }

    if (hasShowStyle) {
        vs += '    float show = float(getShowFromStyle(position, position_absolute, color, normal)); \n';
    }

    if (hasPointSizeStyle) {
        vs += '    gl_PointSize = getPointSizeFromStyle(position, position_absolute, color, normal); \n';
    } else if (attenuation) {
        vs += '    vec4 positionEC = czm_modelView * vec4(position, 1.0); \n' +
              '    float depth = -positionEC.z; \n' +
              // compute SSE for this point
              '    gl_PointSize = min((u_geometricError / depth) * u_depthMultiplier, u_pointSize); \n';
    } else {
        vs += '    gl_PointSize = u_pointSize; \n';
    }

    vs += '    color = color * u_highlightColor; \n';

    if (usesNormals && normalShading) {
        vs += '    normal = czm_normal * normal; \n' +
              '    float diffuseStrength = czm_getLambertDiffuse(czm_sunDirectionEC, normal); \n' +
              '    diffuseStrength = max(diffuseStrength, 0.4); \n' + // Apply some ambient lighting
              '    color.xyz *= diffuseStrength; \n';
    }

    vs += '    v_color = color; \n' +
          '    gl_Position = czm_modelViewProjection * vec4(position, 1.0); \n';

    if (usesNormals && backFaceCulling) {
        // Collapse back-facing points to a degenerate position/size.
        vs += '    float visible = step(-normal.z, 0.0); \n' +
              '    gl_Position *= visible; \n' +
              '    gl_PointSize *= visible; \n';
    }

    if (hasShowStyle) {
        vs += '    gl_Position *= show; \n' +
              '    gl_PointSize *= show; \n';
    }

    vs += '} \n';

    // --- Fragment shader assembly ---
    var fs = 'varying vec4 v_color; \n';

    if (hasClippedContent) {
        fs += 'uniform sampler2D u_clippingPlanes; \n' +
              'uniform mat4 u_clippingPlanesMatrix; \n' +
              'uniform vec4 u_clippingPlanesEdgeStyle; \n';
        fs += '\n';
        fs += getClippingFunction(clippingPlanes, context);
        fs += '\n';
    }

    fs +=  'void main() \n' +
           '{ \n' +
           '    gl_FragColor = v_color; \n';

    if (hasClippedContent) {
        fs += getClipAndStyleCode('u_clippingPlanes', 'u_clippingPlanesMatrix', 'u_clippingPlanesEdgeStyle');
    }

    fs += '} \n';

    // Allow the owner (e.g. a tileset) to post-process the shader sources.
    if (defined(pointCloud._vertexShaderLoaded)) {
        vs = pointCloud._vertexShaderLoaded(vs);
    }

    if (defined(pointCloud._fragmentShaderLoaded)) {
        fs = pointCloud._fragmentShaderLoaded(fs);
    }

    var drawCommand = pointCloud._drawCommand;
    if (defined(drawCommand.shaderProgram)) {
        // Destroy the old shader
        drawCommand.shaderProgram.destroy();
    }
    drawCommand.shaderProgram = ShaderProgram.fromCache({
        context : context,
        vertexShaderSource : vs,
        fragmentShaderSource : fs,
        attributeLocations : attributeLocations
    });

    try {
        // Check if the shader compiles correctly. If not there is likely a syntax error with the style.
        drawCommand.shaderProgram._bind();
    } catch (error) {
        // Rephrase the error.
        throw new RuntimeError('Error generating style shader: this may be caused by a type mismatch, index out-of-bounds, or other syntax error.');
    }
}
// Drives asynchronous Draco decoding of the parsed content. Returns false
// when decoding is complete (or was never needed), and true while a decode
// is pending or in flight, in which case the caller should skip this frame.
function decodeDraco(pointCloud, context) {
    if (pointCloud._decodingState === DecodingState.READY) {
        return false;
    }
    if (pointCloud._decodingState === DecodingState.NEEDS_DECODE) {
        var parsedContent = pointCloud._parsedContent;
        var draco = parsedContent.draco;
        // decodePromise may be undefined if the loader can't start a decode
        // yet; in that case stay in NEEDS_DECODE and try again next frame.
        var decodePromise = DracoLoader.decodePointCloud(draco, context);
        if (defined(decodePromise)) {
            pointCloud._decodingState = DecodingState.DECODING;
            decodePromise.then(function(result) {
                pointCloud._decodingState = DecodingState.READY;
                var decodedPositions = defined(result.POSITION) ? result.POSITION.array : undefined;
                var decodedRgb = defined(result.RGB) ? result.RGB.array : undefined;
                var decodedRgba = defined(result.RGBA) ? result.RGBA.array : undefined;
                var decodedNormals = defined(result.NORMAL) ? result.NORMAL.array : undefined;
                var decodedBatchIds = defined(result.BATCH_ID) ? result.BATCH_ID.array : undefined;
                var isQuantizedDraco = defined(decodedPositions) && defined(result.POSITION.data.quantization);
                var isOctEncodedDraco = defined(decodedNormals) && defined(result.NORMAL.data.quantization);
                if (isQuantizedDraco) {
                    // Draco quantization range == quantized volume scale - size in meters of the quantized volume
                    // Internal quantized range is the range of values of the quantized data, e.g. 255 for 8-bit, 1023 for 10-bit, etc
                    var quantization = result.POSITION.data.quantization;
                    var range = quantization.range;
                    pointCloud._quantizedVolumeScale = Cartesian3.fromElements(range, range, range);
                    pointCloud._quantizedVolumeOffset = Cartesian3.unpack(quantization.minValues);
                    pointCloud._quantizedRange = (1 << quantization.quantizationBits) - 1.0;
                    pointCloud._isQuantizedDraco = true;
                }
                if (isOctEncodedDraco) {
                    pointCloud._octEncodedRange = (1 << result.NORMAL.data.quantization.quantizationBits) - 1.0;
                    pointCloud._isOctEncodedDraco = true;
                }
                var styleableProperties = parsedContent.styleableProperties;
                var batchTableProperties = draco.batchTableProperties;
                // Draco-decoded batch-table properties become styleable properties.
                for (var name in batchTableProperties) {
                    if (batchTableProperties.hasOwnProperty(name)) {
                        var property = result[name];
                        if (!defined(styleableProperties)) {
                            styleableProperties = {};
                        }
                        styleableProperties[name] = {
                            typedArray : property.array,
                            componentCount : property.data.componentsPerAttribute
                        };
                    }
                }
                // Prefer decoded arrays; fall back to any uncompressed arrays
                // parsed earlier from the feature table.
                parsedContent.positions = defaultValue(decodedPositions, parsedContent.positions);
                parsedContent.colors = defaultValue(defaultValue(decodedRgba, decodedRgb), parsedContent.colors);
                parsedContent.normals = defaultValue(decodedNormals, parsedContent.normals);
                parsedContent.batchIds = defaultValue(decodedBatchIds, parsedContent.batchIds);
                parsedContent.styleableProperties = styleableProperties;
            }).otherwise(function(error) {
                pointCloud._decodingState = DecodingState.FAILED;
                pointCloud._readyPromise.reject(error);
            });
        }
    }
    return true;
}
// Scratch objects reused by PointCloud.prototype.update.
var scratchComputedTranslation = new Cartesian4();
var scratchScale = new Cartesian3();
// Per-frame update: waits for Draco decoding, lazily creates GPU resources on
// first use, refreshes the model matrix / bounding volume / shaders when
// their inputs change, and pushes the draw command for render and pick passes.
PointCloud.prototype.update = function(frameState) {
    var context = frameState.context;
    var decoding = decodeDraco(this, context);
    if (decoding) {
        // Nothing can be rendered until decoding finishes.
        return;
    }

    var shadersDirty = false;
    var modelMatrixDirty = !Matrix4.equals(this._modelMatrix, this.modelMatrix);

    if (this._mode !== frameState.mode) {
        this._mode = frameState.mode;
        modelMatrixDirty = true;
    }

    if (!defined(this._drawCommand)) {
        // First update after decode: build GPU resources and resolve readiness.
        createResources(this, frameState);
        modelMatrixDirty = true;
        shadersDirty = true;
        this._ready = true;
        this._readyPromise.resolve(this);
        this._parsedContent = undefined; // Unload
    }

    if (modelMatrixDirty) {
        Matrix4.clone(this.modelMatrix, this._modelMatrix);
        var modelMatrix = this._drawCommand.modelMatrix;
        Matrix4.clone(this._modelMatrix, modelMatrix);
        // Fold the RTC center and quantized volume offset into the model matrix
        // so the shader works in local/quantized coordinates.
        if (defined(this._rtcCenter)) {
            Matrix4.multiplyByTranslation(modelMatrix, this._rtcCenter, modelMatrix);
        }
        if (defined(this._quantizedVolumeOffset)) {
            Matrix4.multiplyByTranslation(modelMatrix, this._quantizedVolumeOffset, modelMatrix);
        }

        if (frameState.mode !== SceneMode.SCENE3D) {
            var projection = frameState.mapProjection;
            var translation = Matrix4.getColumn(modelMatrix, 3, scratchComputedTranslation);
            if (!Cartesian4.equals(translation, Cartesian4.UNIT_W)) {
                Transforms.basisTo2D(projection, modelMatrix, modelMatrix);
            }
        }

        var boundingSphere = this._drawCommand.boundingVolume;
        BoundingSphere.clone(this._boundingSphere, boundingSphere);

        if (this._cull) {
            // Transform the local-space bounding sphere into world space.
            var center = boundingSphere.center;
            Matrix4.multiplyByPoint(modelMatrix, center, center);
            var scale = Matrix4.getScale(modelMatrix, scratchScale);
            boundingSphere.radius *= Cartesian3.maximumComponent(scale);
        }
    }

    // Any of these changes requires regenerating the shaders.
    if (this.clippingPlanesDirty) {
        this.clippingPlanesDirty = false;
        shadersDirty = true;
    }

    if (this._attenuation !== this.attenuation) {
        this._attenuation = this.attenuation;
        shadersDirty = true;
    }

    if (this.backFaceCulling !== this._backFaceCulling) {
        this._backFaceCulling = this.backFaceCulling;
        shadersDirty = true;
    }

    if (this.normalShading !== this._normalShading) {
        this._normalShading = this.normalShading;
        shadersDirty = true;
    }

    if (this._style !== this.style || this.styleDirty) {
        this._style = this.style;
        this.styleDirty = false;
        shadersDirty = true;
    }

    if (shadersDirty) {
        createShaders(this, frameState, this._style);
    }

    this._drawCommand.castShadows = ShadowMode.castShadows(this.shadows);
    this._drawCommand.receiveShadows = ShadowMode.receiveShadows(this.shadows);

    // Update the render state
    var isTranslucent = (this._highlightColor.alpha < 1.0) || (this._constantColor.alpha < 1.0) || this._styleTranslucent;
    this._drawCommand.renderState = isTranslucent ? this._translucentRenderState : this._opaqueRenderState;
    this._drawCommand.pass = isTranslucent ? Pass.TRANSLUCENT : this._opaquePass;

    var commandList = frameState.commandList;

    var passes = frameState.passes;
    if (passes.render || passes.pick) {
        commandList.push(this._drawCommand);
    }
};
/**
 * Reports whether destroy() has been called on this point cloud.
 * Always returns false for a live instance.
 */
PointCloud.prototype.isDestroyed = function () {
    return false;
};
/**
 * Releases the WebGL resources held by the draw command (vertex array and
 * shader program) and destroys this object. Callers must not use the
 * instance after calling destroy().
 */
PointCloud.prototype.destroy = function () {
    var drawCommand = this._drawCommand;
    if (defined(drawCommand)) {
        if (drawCommand.vertexArray) {
            drawCommand.vertexArray = drawCommand.vertexArray.destroy();
        }
        if (drawCommand.shaderProgram) {
            drawCommand.shaderProgram = drawCommand.shaderProgram.destroy();
        }
    }
    return destroyObject(this);
};
return PointCloud;
});
|
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { connect } from 'react-redux';
import { FlyoutFooter } from './view';
import { updateFlyout, FLYOUT_STATE } from '../../../store/ui';
import { promoteTemporaryStyles, clearTemporaryStyles, clearTemporaryLayers,
setSelectedLayer, removeSelectedLayer, promoteTemporaryLayers } from '../../../actions/store_actions';
import { getSelectedLayer } from '../../../selectors/map_selectors';
// Map Redux state to props: a layer counts as "new" while it is still
// flagged temporary (i.e. it has not been promoted/saved yet).
const mapStateToProps = state => ({
  isNewLayer: getSelectedLayer(state).isTemporary()
});
// Map dispatch to the flyout footer's three actions. Every handler closes
// the flyout first, then performs its layer/style bookkeeping.
const mapDispatchToProps = dispatch => ({
  cancelLayerPanel() {
    dispatch(updateFlyout(FLYOUT_STATE.NONE));
    dispatch(clearTemporaryStyles());
    dispatch(clearTemporaryLayers());
  },
  saveLayerEdits(isNewLayer) {
    dispatch(updateFlyout(FLYOUT_STATE.NONE));
    dispatch(promoteTemporaryStyles());
    if (isNewLayer) {
      // Only a freshly added (temporary) layer needs promotion.
      dispatch(promoteTemporaryLayers());
    }
    dispatch(setSelectedLayer(null));
  },
  removeLayer() {
    dispatch(updateFlyout(FLYOUT_STATE.NONE));
    dispatch(removeSelectedLayer());
    dispatch(setSelectedLayer(null));
  }
});
// Wire the footer view into the store and re-export it under its public name.
const ConnectedFlyoutFooter = connect(mapStateToProps, mapDispatchToProps)(FlyoutFooter);
export { ConnectedFlyoutFooter as FlyoutFooter };
|
const mongoose = require("mongoose");

const { Schema } = mongoose;

// A single chat message: who sent what, in which room. The `timestamps`
// option makes mongoose maintain createdAt/updatedAt automatically.
const messageSchema = new Schema(
  {
    text: { type: String, minlength: 1, maxlength: 2000, required: true },
    username: { type: String, required: true, minlength: 1 },
    user: { type: Schema.Types.ObjectId, ref: "User", required: true },
    room: { type: String, required: true, minlength: 1 },
  },
  { timestamps: true }
);

// Compile the schema into the exported model.
module.exports = mongoose.model("Message", messageSchema);

// note: I couldn't get this to work, so I'm setting it to string for now
// room: { type: mongoose.Schema.Types.ObjectId, required: true },
// roomName: { type: mongoose.Schema.Types.ObjectId, required: true },
// image_url: { type: String, minlength: 0, maxlength: 500 },
// sent_on: { type: Date, required: true, default: Date.now() }
// link
// comments
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
# Absolute directory containing this setup.py; used to resolve sibling
# files (requirements.txt, README.md, certleak/version.py).
setup_path = os.path.dirname(os.path.abspath(__file__))
# Every importable package in the project, excluding the test suite.
packages = find_packages(exclude=["tests*"])
# Taken from https://github.com/python-telegram-bot/python-telegram-bot/blob/9d99660ba95b103b3e1dc80414a5ce2fd805260b/setup.py#L9
def requirements():
    """Build the requirements list for this project.

    Reads ``requirements.txt`` next to setup.py, one requirement per
    line. Blank lines and ``#`` comment lines are skipped so they do
    not end up in ``install_requires``.
    """
    requirements_list = []
    with open(os.path.join(setup_path, "requirements.txt")) as reqs:
        for install in reqs:
            requirement = install.strip()
            # pip ignores empty lines and comments in a requirements file;
            # mirror that instead of passing them through to setuptools.
            if requirement and not requirement.startswith("#"):
                requirements_list.append(requirement)
    return requirements_list
# Long description shown on PyPI, taken verbatim from the README.
with open(os.path.join(setup_path, "README.md"), "r", encoding="utf-8") as file:
    readme = file.read()


def _version_from_tag(tag):
    """Return the bare version for a git tag, removing one leading 'v'.

    ``tag.replace("v", "")`` would also mangle any other 'v' in the tag
    (e.g. "v1.0.dev1" -> "1.0.de1"), so only the prefix is stripped.
    """
    return tag[1:] if tag.startswith("v") else tag


# Check if we are running on CI
CI = os.environ.get("CI")
if CI:
    version = ""
    TRAVIS_TAG = os.environ.get("TRAVIS_TAG")
    GITHUB_ACTIONS = os.environ.get("GITHUB_ACTIONS")
    if TRAVIS_TAG:
        print("Running on Travis!")
        version = _version_from_tag(TRAVIS_TAG)
    elif GITHUB_ACTIONS:
        print("Running on GitHub Actions!")
        # A ref looks like "refs/tags/v1.2.3"; the tag is the last segment.
        GITHUB_REF = os.environ.get("GITHUB_REF")
        tag = GITHUB_REF.split("/")[-1]
        version = _version_from_tag(tag)
else:
    # Taken from https://packaging.python.org/guides/single-sourcing-package-version/
    version_dict = {}
    version_file = os.path.join(setup_path, "certleak", "version.py")
    with open(version_file, "r", encoding="utf-8") as file:
        exec(file.read(), version_dict)
    version = version_dict["__version__"]
print("Building version {} of certleak".format(version))
# Register the package with setuptools. All metadata is static except
# `version` (determined above from CI tags or certleak/version.py) and
# `install_requires` (read from requirements.txt).
setup(name="certleak",
      version=version,
      install_requires=requirements(),
      keywords="python certificate tls osint framework",
      description="Python framework for collecting and analyzing TLS certificate data via the Certificate Transparency Network",
      long_description=readme,
      long_description_content_type="text/markdown",
      url="https://github.com/d-Rickyy-b/certleak",
      author="d-Rickyy-b",
      author_email="certleak@rico-j.de",
      license="MIT",
      packages=packages,
      include_package_data=True,
      classifiers=[
          "Development Status :: 5 - Production/Stable",
          "Environment :: Console",
          "Intended Audience :: Developers",
          "Intended Audience :: Science/Research",
          "License :: OSI Approved :: MIT License",
          "Operating System :: OS Independent",
          "Topic :: Software Development :: Libraries :: Python Modules",
          "Topic :: Security",
          "Topic :: Internet",
          "Programming Language :: Python",
          "Programming Language :: Python :: 3.5",
          "Programming Language :: Python :: 3.6",
          "Programming Language :: Python :: 3.7"
      ],
      )
|
const fs = require('fs');
const { minify } = require('html-minifier');
const version = require('./package.json').version;

// Minify public/index.html in place and stamp the package version into it.
const FILE_PATH = './public/index.html';
const DESTINATION_PATH = './public/index.html';

console.log('[html-minify] reading file');
const originalHtml = fs.readFileSync(FILE_PATH, 'utf8');
const originalSize = originalHtml.length;

console.log('[html-minify] minify');
let outputHtml = minify(originalHtml, {
  removeComments: true,
  minifyCSS: true,
  minifyJS: true,
  collapseWhitespace: true,
  preserveLineBreaks: false
});

// Substitute the first "{{ version }}" placeholder with the package version.
outputHtml = outputHtml.replace('{{ version }}', version);

// The final number logged is the size reduction in percent.
console.log('[html-minify] write file', DESTINATION_PATH, ((1 - outputHtml.length / originalSize) * 100).toFixed(2));
fs.writeFileSync(DESTINATION_PATH, outputHtml, 'utf8');
|
/*Finding a value in a linked list and returning the result*/
#include <stdio.h>
/* Singly linked list node holding one integer payload. */
struct entry
{
    int value ;             /* payload */
    struct entry *next ;    /* next node, or a null pointer at the end */
} ;
/* Walk the list starting at lst_ptr and return a pointer to the first
 * node whose value equals input, or a null pointer if no node matches. */
struct entry * locate ( struct entry *lst_ptr, int input )
{
    struct entry *node ;

    for ( node = lst_ptr ; node != (struct entry *) 0 ; node = node->next )
    {
        if ( node->value == input )
            return node ;
    }

    return (struct entry *) 0 ;
}
/* Build a four-node list, read an integer from the user and report
 * whether it occurs in the list. Returns 0 on success, 1 on bad input. */
int main (void)
{
    /* locate() is defined above in this file, so the redundant local
     * prototype the original carried has been dropped. */
    struct entry n1, n2, n3, n4, *lst = &n1, *result ;  /* stack-allocated nodes */
    int match ;

    n1.value = 10 ;                     /* initialise node payloads */
    n2.value = 32 ;
    n3.value = 42 ;
    n4.value = 99 ;

    n1.next = &n2 ;                     /* chain the nodes together */
    n2.next = &n3 ;
    n3.next = &n4 ;
    n4.next = (struct entry *) 0 ;      /* null pointer terminates the list */

    printf ( "Enter value : " ) ;       /* ask for the value to locate */
    if ( scanf ( "%i", &match ) != 1 )  /* reject non-numeric/EOF input */
    {
        printf ( "Invalid input\n" ) ;
        return 1 ;
    }

    result = locate ( lst, match ) ;    /* search the list */
    if ( result != (struct entry *) 0 ) /* display the outcome */
        printf ( "Located : %i\n", result->value ) ;
    else
        printf ( "Not found\n") ;
    return 0 ;
}
|
from chainer.dataset.tabular import tabular_dataset
class _WithConverter(tabular_dataset.TabularDataset):
    """A tabular dataset that wraps ``dataset`` and applies ``converter``
    in :meth:`convert`.

    A tuple is expanded into positional arguments and a dict into keyword
    arguments; any other value is passed through as a single argument.
    All other dataset behavior is delegated to the wrapped dataset.
    """

    def __init__(self, dataset, converter):
        self._dataset = dataset
        self._converter = converter

    def __len__(self):
        return len(self._dataset)

    @property
    def keys(self):
        return self._dataset.keys

    @property
    def mode(self):
        return self._dataset.mode

    def get_examples(self, indices, key_indices):
        # Raw example fetching is untouched; conversion happens only
        # when convert() is called.
        return self._dataset.get_examples(indices, key_indices)

    def convert(self, data):
        converter = self._converter
        if isinstance(data, tuple):
            return converter(*data)
        if isinstance(data, dict):
            return converter(**data)
        return converter(data)
|
# Greet the user differently depending on the name they type in.
nome = str(input('Qual é o seu nome ?')).strip()
if nome == 'Gustavo':
    print('Que nome bonito!')
elif nome == 'Pedro' or nome == 'Maria' or nome == 'Paulo':
    print('Seu nome é bem popular no Brasil')
elif nome in ('Ana', 'Claudia', 'Jéssica', 'Juliana'):
    # Membership against a tuple of names. The original test
    # (nome in 'Ana Claudia Jéssica Juliana') was a *substring* check on a
    # str, so partial fragments such as 'a Cl' or a single 'a' also matched.
    print('Belo nome Feminino')
else:
    print('Seu nome é normal.')
print('Tenha um bom dia, {}!'.format(nome))
|
// Auto-generated file created by react-native-storybook-loader
// Do not edit.
//
// https://github.com/elderfo/react-native-storybook-loader.git
// Registers every *.stories module with the Storybook runtime. The require
// calls must remain literal (no loop over `stories`) so the React Native
// bundler's static analysis can include them; this list is regenerated by
// react-native-storybook-loader and should not be edited by hand.
function loadStories() {
  require('../components/ActionSheet/ActionSheet.stories');
  require('../components/AvatarInput/AvatarInput.stories');
  require('../components/Backdrop/Backdrop.stories');
  require('../components/Chat/Chat.stories');
  require('../components/ChatFooter/ChatFooter.stories');
  require('../components/ChatListItem/ChatListItem.stories');
  require('../components/CircularProgress/CircularProgress.stories');
  require('../components/ComponentTemplate/ComponentTemplate.stories');
  require('../components/ConnectivityIndicator/ConnectivityIndicator.stories');
  require('../components/DatePicker/DatePicker.stories');
  require('../components/ErrorBoundary/ErrorBoundary.stories');
  require('../components/FieldInput/FieldInput.stories');
  require('../components/FieldSection/FieldSection.stories');
  require('../components/FieldView/FieldView.stories');
  require('../components/FriendList/FriendList.stories');
  require('../components/FriendListItem/FriendListItem.stories');
  require('../components/FriendsButton/FriendsButton.stories');
  require('../components/FriendsTabBarLabel/FriendsTabBarLabel.stories');
  require('../components/Icon/Icon.stories');
  require('../components/Image/Image.stories');
  require('../components/ImageComments/ImageComments.stories');
  require('../components/ImageCommentsItem/ImageCommentsItem.stories');
  require('../components/ImageFit/ImageFit.stories');
  require('../components/ImagePicker/ImagePicker.stories');
  require('../components/ImagePickerActionSheet/ImagePickerActionSheet.stories');
  require('../components/ImageProgress/ImageProgress.stories');
  require('../components/ImageView/ImageView.stories');
  require('../components/ListPicker/ListPicker.stories');
  require('../components/MessageItem/MessageItem.stories');
  require('../components/MessageList/MessageList.stories');
  require('../components/MessageStateIndicator/MessageStateIndicator.stories');
  require('../components/NoContent/NoContent.stories');
  require('../components/Overlay/Overlay.stories');
  require('../components/PhotoEdit/PhotoEdit.stories');
  require('../components/PhotoGrid/PhotoGrid.stories');
  require('../components/PhotoList/PhotoList.stories');
  require('../components/PhotoListItem/PhotoListItem.stories');
  require('../components/Placeholder/Placeholder.stories');
  require('../components/ProfileFieldForm/ProfileFieldForm.stories');
  require('../components/ProfileFieldInput/ProfileFieldInput.stories');
  require('../components/ProfileFieldView/ProfileFieldView.stories');
  require('../components/ProfileFieldsView/ProfileFieldsView.stories');
  require('../components/SearchBar/SearchBar.stories');
  require('../components/SettingList/SettingList.stories');
  require('../components/Shadow/Shadow.stories');
  require('../components/Spinner/Spinner.stories');
  require('../components/TabBarIcon/TabBarIcon.stories');
  require('../components/TabBarLabel/TabBarLabel.stories');
  require('../components/TabNavigator/TabNavigator.stories');
  require('../components/TextPicker/TextPicker.stories');
  require('../components/Thumbnail/Thumbnail.stories');
  require('../components/UserChats/UserChats.stories');
  require('../components/UserHeading/UserHeading.stories');
  require('../components/UserList/UserList.stories');
  require('../components/UserListItem/UserListItem.stories');
  require('../components/UsersRow/UsersRow.stories');
  require('../components/UsersRowItem/UsersRowItem.stories');
  require('../screens/ChatSearch/ChatSearch.stories');
  require('../screens/EmailVerification/EmailVerification.stories');
  require('../screens/ForgotPassword/ForgotPassword.stories');
  require('../screens/Launch/Launch.stories');
  require('../screens/MyPhotos/MyPhotos.stories');
  require('../screens/PendingApproval/PendingApproval.stories');
  require('../screens/SignIn/SignIn.stories');
  require('../screens/SignUp/SignUp.stories');
  require('../screens/UserSearch/UserSearch.stories');
  require('./stories/Button.stories');
}
// Flat list of the same story module paths, exported for tooling
// (kept in sync with loadStories() by the generator).
const stories = [
  '../components/ActionSheet/ActionSheet.stories',
  '../components/AvatarInput/AvatarInput.stories',
  '../components/Backdrop/Backdrop.stories',
  '../components/Chat/Chat.stories',
  '../components/ChatFooter/ChatFooter.stories',
  '../components/ChatListItem/ChatListItem.stories',
  '../components/CircularProgress/CircularProgress.stories',
  '../components/ComponentTemplate/ComponentTemplate.stories',
  '../components/ConnectivityIndicator/ConnectivityIndicator.stories',
  '../components/DatePicker/DatePicker.stories',
  '../components/ErrorBoundary/ErrorBoundary.stories',
  '../components/FieldInput/FieldInput.stories',
  '../components/FieldSection/FieldSection.stories',
  '../components/FieldView/FieldView.stories',
  '../components/FriendList/FriendList.stories',
  '../components/FriendListItem/FriendListItem.stories',
  '../components/FriendsButton/FriendsButton.stories',
  '../components/FriendsTabBarLabel/FriendsTabBarLabel.stories',
  '../components/Icon/Icon.stories',
  '../components/Image/Image.stories',
  '../components/ImageComments/ImageComments.stories',
  '../components/ImageCommentsItem/ImageCommentsItem.stories',
  '../components/ImageFit/ImageFit.stories',
  '../components/ImagePicker/ImagePicker.stories',
  '../components/ImagePickerActionSheet/ImagePickerActionSheet.stories',
  '../components/ImageProgress/ImageProgress.stories',
  '../components/ImageView/ImageView.stories',
  '../components/ListPicker/ListPicker.stories',
  '../components/MessageItem/MessageItem.stories',
  '../components/MessageList/MessageList.stories',
  '../components/MessageStateIndicator/MessageStateIndicator.stories',
  '../components/NoContent/NoContent.stories',
  '../components/Overlay/Overlay.stories',
  '../components/PhotoEdit/PhotoEdit.stories',
  '../components/PhotoGrid/PhotoGrid.stories',
  '../components/PhotoList/PhotoList.stories',
  '../components/PhotoListItem/PhotoListItem.stories',
  '../components/Placeholder/Placeholder.stories',
  '../components/ProfileFieldForm/ProfileFieldForm.stories',
  '../components/ProfileFieldInput/ProfileFieldInput.stories',
  '../components/ProfileFieldView/ProfileFieldView.stories',
  '../components/ProfileFieldsView/ProfileFieldsView.stories',
  '../components/SearchBar/SearchBar.stories',
  '../components/SettingList/SettingList.stories',
  '../components/Shadow/Shadow.stories',
  '../components/Spinner/Spinner.stories',
  '../components/TabBarIcon/TabBarIcon.stories',
  '../components/TabBarLabel/TabBarLabel.stories',
  '../components/TabNavigator/TabNavigator.stories',
  '../components/TextPicker/TextPicker.stories',
  '../components/Thumbnail/Thumbnail.stories',
  '../components/UserChats/UserChats.stories',
  '../components/UserHeading/UserHeading.stories',
  '../components/UserList/UserList.stories',
  '../components/UserListItem/UserListItem.stories',
  '../components/UsersRow/UsersRow.stories',
  '../components/UsersRowItem/UsersRowItem.stories',
  '../screens/ChatSearch/ChatSearch.stories',
  '../screens/EmailVerification/EmailVerification.stories',
  '../screens/ForgotPassword/ForgotPassword.stories',
  '../screens/Launch/Launch.stories',
  '../screens/MyPhotos/MyPhotos.stories',
  '../screens/PendingApproval/PendingApproval.stories',
  '../screens/SignIn/SignIn.stories',
  '../screens/SignUp/SignUp.stories',
  '../screens/UserSearch/UserSearch.stories',
  './stories/Button.stories',
];
// Public API consumed by the Storybook entry point.
module.exports = {
  loadStories,
  stories,
};
|
mycallback( {"ELECTION CODE": "G2010", "EXPENDITURE PURPOSE DESCRIP": "Advertising: Tele-Town Hall Fee", "BENEFICIARY CANDIDATE OFFICE": "", "PAYEE ZIP": "222031553", "MEMO CODE": "", "PAYEE STATE": "VA", "PAYEE LAST NAME": "", "PAYEE CITY": "Arlington", "PAYEE SUFFIX": "", "CONDUIT STREET 2": "", "CONDUIT STREET 1": "", "PAYEE FIRST NAME": "", "BACK REFERENCE SCHED NAME": "", "BENEFICIARY COMMITTEE NAME": "", "PAYEE PREFIX": "", "MEMO TEXT/DESCRIPTION": "", "FILER COMMITTEE ID NUMBER": "C00286187", "EXPENDITURE AMOUNT (F3L Bundled)": "2127.98", "BENEFICIARY CANDIDATE MIDDLE NAME": "", "BENEFICIARY CANDIDATE LAST NAME": "", "_record_type": "fec.version.v7_0.SB", "PAYEE STREET 2": "Suite 802", "PAYEE STREET 1": "4600 Fairfax Drive", "SEMI-ANNUAL REFUNDED BUNDLED AMT": "", "Reference to SI or SL system code that identifies the Account": "", "CONDUIT CITY": "", "ENTITY TYPE": "ORG", "BENEFICIARY CANDIDATE FEC ID": "", "BENEFICIARY COMMITTEE FEC ID": "", "BENEFICIARY CANDIDATE STATE": "", "BENEFICIARY CANDIDATE FIRST NAME": "", "PAYEE MIDDLE NAME": "", "ELECTION OTHER DESCRIPTION": "", "_src_file": "2011/20110411/721439.fec_1.yml", "CONDUIT STATE": "", "CATEGORY CODE": "004", "EXPENDITURE PURPOSE CODE": "", "BENEFICIARY CANDIDATE DISTRICT": "", "TRANSACTION ID NUMBER": "B-E-24666", "BACK REFERENCE TRAN ID NUMBER": "", "EXPENDITURE DATE": "20101013", "BENEFICIARY CANDIDATE PREFIX": "", "CONDUIT NAME": "", "PAYEE ORGANIZATION NAME": "Tele-Town Hall Services", "BENEFICIARY CANDIDATE SUFFIX": "", "CONDUIT ZIP": "", "FORM TYPE": "SB17"});
|
#ifndef BUILDINGSPRITE_H
#define BUILDINGSPRITE_H
#include "SpriteLibrary.h"
#include "UnitSprite.h"
#include "Building.h"

#include <map>
#include <string>
#include <vector>
/* Sprite wrapper that draws a Building using images resolved through a
 * StiGame::SpriteLibrary. */
class BuildingSprite :
    public UnitSprite
{
public:
    // NOTE(review): both pointers appear to be non-owning (no delete visible
    // in this header) — the building and library presumably must outlive the
    // sprite; confirm against the implementation.
    BuildingSprite(Building *m_building, StiGame::SpriteLibrary *m_library);
    virtual ~BuildingSprite();
    void render(void);
    Unit* getUnit(void);
protected:
    StiGame::SpriteLibrary *library;    // source of sprite assets
    Building *building;                 // the building being rendered
    std::map<std::string, StiGame::ClonedSprite> sprites;   // sprites keyed by name
};
#endif // BUILDINGSPRITE_H
|
# Run the _testcapi module tests (tests for the Python/C API): by defn,
# these are all functions _testcapi exports whose name begins with 'test_'.
from collections import namedtuple, OrderedDict
import os
import pickle
import platform
import random
import re
import subprocess
import sys
import sysconfig
import textwrap
import threading
import time
import unittest
from test import support
from test.support import MISSING_C_DOCSTRINGS
from test.support.script_helper import assert_python_failure, assert_python_ok
# _posixsubprocess is POSIX-only; the tests that need it skip themselves
# when it is unavailable (see the skipUnless decorators below).
try:
    import _posixsubprocess
except ImportError:
    _posixsubprocess = None

# Skip this test if the _testcapi module isn't available.
_testcapi = support.import_module('_testcapi')

# Were we compiled --with-pydebug or with #define Py_DEBUG?
Py_DEBUG = hasattr(sys, 'gettotalrefcount')
def testfunction(self):
    # NOTE: the docstring text below is compared verbatim in
    # CAPITest.test_instancemethod — do not reword it.
    """some doc"""
    return self
class InstanceMethod:
    # Plain callables wrapped with the C-level instancemethod descriptor,
    # used to check that it binds like a regular Python method.
    id = _testcapi.instancemethod(id)
    testfunction = _testcapi.instancemethod(testfunction)
class CAPITest(unittest.TestCase):
    """Tests of assorted Python/C API entry points exposed via _testcapi."""

    def test_instancemethod(self):
        inst = InstanceMethod()
        self.assertEqual(id(inst), inst.id())
        self.assertTrue(inst.testfunction() is inst)
        self.assertEqual(inst.testfunction.__doc__, testfunction.__doc__)
        self.assertEqual(InstanceMethod.testfunction.__doc__, testfunction.__doc__)

        InstanceMethod.testfunction.attribute = "test"
        self.assertEqual(testfunction.attribute, "test")
        self.assertRaises(AttributeError, setattr, inst.testfunction, "attribute", "test")

    def test_no_FatalError_infinite_loop(self):
        with support.SuppressCrashReport():
            p = subprocess.Popen([sys.executable, "-c",
                                  'import _testcapi;'
                                  '_testcapi.crash_no_current_thread()'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            (out, err) = p.communicate()
        self.assertEqual(out, b'')
        # This used to cause an infinite loop.
        self.assertTrue(err.rstrip().startswith(
                            b'Fatal Python error:'
                            b' PyThreadState_Get: no current thread'))

    def test_memoryview_from_NULL_pointer(self):
        self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer)

    def test_exc_info(self):
        raised_exception = ValueError("5")
        new_exc = TypeError("TEST")
        try:
            raise raised_exception
        except ValueError as e:
            tb = e.__traceback__
            orig_sys_exc_info = sys.exc_info()
            orig_exc_info = _testcapi.set_exc_info(new_exc.__class__, new_exc, None)
            new_sys_exc_info = sys.exc_info()
            new_exc_info = _testcapi.set_exc_info(*orig_exc_info)
            reset_sys_exc_info = sys.exc_info()

            self.assertEqual(orig_exc_info[1], e)

            self.assertSequenceEqual(orig_exc_info, (raised_exception.__class__, raised_exception, tb))
            self.assertSequenceEqual(orig_sys_exc_info, orig_exc_info)
            self.assertSequenceEqual(reset_sys_exc_info, orig_exc_info)
            self.assertSequenceEqual(new_exc_info, (new_exc.__class__, new_exc, None))
            self.assertSequenceEqual(new_sys_exc_info, new_exc_info)
        else:
            self.assertTrue(False)

    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
    def test_seq_bytes_to_charp_array(self):
        # Issue #15732: crash in _PySequence_BytesToCharpArray()
        class Z(object):
            def __len__(self):
                return 1
        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
                          1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17)
        # Issue #15736: overflow in _PySequence_BytesToCharpArray()
        class Z(object):
            def __len__(self):
                return sys.maxsize
            def __getitem__(self, i):
                return b'x'
        self.assertRaises(MemoryError, _posixsubprocess.fork_exec,
                          1,Z(),3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17)

    @unittest.skipUnless(_posixsubprocess, '_posixsubprocess required for this test.')
    def test_subprocess_fork_exec(self):
        class Z(object):
            def __len__(self):
                return 1

        # Issue #15738: crash in subprocess_fork_exec()
        self.assertRaises(TypeError, _posixsubprocess.fork_exec,
                          Z(),[b'1'],3,(1, 2),5,6,7,8,9,10,11,12,13,14,15,16,17)

    @unittest.skipIf(MISSING_C_DOCSTRINGS,
                     "Signature information for builtins requires docstrings")
    def test_docstring_signature_parsing(self):

        self.assertEqual(_testcapi.no_docstring.__doc__, None)
        self.assertEqual(_testcapi.no_docstring.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_empty.__doc__, None)
        self.assertEqual(_testcapi.docstring_empty.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_no_signature.__doc__,
            "This docstring has no signature.")
        self.assertEqual(_testcapi.docstring_no_signature.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_invalid_signature.__doc__,
            "docstring_with_invalid_signature($module, /, boo)\n"
            "\n"
            "This docstring has an invalid signature."
            )
        self.assertEqual(_testcapi.docstring_with_invalid_signature.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_invalid_signature2.__doc__,
            "docstring_with_invalid_signature2($module, /, boo)\n"
            "\n"
            "--\n"
            "\n"
            "This docstring also has an invalid signature."
            )
        self.assertEqual(_testcapi.docstring_with_invalid_signature2.__text_signature__, None)

        self.assertEqual(_testcapi.docstring_with_signature.__doc__,
            "This docstring has a valid signature.")
        self.assertEqual(_testcapi.docstring_with_signature.__text_signature__, "($module, /, sig)")

        self.assertEqual(_testcapi.docstring_with_signature_but_no_doc.__doc__, None)
        self.assertEqual(_testcapi.docstring_with_signature_but_no_doc.__text_signature__,
            "($module, /, sig)")

        self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__doc__,
            "\nThis docstring has a valid signature and some extra newlines.")
        self.assertEqual(_testcapi.docstring_with_signature_and_extra_newlines.__text_signature__,
            "($module, /, parameter)")

    def test_c_type_with_matrix_multiplication(self):
        M = _testcapi.matmulType
        m1 = M()
        m2 = M()
        self.assertEqual(m1 @ m2, ("matmul", m1, m2))
        self.assertEqual(m1 @ 42, ("matmul", m1, 42))
        self.assertEqual(42 @ m1, ("matmul", 42, m1))
        o = m1
        o @= m2
        self.assertEqual(o, ("imatmul", m1, m2))
        o = m1
        o @= 42
        self.assertEqual(o, ("imatmul", m1, 42))
        o = 42
        o @= m1
        self.assertEqual(o, ("matmul", 42, m1))

    def test_return_null_without_error(self):
        # Issue #23571: A function must not return NULL without setting an
        # error
        if Py_DEBUG:
            # A debug build aborts with a fatal error; check the message.
            code = textwrap.dedent("""
                import _testcapi
                from test import support

                with support.SuppressCrashReport():
                    _testcapi.return_null_without_error()
            """)
            rc, out, err = assert_python_failure('-c', code)
            self.assertRegex(err.replace(b'\r', b''),
                             br'Fatal Python error: a function returned NULL '
                             br'without setting an error\n'
                             br'SystemError: <built-in function '
                             br'return_null_without_error> returned NULL '
                             br'without setting an error\n'
                             br'\n'
                             br'Current thread.*:\n'
                             br' File .*", line 6 in <module>')
        else:
            # A release build converts the condition into a SystemError.
            with self.assertRaises(SystemError) as cm:
                _testcapi.return_null_without_error()
            self.assertRegex(str(cm.exception),
                             'return_null_without_error.* '
                             'returned NULL without setting an error')

    def test_return_result_with_error(self):
        # Issue #23571: A function must not return a result with an error set
        if Py_DEBUG:
            code = textwrap.dedent("""
                import _testcapi
                from test import support

                with support.SuppressCrashReport():
                    _testcapi.return_result_with_error()
            """)
            rc, out, err = assert_python_failure('-c', code)
            self.assertRegex(err.replace(b'\r', b''),
                             br'Fatal Python error: a function returned a '
                             br'result with an error set\n'
                             br'ValueError\n'
                             br'\n'
                             br'The above exception was the direct cause '
                             br'of the following exception:\n'
                             br'\n'
                             br'SystemError: <built-in '
                             br'function return_result_with_error> '
                             br'returned a result with an error set\n'
                             br'\n'
                             br'Current thread.*:\n'
                             br' File .*, line 6 in <module>')
        else:
            with self.assertRaises(SystemError) as cm:
                _testcapi.return_result_with_error()
            self.assertRegex(str(cm.exception),
                             'return_result_with_error.* '
                             'returned a result with an error set')

    def test_buildvalue_N(self):
        _testcapi.test_buildvalue_N()

    def test_set_nomemory(self):
        code = """if 1:
            import _testcapi
            class C(): pass

            # The first loop tests both functions and that remove_mem_hooks()
            # can be called twice in a row. The second loop checks a call to
            # set_nomemory() after a call to remove_mem_hooks(). The third
            # loop checks the start and stop arguments of set_nomemory().
            for outer_cnt in range(1, 4):
                start = 10 * outer_cnt
                for j in range(100):
                    if j == 0:
                        if outer_cnt != 3:
                            _testcapi.set_nomemory(start)
                        else:
                            _testcapi.set_nomemory(start, start + 1)
                    try:
                        C()
                    except MemoryError as e:
                        if outer_cnt != 3:
                            _testcapi.remove_mem_hooks()
                        print('MemoryError', outer_cnt, j)
                        _testcapi.remove_mem_hooks()
                        break
        """
        rc, out, err = assert_python_ok('-c', code)
        self.assertIn(b'MemoryError 1 10', out)
        self.assertIn(b'MemoryError 2 20', out)
        self.assertIn(b'MemoryError 3 30', out)

    def test_mapping_keys_values_items(self):
        class Mapping1(dict):
            def keys(self):
                return list(super().keys())
            def values(self):
                return list(super().values())
            def items(self):
                return list(super().items())
        class Mapping2(dict):
            def keys(self):
                return tuple(super().keys())
            def values(self):
                return tuple(super().values())
            def items(self):
                return tuple(super().items())
        dict_obj = {'foo': 1, 'bar': 2, 'spam': 3}

        for mapping in [{}, OrderedDict(), Mapping1(), Mapping2(),
                        dict_obj, OrderedDict(dict_obj),
                        Mapping1(dict_obj), Mapping2(dict_obj)]:
            self.assertListEqual(_testcapi.get_mapping_keys(mapping),
                                 list(mapping.keys()))
            self.assertListEqual(_testcapi.get_mapping_values(mapping),
                                 list(mapping.values()))
            self.assertListEqual(_testcapi.get_mapping_items(mapping),
                                 list(mapping.items()))

    def test_mapping_keys_values_items_bad_arg(self):
        self.assertRaises(AttributeError, _testcapi.get_mapping_keys, None)
        self.assertRaises(AttributeError, _testcapi.get_mapping_values, None)
        self.assertRaises(AttributeError, _testcapi.get_mapping_items, None)

        class BadMapping:
            def keys(self):
                return None
            def values(self):
                return None
            def items(self):
                return None
        bad_mapping = BadMapping()
        self.assertRaises(TypeError, _testcapi.get_mapping_keys, bad_mapping)
        self.assertRaises(TypeError, _testcapi.get_mapping_values, bad_mapping)
        self.assertRaises(TypeError, _testcapi.get_mapping_items, bad_mapping)
class TestPendingCalls(unittest.TestCase):
    """Exercise Py_AddPendingCall via _testcapi._pending_threadfunc."""

    def pendingcalls_submit(self, l, n):
        def callback():
            #this function can be interrupted by thread switching so let's
            #use an atomic operation
            l.append(None)

        for i in range(n):
            time.sleep(random.random()*0.02) #0.01 secs on average
            #try submitting callback until successful.
            #rely on regular interrupt to flush queue if we are
            #unsuccessful.
            while True:
                if _testcapi._pending_threadfunc(callback):
                    break;

    def pendingcalls_wait(self, l, n, context = None):
        #now, stick around until l[0] has grown to 10
        count = 0;
        while len(l) != n:
            #this busy loop is where we expect to be interrupted to
            #run our callbacks. Note that callbacks are only run on the
            #main thread
            if False and support.verbose:
                print("(%i)"%(len(l),),)
            for i in range(1000):
                a = i*i
            if context and not context.event.is_set():
                continue
            count += 1
            self.assertTrue(count < 10000,
                "timeout waiting for %i callbacks, got %i"%(n, len(l)))
        if False and support.verbose:
            print("(%i)"%(len(l),))

    def test_pendingcalls_threaded(self):
        #do every callback on a separate thread
        n = 32 #total callbacks
        threads = []
        class foo(object):pass
        context = foo()
        context.l = []
        context.n = 2 #submits per thread
        context.nThreads = n // context.n
        context.nFinished = 0
        context.lock = threading.Lock()
        context.event = threading.Event()

        threads = [threading.Thread(target=self.pendingcalls_thread,
                                    args=(context,))
                   for i in range(context.nThreads)]
        with support.start_threads(threads):
            self.pendingcalls_wait(context.l, n, context)

    def pendingcalls_thread(self, context):
        try:
            self.pendingcalls_submit(context.l, context.n)
        finally:
            with context.lock:
                context.nFinished += 1
                nFinished = context.nFinished
                if False and support.verbose:
                    print("finished threads: ", nFinished)
        if nFinished == context.nThreads:
            context.event.set()

    def test_pendingcalls_non_threaded(self):
        #again, just using the main thread, likely they will all be dispatched at
        #once. It is ok to ask for too many, because we loop until we find a slot.
        #the loop can be interrupted to dispatch.
        #there are only 32 dispatch slots, so we go for twice that!
        l = []
        n = 64
        self.pendingcalls_submit(l, n)
        self.pendingcalls_wait(l, n)
class SubinterpreterTest(unittest.TestCase):
    """Check that a subinterpreter gets its own module/builtins state."""

    def test_subinterps(self):
        import builtins
        r, w = os.pipe()
        # The subinterpreter writes the ids of its sys.modules and builtins
        # through the pipe; they must differ from ours.
        code = """if 1:
            import sys, builtins, pickle
            with open({:d}, "wb") as f:
                pickle.dump(id(sys.modules), f)
                pickle.dump(id(builtins), f)
            """.format(w)
        with open(r, "rb") as f:
            ret = support.run_in_subinterp(code)
            self.assertEqual(ret, 0)
            self.assertNotEqual(pickle.load(f), id(sys.modules))
            self.assertNotEqual(pickle.load(f), id(builtins))
# Bug #6012
class Test6012(unittest.TestCase):
    # Regression test: _testcapi.argparsing exercises the argument-parsing
    # path reported in bug #6012 and is expected to return 1.
    def test(self):
        self.assertEqual(_testcapi.argparsing("Hello", "World"), 1)
class EmbeddingTests(unittest.TestCase):
    """Drives the Programs/_testembed helper binary, which embeds the
    interpreter via the C API, and validates its output."""

    def setUp(self):
        # Locate the _testembed executable relative to this file (or next to
        # the interpreter on Windows); skip the whole class if it was not
        # built.
        here = os.path.abspath(__file__)
        basepath = os.path.dirname(os.path.dirname(os.path.dirname(here)))
        exename = "_testembed"
        if sys.platform.startswith("win"):
            # Debug builds of the executable carry a "_d" suffix.
            ext = ("_d" if "_d" in sys.executable else "") + ".exe"
            exename += ext
            exepath = os.path.dirname(sys.executable)
        else:
            exepath = os.path.join(basepath, "Programs")
        self.test_exe = exe = os.path.join(exepath, exename)
        if not os.path.exists(exe):
            self.skipTest("%r doesn't exist" % exe)
        # This is needed otherwise we get a fatal error:
        # "Py_Initialize: Unable to get the locale encoding
        # LookupError: no codec search functions registered: can't find encoding"
        self.oldcwd = os.getcwd()
        os.chdir(basepath)

    def tearDown(self):
        os.chdir(self.oldcwd)

    def run_embedded_interpreter(self, *args, env=None):
        """Runs a test in the embedded interpreter"""
        cmd = [self.test_exe]
        cmd.extend(args)
        if env is not None and sys.platform == 'win32':
            # Windows requires at least the SYSTEMROOT environment variable to
            # start Python.
            env = env.copy()
            env['SYSTEMROOT'] = os.environ['SYSTEMROOT']
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             universal_newlines=True,
                             env=env)
        (out, err) = p.communicate()
        self.assertEqual(p.returncode, 0,
                         "bad returncode %d, stderr is %r" %
                         (p.returncode, err))
        return out, err

    def run_repeated_init_and_subinterpreters(self):
        # Generator: runs the "repeated_init_and_subinterpreters" scenario in
        # _testembed and yields one 5-element list of Interp records per pass
        # (main interpreter, three subinterpreters, then main again).
        out, err = self.run_embedded_interpreter("repeated_init_and_subinterpreters")
        self.assertEqual(err, "")
        # The output from _testembed looks like this:
        # --- Pass 0 ---
        # interp 0 <0x1cf9330>, thread state <0x1cf9700>: id(modules) = 139650431942728
        # interp 1 <0x1d4f690>, thread state <0x1d35350>: id(modules) = 139650431165784
        # interp 2 <0x1d5a690>, thread state <0x1d99ed0>: id(modules) = 139650413140368
        # interp 3 <0x1d4f690>, thread state <0x1dc3340>: id(modules) = 139650412862200
        # interp 0 <0x1cf9330>, thread state <0x1cf9700>: id(modules) = 139650431942728
        # --- Pass 1 ---
        # ...
        interp_pat = (r"^interp (\d+) <(0x[\dA-F]+)>, "
                      r"thread state <(0x[\dA-F]+)>: "
                      r"id\(modules\) = ([\d]+)$")
        Interp = namedtuple("Interp", "id interp tstate modules")
        numloops = 0
        current_run = []
        for line in out.splitlines():
            if line == "--- Pass {} ---".format(numloops):
                # A pass header must only appear once the previous run has
                # been yielded (current_run was reset to empty).
                self.assertEqual(len(current_run), 0)
                if support.verbose:
                    print(line)
                numloops += 1
                continue
            self.assertLess(len(current_run), 5)
            match = re.match(interp_pat, line)
            if match is None:
                # Re-match via assertRegex purely to get a useful failure
                # message showing the offending line.
                self.assertRegex(line, interp_pat)
            # Parse the line from the loop.  The first line is the main
            # interpreter and the 3 afterward are subinterpreters.
            interp = Interp(*match.groups())
            if support.verbose:
                print(interp)
            self.assertTrue(interp.interp)
            self.assertTrue(interp.tstate)
            self.assertTrue(interp.modules)
            current_run.append(interp)

            # The last line in the loop should be the same as the first.
            if len(current_run) == 5:
                main = current_run[0]
                self.assertEqual(interp, main)
                yield current_run
                current_run = []

    def test_subinterps_main(self):
        # The first record of every pass must be the main interpreter (id 0).
        for run in self.run_repeated_init_and_subinterpreters():
            main = run[0]
            self.assertEqual(main.id, '0')

    def test_subinterps_different_ids(self):
        # Subinterpreter ids must increase consecutively from the main id.
        for run in self.run_repeated_init_and_subinterpreters():
            main, *subs, _ = run
            mainid = int(main.id)
            for i, sub in enumerate(subs):
                self.assertEqual(sub.id, str(mainid + i + 1))

    def test_subinterps_distinct_state(self):
        for run in self.run_repeated_init_and_subinterpreters():
            main, *subs, _ = run
            # namedtuple containment: true when any field equals '0x0'.
            if '0x0' in main:
                # XXX Fix on Windows (and other platforms): something
                # is going on with the pointers in Programs/_testembed.c.
                # interp.interp is 0x0 and interp.modules is the same
                # between interpreters.
                raise unittest.SkipTest('platform prints pointers as 0x0')
            for sub in subs:
                # A new subinterpreter may have the same
                # PyInterpreterState pointer as a previous one if
                # the earlier one has already been destroyed.  So
                # we compare with the main interpreter.  The same
                # applies to tstate.
                self.assertNotEqual(sub.interp, main.interp)
                self.assertNotEqual(sub.tstate, main.tstate)
                self.assertNotEqual(sub.modules, main.modules)

    def test_forced_io_encoding(self):
        # Checks forced configuration of embedded interpreter IO streams
        env = dict(os.environ, PYTHONIOENCODING="utf-8:surrogateescape")
        out, err = self.run_embedded_interpreter("forced_io_encoding", env=env)
        if support.verbose > 1:
            print()
            print(out)
            print(err)
        expected_stream_encoding = "utf-8"
        expected_errors = "surrogateescape"
        # Template filled below; stderr always uses backslashreplace.
        expected_output = '\n'.join([
            "--- Use defaults ---",
            "Expected encoding: default",
            "Expected errors: default",
            "stdin: {in_encoding}:{errors}",
            "stdout: {out_encoding}:{errors}",
            "stderr: {out_encoding}:backslashreplace",
            "--- Set errors only ---",
            "Expected encoding: default",
            "Expected errors: ignore",
            "stdin: {in_encoding}:ignore",
            "stdout: {out_encoding}:ignore",
            "stderr: {out_encoding}:backslashreplace",
            "--- Set encoding only ---",
            "Expected encoding: latin-1",
            "Expected errors: default",
            "stdin: latin-1:{errors}",
            "stdout: latin-1:{errors}",
            "stderr: latin-1:backslashreplace",
            "--- Set encoding and errors ---",
            "Expected encoding: latin-1",
            "Expected errors: replace",
            "stdin: latin-1:replace",
            "stdout: latin-1:replace",
            "stderr: latin-1:backslashreplace"])
        expected_output = expected_output.format(
            in_encoding=expected_stream_encoding,
            out_encoding=expected_stream_encoding,
            errors=expected_errors)
        # This is useful if we ever trip over odd platform behaviour
        self.maxDiff = None
        self.assertEqual(out.strip(), expected_output)
class SkipitemTest(unittest.TestCase):
    """Checks getargs.c's skipitem() against convertsimple(), plus error
    handling in PyArg_ParseTupleAndKeywords() itself."""

    def test_skipitem(self):
        """
        If this test failed, you probably added a new "format unit"
        in Python/getargs.c, but neglected to update our poor friend
        skipitem() in the same file.  (If so, shame on you!)

        With a few exceptions**, this function brute-force tests all
        printable ASCII*** characters (32 to 126 inclusive) as format units,
        checking to see that PyArg_ParseTupleAndKeywords() return consistent
        errors both when the unit is attempted to be used and when it is
        skipped.  If the format unit doesn't exist, we'll get one of two
        specific error messages (one for used, one for skipped); if it does
        exist we *won't* get that error--we'll get either no error or some
        other error.  If we get the specific "does not exist" error for one
        test and not for the other, there's a mismatch, and the test fails.

        ** Some format units have special funny semantics and it would
           be difficult to accommodate them here.  Since these are all
           well-established and properly skipped in skipitem() we can
           get away with not testing them--this test is really intended
           to catch *new* format units.

        *** Python C source files must be ASCII.  Therefore it's impossible
            to have non-ASCII format units.
        """
        empty_tuple = ()
        tuple_1 = (0,)
        dict_b = {'b':1}
        keywords = ["a", "b"]

        for i in range(32, 127):
            c = chr(i)

            # skip parentheses, the error reporting is inconsistent about them
            # skip 'e', it's always a two-character code
            # skip '|' and '$', they don't represent arguments anyway
            if c in '()e|$':
                continue

            # test the format unit when not skipped
            format = c + "i"
            try:
                _testcapi.parse_tuple_and_keywords(tuple_1, dict_b,
                    format, keywords)
                when_not_skipped = False
            except SystemError as e:
                # This exact message is raised only for a nonexistent unit.
                s = "argument 1 (impossible<bad format char>)"
                when_not_skipped = (str(e) == s)
            except TypeError:
                when_not_skipped = False

            # test the format unit when skipped
            optional_format = "|" + format
            try:
                _testcapi.parse_tuple_and_keywords(empty_tuple, dict_b,
                    optional_format, keywords)
                when_skipped = False
            except SystemError as e:
                # skipitem()'s "nonexistent unit" message differs in shape.
                s = "impossible<bad format char>: '{}'".format(format)
                when_skipped = (str(e) == s)

            # Both paths must agree on whether the unit exists.
            message = ("test_skipitem_parity: "
                "detected mismatch between convertsimple and skipitem "
                "for format unit '{}' ({}), not skipped {}, skipped {}".format(
                    c, i, when_skipped, when_not_skipped))
            self.assertIs(when_skipped, when_not_skipped, message)

    def test_parse_tuple_and_keywords(self):
        # Test handling errors in the parse_tuple_and_keywords helper itself
        self.assertRaises(TypeError, _testcapi.parse_tuple_and_keywords,
                          (), {}, 42, [])
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, '', 42)
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, '', [''] * 42)
        self.assertRaises(ValueError, _testcapi.parse_tuple_and_keywords,
                          (), {}, '', [42])

    def test_bad_use(self):
        # Test handling invalid format and keywords in
        # PyArg_ParseTupleAndKeywords()
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (1,), {}, '||O', ['a'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (1, 2), {}, '|O|O', ['a', 'b'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {'a': 1}, '$$O', ['a'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {'a': 1, 'b': 2}, '$O$O', ['a', 'b'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {'a': 1}, '$|O', ['a'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {'a': 1, 'b': 2}, '$O|O', ['a', 'b'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (1,), {}, '|O', ['a', 'b'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (1,), {}, '|OO', ['a'])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {}, '|$O', [''])
        self.assertRaises(SystemError, _testcapi.parse_tuple_and_keywords,
                          (), {}, '|OO', ['a', ''])

    def test_positional_only(self):
        # Empty keyword names mark positional-only parameters.
        parse = _testcapi.parse_tuple_and_keywords

        parse((1, 2, 3), {}, 'OOO', ['', '', 'a'])
        parse((1, 2), {'a': 3}, 'OOO', ['', '', 'a'])
        with self.assertRaisesRegex(TypeError,
               r'function takes at least 2 positional arguments \(1 given\)'):
            parse((1,), {'a': 3}, 'OOO', ['', '', 'a'])
        parse((1,), {}, 'O|OO', ['', '', 'a'])
        with self.assertRaisesRegex(TypeError,
               r'function takes at least 1 positional arguments \(0 given\)'):
            parse((), {}, 'O|OO', ['', '', 'a'])
        parse((1, 2), {'a': 3}, 'OO$O', ['', '', 'a'])
        with self.assertRaisesRegex(TypeError,
               r'function takes exactly 2 positional arguments \(1 given\)'):
            parse((1,), {'a': 3}, 'OO$O', ['', '', 'a'])
        parse((1,), {}, 'O|O$O', ['', '', 'a'])
        with self.assertRaisesRegex(TypeError,
               r'function takes at least 1 positional arguments \(0 given\)'):
            parse((), {}, 'O|O$O', ['', '', 'a'])
        with self.assertRaisesRegex(SystemError, r'Empty parameter name after \$'):
            parse((1,), {}, 'O|$OO', ['', '', 'a'])
        with self.assertRaisesRegex(SystemError, 'Empty keyword'):
            parse((1,), {}, 'O|OO', ['', 'a', ''])
class TestThreadState(unittest.TestCase):

    @support.reap_threads
    def test_thread_state(self):
        # some extra thread-state tests driven via _testcapi
        def target():
            idents = []

            def callback():
                idents.append(threading.get_ident())

            _testcapi._test_thread_state(callback)
            # NOTE(review): extra bindings to callback — presumably to keep
            # it alive while the C side still holds it; confirm in _testcapi.
            a = b = callback
            time.sleep(1)

            # Check our main thread is in the list exactly 3 times.
            self.assertEqual(idents.count(threading.get_ident()), 3,
                             "Couldn't find main thread correctly in the list")

        # Exercise the helper both from the main thread and from a
        # separately spawned thread.
        target()
        t = threading.Thread(target=target)
        t.start()
        t.join()
class Test_testcapi(unittest.TestCase):
    """Runs every test_* callable exported by the _testcapi C module,
    each under its own subTest."""

    def test__testcapi(self):
        if support.verbose:
            print()
        # Collect the C-level test entry points first, then run them.
        names = [n for n in dir(_testcapi) if n.startswith('test_')]
        for name in names:
            with self.subTest("internal", name=name):
                if support.verbose:
                    print(f"  {name}", flush=True)
                getattr(_testcapi, name)()
class PyMemDebugTests(unittest.TestCase):
    """Checks the debug hooks of Python's memory allocators by crashing a
    child interpreter on purpose and inspecting its stderr."""

    # Value passed to the child via the PYTHONMALLOC environment variable;
    # subclasses override it to test other allocator configurations.
    PYTHONMALLOC = 'debug'
    # '0x04c06e0' or '04C06E0'
    PTR_REGEX = r'(?:0x)?[0-9a-fA-F]+'

    def check(self, code):
        # Run `code` in a child expected to die with a fatal error and
        # return its decoded stderr; crash reports are suppressed so the
        # deliberate abort does not spam the system.
        with support.SuppressCrashReport():
            out = assert_python_failure('-c', code,
                                        PYTHONMALLOC=self.PYTHONMALLOC)
            stderr = out.err
        return stderr.decode('ascii', 'replace')

    def test_buffer_overflow(self):
        # Writing past the end of a debug-allocated block must be reported
        # with the diagnostic dump below.
        out = self.check('import _testcapi; _testcapi.pymem_buffer_overflow()')
        regex = (r"Debug memory block at address p={ptr}: API 'm'\n"
                 r"    16 bytes originally requested\n"
                 r"    The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n"
                 r"    The [0-9] pad bytes at tail={ptr} are not all FORBIDDENBYTE \(0x[0-9a-f]{{2}}\):\n"
                 r"        at tail\+0: 0x78 \*\*\* OUCH\n"
                 r"        at tail\+1: 0xfb\n"
                 r"        at tail\+2: 0xfb\n"
                 r"        .*\n"
                 r"    The block was made by call #[0-9]+ to debug malloc/realloc.\n"
                 r"    Data at p: cb cb cb .*\n"
                 r"\n"
                 r"Fatal Python error: bad trailing pad byte")
        regex = regex.format(ptr=self.PTR_REGEX)
        regex = re.compile(regex, flags=re.DOTALL)
        self.assertRegex(out, regex)

    def test_api_misuse(self):
        # Freeing with a different allocator family than the one used to
        # allocate must be detected ('m' allocated, 'r' verified).
        out = self.check('import _testcapi; _testcapi.pymem_api_misuse()')
        regex = (r"Debug memory block at address p={ptr}: API 'm'\n"
                 r"    16 bytes originally requested\n"
                 r"    The [0-9] pad bytes at p-[0-9] are FORBIDDENBYTE, as expected.\n"
                 r"    The [0-9] pad bytes at tail={ptr} are FORBIDDENBYTE, as expected.\n"
                 r"    The block was made by call #[0-9]+ to debug malloc/realloc.\n"
                 r"    Data at p: cb cb cb .*\n"
                 r"\n"
                 r"Fatal Python error: bad ID: Allocated using API 'm', verified using API 'r'\n")
        regex = regex.format(ptr=self.PTR_REGEX)
        self.assertRegex(out, regex)

    def check_malloc_without_gil(self, code):
        # Shared assertion for the two GIL-less allocation tests below.
        out = self.check(code)
        expected = ('Fatal Python error: Python memory allocator called '
                    'without holding the GIL')
        self.assertIn(expected, out)

    def test_pymem_malloc_without_gil(self):
        # Debug hooks must raise an error if PyMem_Malloc() is called
        # without holding the GIL
        code = 'import _testcapi; _testcapi.pymem_malloc_without_gil()'
        self.check_malloc_without_gil(code)

    def test_pyobject_malloc_without_gil(self):
        # Debug hooks must raise an error if PyObject_Malloc() is called
        # without holding the GIL
        code = 'import _testcapi; _testcapi.pyobject_malloc_without_gil()'
        self.check_malloc_without_gil(code)
class PyMemMallocDebugTests(PyMemDebugTests):
    # Same checks with the debug hooks layered over the raw malloc allocator.
    PYTHONMALLOC = 'malloc_debug'
@unittest.skipUnless(sysconfig.get_config_var('WITH_PYMALLOC') == 1,
                     'need pymalloc')
class PyMemPymallocDebugTests(PyMemDebugTests):
    # Same checks over the pymalloc allocator; requires a build with pymalloc.
    PYTHONMALLOC = 'pymalloc_debug'
@unittest.skipUnless(Py_DEBUG, 'need Py_DEBUG')
class PyMemDefaultTests(PyMemDebugTests):
    # test default allocator of Python compiled in debug mode
    # (empty PYTHONMALLOC leaves the build's default allocator in place)
    PYTHONMALLOC = ''
# Allow running this test file directly.
if __name__ == "__main__":
    unittest.main()
|
// Estonian (et) UI strings for the Bookmark widget, registered through the
// AMD/Dojo i18n define() mechanism.  Keys are looked up by the widget code;
// only the values are locale-specific — do not rename keys.
define({
  "add": "Klõpsa uue lisamiseks",
  "title": "Pealkiri",
  "placeholderBookmarkName": "Järjehoidja nimi",
  "ok": "OK",
  "cancel": "Tühista",
  "warning": "Viige muutmine lõpule!",
  "edit": "Muuda järjehoidjat",
  "errorNameExist": "Järjehoidja on olemas!",
  "errorNameNull": "Vigane järjehoidja nimi!",
  "addBookmark": "Loo uus",
  "thumbnail": "Pisipilt",
  "thumbnailHint": "Uuendamiseks klõpsake pilti",
  "displayBookmarksAs": "Kuva järjehoidjad",
  "cards": "kaartidena",
  "list": "loendina",
  "cardsTips": "Kaardivaade",
  "listTips": "Loendivaade",
  "makeAsDefault": "Määra vaikeväärtuseks",
  "default": "Vaikimisi",
  "editable": "Saate lubada järjehoidjate lisamise vidinasse.",
  "alwaysSync": "Kuva järjehoidjad veebikaardilt",
  "configCustom": "Kuva kohandatud järjehoidjad",
  "import": "Impordi",
  "create": "Loo",
  "importTitle": "Impordi järjehoidjad",
  "importFromWeb": "Kuva järjehoidjad praeguselt veebikaardilt",
  "selectAll": "Vali kõik",
  "noBookmarkInConfig": "Järjehoidjate lisamiseks klõpsake nuppu “Impordi” või “Loo uus”.",
  "noBookmarkInWebMap": "Kaardil pole ühtegi järjehoidjat konfigureeritud.",
  "extent": "Kuvaulatus",
  "saveExtent": "Salvesta kaardi ulatus sellesse järjehoidjasse",
  "savelayers": "Salvesta kihi nähtavus",
  "withVisibility": "Kihi nähtavusega",
  "bookmark": "Järjehoidja",
  "addBtn": "Lisa",
  "deleteBtn": "Kustuta",
  "editBtn": "Muuda",
  "dragReorderTip": "Ümberjärjestamiseks hoidke all ja lohistage",
  "deleteBtnTip": "Kustuta järjehoidja",
  "editBtnTip": "Muuda järjehoidjat"
});
|
#!/usr/bin/env python3
# Based on https://stackoverflow.com/a/41751956
import os, sys
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QProcess, QTextCodec
from PyQt5.QtGui import QTextCursor, QPixmap, QIcon
from PyQt5.QtWidgets import QApplication, QPlainTextEdit, QAction, QMessageBox, QMainWindow
class ProcessOutputReader(QProcess):
    """QProcess that merges stdout/stderr and re-emits decoded text chunks
    through the produce_output signal."""

    # Emitted with each newly decoded chunk of child-process output.
    produce_output = pyqtSignal(str)

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        # Merge stderr into stdout so one reader sees everything.
        self.setProcessChannelMode(QProcess.MergedChannels)
        # Incremental decoder using the system locale codec, so multi-byte
        # characters split across reads are handled correctly.
        codec = QTextCodec.codecForLocale()
        self._decoder_stdout = codec.makeDecoder()
        self.readyReadStandardOutput.connect(self._ready_read_standard_output)

    @pyqtSlot()
    def _ready_read_standard_output(self):
        # Drain whatever the child wrote, decode it, and forward it.
        raw_bytes = self.readAllStandardOutput()
        text = self._decoder_stdout.toUnicode(raw_bytes)
        self.produce_output.emit(text)
class MyConsole(QMainWindow):
    """Read-only log console window with a File/Help menu and an About box."""

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.setWindowTitle("Logs")
        self._showMenu()
        self.setFixedWidth(1024)
        self.setFixedHeight(600)
        self.plainTextEdit = QPlainTextEdit()
        font = self.font()
        font.setPointSize(9)
        # font.setFamily("monospace")
        self.plainTextEdit.setFont(font)
        self.plainTextEdit.setReadOnly(True)
        self.plainTextEdit.setMaximumBlockCount(10000)  # limit console to 10000 lines
        # Keep one cursor for appending, so inserts always land at its position.
        self._cursor_output = self.plainTextEdit.textCursor()
        self.setCentralWidget(self.plainTextEdit)

    @pyqtSlot(str)
    def append_output(self, text):
        # Slot for ProcessOutputReader.produce_output.
        self._cursor_output.insertText(text)
        self.scroll_to_last_line()

    def scroll_to_last_line(self):
        # Keep the view pinned to the newest output line.
        cursor = self.plainTextEdit.textCursor()
        cursor.movePosition(QTextCursor.End)
        cursor.movePosition(QTextCursor.Up if cursor.atBlockStart() else
                            QTextCursor.StartOfLine)
        self.plainTextEdit.setTextCursor(cursor)

    def _showMenu(self):
        # Build the File (Quit) and Help (About) menus.
        exitAct = QAction('&Quit', self)
        exitAct.setShortcut('Ctrl+Q')
        exitAct.setStatusTip('Exit application')
        exitAct.triggered.connect(QApplication.quit)
        menubar = self.menuBar()
        fileMenu = menubar.addMenu('&File')
        fileMenu.addAction(exitAct)
        aboutAct = QAction('&About', self)
        aboutAct.setStatusTip('About this application')
        aboutAct.triggered.connect(self._showAbout)
        helpMenu = menubar.addMenu('&Help')
        helpMenu.addAction(aboutAct)

    def _showAbout(self):
        print("showDialog")
        msg = QMessageBox()
        msg.setWindowTitle("About")
        msg.setIconPixmap(QPixmap(os.path.dirname(__file__) + "/Logs.png"))
        # msg.setIconPixmap(QIcon.fromTheme("logviewer").pixmap(128))
        # Show the first available license text as the dialog's details.
        candidates = ["COPYRIGHT", "COPYING", "LICENSE"]
        for candidate in candidates:
            if os.path.exists(os.path.dirname(__file__) + "/" + candidate):
                with open(os.path.dirname(__file__) + "/" + candidate, 'r') as file:
                    data = file.read()
                msg.setDetailedText(data)
        msg.setText("<h3>Logs</h3>")
        msg.setInformativeText(
            "A simple utility to view log files<br>in <code>/var/log</code><br><br><a href='https://github.com/helloSystem/Utilities'>https://github.com/helloSystem/Utilities</a>")
        msg.exec()
# Wire the process reader to the console window and start following the logs.
app = QApplication(sys.argv)

reader = ProcessOutputReader()
console = MyConsole()
reader.produce_output.connect(console.append_output)
# 'sh -c' is needed so the shell expands the /var/log/*.log glob.
reader.start('sh', ['-c', "tail -n 5 -f /var/log/*.log"])
console.show()

# Fix: propagate Qt's event-loop exit status to the shell instead of
# silently discarding the return value of app.exec_().
sys.exit(app.exec_())
|
/*! jQuery UI - v1.9.2 - 2018-05-13
* http://jqueryui.com
* Copyright jQuery Foundation and other contributors; Licensed MIT */
jQuery(function(t){t.datepicker.regional.sk={closeText:"Zavrieť",prevText:"<Predchádzajúci",nextText:"Nasledujúci>",currentText:"Dnes",monthNames:["Január","Február","Marec","Apríl","Máj","Jún","Júl","August","September","Október","November","December"],monthNamesShort:["Jan","Feb","Mar","Apr","Máj","Jún","Júl","Aug","Sep","Okt","Nov","Dec"],dayNames:["Nedeľa","Pondelok","Utorok","Streda","Štvrtok","Piatok","Sobota"],dayNamesShort:["Ned","Pon","Uto","Str","Štv","Pia","Sob"],dayNamesMin:["Ne","Po","Ut","St","Št","Pia","So"],weekHeader:"Ty",dateFormat:"dd.mm.yy",firstDay:1,isRTL:!1,showMonthAfterYear:!1,yearSuffix:""},t.datepicker.setDefaults(t.datepicker.regional.sk)});
|
"""
Checks that the implementation does not make use of boolean operations (==, <=, !, etc)
in assignments or function calls.
"""
import os
import pytest
import helpers
import pqclean
import pycparser
def setup_module():
    """Remind the user to fetch the pycparser git submodule if it is absent."""
    submodule_marker = os.path.join('pycparser', '.git')
    if not os.path.exists(submodule_marker):
        print("Please run `git submodule update --init`")
class ForbiddenLineVisitor(pycparser.c_ast.NodeVisitor):
    """Finds boolean/comparison operators inside assignments, declaration
    initializers and function-call arguments by delegating each such
    subtree to a ForbiddenOpVisitor."""

    def __init__(self):
        self.errors = []

    def _collect(self, subtree):
        # Scan one subtree with a fresh operator visitor and keep its errors.
        checker = ForbiddenOpVisitor()
        checker.visit(subtree)
        self.errors.extend(checker.errors)

    def visit_Assignment(self, node):
        self._collect(node.rvalue)

    def visit_Decl(self, node):
        if node.init:
            self._collect(node.init)

    def visit_FuncCall(self, node):
        if node.args:
            self._collect(node.args)
class ForbiddenOpVisitor(pycparser.c_ast.NodeVisitor):
    """Accumulates an error entry for every boolean/comparison binary
    operator, logical-not unary operator, and ternary operator found in
    the visited subtree."""

    # Binary operators that count as boolean operations.
    _BOOL_BINOPS = ('<', '<=', '>', '>=', '==', '!=', '&&', '||')

    def __init__(self):
        self.errors = []

    def visit_BinaryOp(self, node):
        # Left operand first, then the operator itself, then the right
        # operand — preserving the reporting order of the errors.
        self.visit(node.left)
        if node.op in self._BOOL_BINOPS:
            self.errors.append(
                "\n {} at {c.file}:{c.line}:{c.column}".format(node.op, c=node.coord))
        self.visit(node.right)

    def visit_UnaryOp(self, node):
        if node.op == '!':
            self.errors.append(
                "\n {} at {c.file}:{c.line}:{c.column}".format(node.op, c=node.coord))
        self.visit(node.expr)

    def visit_TernaryOp(self, node):
        # Ternaries are reported but their operands are not descended into.
        self.errors.append(
            "\n ternary operator at {c.file}:{c.line}:{c.column}".format(c=node.coord))
@pytest.mark.parametrize(
    'implementation',
    pqclean.Scheme.all_implementations(),
    ids=str,
)
@helpers.skip_windows()
@helpers.filtered_test
def test_boolean(implementation):
    """Parse every .c file of the implementation and fail if any boolean
    operation appears in an assignment, initializer or call argument."""
    errors = []
    for fname in os.listdir(implementation.path()):
        if not fname.endswith(".c"):
            continue
        tdir, _ = os.path.split(os.path.realpath(__file__))
        ast = pycparser.parse_file(
            os.path.join(implementation.path(), fname),
            use_cpp=True,
            cpp_path='cc',  # not all platforms link cpp correctly; cc -E works
            cpp_args=[
                '-E',
                '-std=c99',
                '-nostdinc',  # pycparser cannot deal with e.g. __attribute__
                '-I{}'.format(os.path.join(tdir, "../common")),
                # necessary to mock e.g. <stdint.h>
                '-I{}'.format(
                    os.path.join(tdir, 'pycparser/utils/fake_libc_include')),
            ]
        )
        v = ForbiddenLineVisitor()
        v.visit(ast)
        errors.extend(v.errors)
    if errors:
        # Collate every violation into one assertion message.
        raise AssertionError(
            "Prohibited use of boolean operations in assignment or function call" +
            "".join(errors)
        )
# Allow running this test file directly through pytest.
if __name__ == "__main__":
    import sys
    pytest.main(sys.argv)
|
/*!
* VisualEditor ContentEditable Node tests.
*
* @copyright 2011-2020 VisualEditor Team and others; see http://ve.mit-license.org
*/
QUnit.module( 've.ce.Node' );

/* Stubs */

// Minimal concrete ve.ce.Node subclass used as a fixture by the tests below.
ve.ce.NodeStub = function VeCeNodeStub() {
	// Parent constructor
	ve.ce.NodeStub.super.apply( this, arguments );
};

OO.inheritClass( ve.ce.NodeStub, ve.ce.Node );

ve.ce.NodeStub.static.name = 'stub';

// Register the stub so the node factory can construct it by name.
ve.ce.nodeFactory.register( ve.ce.NodeStub );
/* Tests */

QUnit.test( 'getModel', function ( assert ) {
	var model = new ve.dm.NodeStub(),
		view = new ve.ce.NodeStub( model );
	assert.strictEqual( view.getModel(), model, 'returns reference to model given to constructor' );
} );

QUnit.test( 'getParent', function ( assert ) {
	var a = new ve.ce.NodeStub( new ve.dm.NodeStub() );
	assert.strictEqual( a.getParent(), null, 'returns null if not attached' );
} );

QUnit.test( 'attach', function ( assert ) {
	var a = new ve.ce.NodeStub( new ve.dm.NodeStub() ),
		b = new ve.ce.NodeStub( new ve.dm.NodeStub() );
	// The 'attach' event must fire during attach() with the new parent.
	a.on( 'attach', function ( parent ) {
		assert.strictEqual( parent, b, 'attach event is called with parent as first argument' );
	} );
	a.attach( b );
	assert.strictEqual( a.getParent(), b, 'parent is set to given object after attach' );
} );

QUnit.test( 'detach', function ( assert ) {
	var a = new ve.ce.NodeStub( new ve.dm.NodeStub() ),
		b = new ve.ce.NodeStub( new ve.dm.NodeStub() );
	a.attach( b );
	// The 'detach' event must fire with the parent being detached from.
	a.on( 'detach', function ( parent ) {
		assert.strictEqual( parent, b, 'detach event is called with parent as first argument' );
	} );
	a.detach();
	assert.strictEqual( a.getParent(), null, 'parent is set null after detach' );
} );
|
#pragma once
#include <iostream>
#include <memory>
#include <utility>
#include <algorithm>
#include <functional>
#include <string>
#include <sstream>
#include <vector>
#include <unordered_map>
#include <unordered_set>
#ifdef INF_PLATFORM_WINDOWS
#include <Windows.h>
#endif
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-10-17 13:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Constrains Review.rating to integer choices 1 through 10."""

    dependencies = [
        ('app', '0003_auto_20181017_1631'),
    ]

    operations = [
        migrations.AlterField(
            model_name='review',
            name='rating',
            field=models.IntegerField(
                # Evaluates to exactly [(1, '1'), (2, '2'), ..., (10, '10')].
                choices=[(value, str(value)) for value in range(1, 11)],
            ),
        ),
    ]
|
import random
from time import sleep
lista = ['PAPEL', 'PEDRA', 'TESOURA']
x = random.choice(lista)
escolha = str(input('Escolha uma opção: Pedra, Papel ou Tesoura? ')).upper()
print('JO')
sleep(1)
print('KEN')
sleep(1)
print('PO')
sleep(1)
if x == 'PEDRA' and escolha == 'PAPEL':
print('Você: {}\nComputador: {}\nVocê Ganhou do Computador'.format(escolha, x))
elif x == 'TESOURA' and escolha == 'PEDRA':
print('Você: {}\nComputador: {}\nVocê Ganhou do Computador'.format(escolha, x))
elif x == 'PAPEL' and escolha == 'TESOURA':
print('Você: {}\nComputador: {}\nVocê Ganhou do Computador'.format(escolha, x))
elif x == escolha:
print('Você: {}\nComputador: {}\nEmpate, Jogar Novamente!'.format(escolha, x))
else:
print('Você: {}\nComputador: {}\n>> Você perdeu!!! <<'.format(escolha, x))
|
// Mocha suite wrapper: requiring ./TCPTransport registers its test cases
// under the "multi-rpc-tcp-transport" suite.
describe("multi-rpc-tcp-transport", function () {
    require("./TCPTransport");
});
|
/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "test/core/end2end/end2end_tests.h"
#include <stdio.h>
#include <string.h>
#include <grpc/byte_buffer.h>
#include <grpc/support/alloc.h>
#include <grpc/support/log.h>
#include <grpc/support/time.h>
#include <grpc/support/useful.h>
#include "test/core/end2end/cq_verifier.h"
/* Pack an integer into the opaque void* tag slot that the completion
   queue hands back with each event. */
static void *tag(intptr_t t) {
  return (void *)t;
}
/* Create and initialize a test fixture for the given config: server is
   brought up before the client so the client has something to connect to. */
static grpc_end2end_test_fixture begin_test(grpc_end2end_test_config config,
                                            const char *test_name,
                                            grpc_channel_args *client_args,
                                            grpc_channel_args *server_args) {
  grpc_end2end_test_fixture f;
  gpr_log(GPR_INFO, "%s/%s", test_name, config.name);
  f = config.create_fixture(client_args, server_args);
  config.init_server(&f, server_args);
  config.init_client(&f, client_args);
  return f;
}
/* Absolute deadline n seconds from now. */
static gpr_timespec n_seconds_time(int n) {
  return GRPC_TIMEOUT_SECONDS_TO_DEADLINE(n);
}
/* Default 5-second deadline used throughout this test. */
static gpr_timespec five_seconds_time(void) { return n_seconds_time(5); }
/* Pull events off the completion queue until it reports shutdown. */
static void drain_cq(grpc_completion_queue *cq) {
  grpc_event ev;
  do {
    ev = grpc_completion_queue_next(cq, five_seconds_time(), NULL);
  } while (ev.type != GRPC_QUEUE_SHUTDOWN);
}
/* Gracefully shut down and destroy the fixture's server (idempotent:
   a NULL server is a no-op).  Waits up to 5s for the shutdown tag. */
static void shutdown_server(grpc_end2end_test_fixture *f) {
  if (!f->server) return;
  grpc_server_shutdown_and_notify(f->server, f->cq, tag(1000));
  GPR_ASSERT(grpc_completion_queue_pluck(
                 f->cq, tag(1000), GRPC_TIMEOUT_SECONDS_TO_DEADLINE(5), NULL)
                 .type == GRPC_OP_COMPLETE);
  grpc_server_destroy(f->server);
  f->server = NULL;
}
/* Destroy the fixture's client channel (idempotent: NULL is a no-op). */
static void shutdown_client(grpc_end2end_test_fixture *f) {
  if (!f->client) return;
  grpc_channel_destroy(f->client);
  f->client = NULL;
}
/* Tear down the whole fixture: server first, then client, then drain and
   destroy the completion queue. */
static void end_test(grpc_end2end_test_fixture *f) {
  shutdown_server(f);
  shutdown_client(f);

  grpc_completion_queue_shutdown(f->cq);
  drain_cq(f->cq);
  grpc_completion_queue_destroy(f->cq);
}
/* Run one complete unary call over the fixture: the client sends an empty
   request to /foo and the server replies with status UNIMPLEMENTED and
   details "xyz".  Used both as a standalone sanity call and as a
   settings-roundtrip ping-pong by other tests in this file. */
static void simple_request_body(grpc_end2end_test_fixture f) {
  grpc_call *c;
  grpc_call *s;
  gpr_timespec deadline = five_seconds_time();
  cq_verifier *cqv = cq_verifier_create(f.cq);
  grpc_op ops[6];
  grpc_op *op;
  grpc_metadata_array initial_metadata_recv;
  grpc_metadata_array trailing_metadata_recv;
  grpc_metadata_array request_metadata_recv;
  grpc_call_details call_details;
  grpc_status_code status;
  grpc_call_error error;
  char *details = NULL;
  size_t details_capacity = 0;
  /* Initialized to a value RECV_CLOSE_ON_SERVER can never write, so the
     final assert proves the op actually ran. */
  int was_cancelled = 2;

  c = grpc_channel_create_call(f.client, NULL, GRPC_PROPAGATE_DEFAULTS, f.cq,
                               "/foo", "foo.test.google.fr:1234", deadline,
                               NULL);
  GPR_ASSERT(c);

  grpc_metadata_array_init(&initial_metadata_recv);
  grpc_metadata_array_init(&trailing_metadata_recv);
  grpc_metadata_array_init(&request_metadata_recv);
  grpc_call_details_init(&call_details);

  /* Client batch (tag 1): send initial metadata, half-close, and receive
     the server's initial metadata and final status. */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_CLOSE_FROM_CLIENT;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_INITIAL_METADATA;
  op->data.recv_initial_metadata = &initial_metadata_recv;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_STATUS_ON_CLIENT;
  op->data.recv_status_on_client.trailing_metadata = &trailing_metadata_recv;
  op->data.recv_status_on_client.status = &status;
  op->data.recv_status_on_client.status_details = &details;
  op->data.recv_status_on_client.status_details_capacity = &details_capacity;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(c, ops, (size_t)(op - ops), tag(1), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  /* Ask the server for the incoming call (tag 101) and wait for it. */
  error =
      grpc_server_request_call(f.server, &s, &call_details,
                               &request_metadata_recv, f.cq, f.cq, tag(101));
  GPR_ASSERT(GRPC_CALL_OK == error);
  CQ_EXPECT_COMPLETION(cqv, tag(101), 1);
  cq_verify(cqv);

  /* Server batch (tag 102): send initial metadata, reply with
     UNIMPLEMENTED/"xyz", and observe the client's half-close. */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_STATUS_FROM_SERVER;
  op->data.send_status_from_server.trailing_metadata_count = 0;
  op->data.send_status_from_server.status = GRPC_STATUS_UNIMPLEMENTED;
  op->data.send_status_from_server.status_details = "xyz";
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_CLOSE_ON_SERVER;
  op->data.recv_close_on_server.cancelled = &was_cancelled;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(s, ops, (size_t)(op - ops), tag(102), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  CQ_EXPECT_COMPLETION(cqv, tag(102), 1);
  CQ_EXPECT_COMPLETION(cqv, tag(1), 1);
  cq_verify(cqv);

  /* Verify what the client observed matches what the server sent. */
  GPR_ASSERT(status == GRPC_STATUS_UNIMPLEMENTED);
  GPR_ASSERT(0 == strcmp(details, "xyz"));
  GPR_ASSERT(0 == strcmp(call_details.method, "/foo"));
  GPR_ASSERT(0 == strcmp(call_details.host, "foo.test.google.fr:1234"));
  GPR_ASSERT(was_cancelled == 1);

  gpr_free(details);
  grpc_metadata_array_destroy(&initial_metadata_recv);
  grpc_metadata_array_destroy(&trailing_metadata_recv);
  grpc_metadata_array_destroy(&request_metadata_recv);
  grpc_call_details_destroy(&call_details);

  grpc_call_destroy(c);
  grpc_call_destroy(s);

  cq_verifier_destroy(cqv);
}
/* Verifies GRPC_ARG_MAX_CONCURRENT_STREAMS: with the server limited to one
   concurrent stream, two client calls started at the same time must be
   serialized - exactly one is live at first, and the second only starts
   after the first completes. */
static void test_max_concurrent_streams(grpc_end2end_test_config config) {
  grpc_end2end_test_fixture f;
  grpc_arg server_arg;
  grpc_channel_args server_args;
  grpc_call *c1;
  grpc_call *c2;
  grpc_call *s1;
  grpc_call *s2;
  int live_call;
  gpr_timespec deadline;
  cq_verifier *cqv;
  grpc_event ev;
  grpc_call_details call_details;
  grpc_metadata_array request_metadata_recv;
  grpc_metadata_array initial_metadata_recv1;
  grpc_metadata_array trailing_metadata_recv1;
  grpc_metadata_array initial_metadata_recv2;
  grpc_metadata_array trailing_metadata_recv2;
  grpc_status_code status1;
  grpc_call_error error;
  char *details1 = NULL;
  size_t details_capacity1 = 0;
  grpc_status_code status2;
  char *details2 = NULL;
  size_t details_capacity2 = 0;
  grpc_op ops[6];
  grpc_op *op;
  int was_cancelled;
  int got_client_start;
  int got_server_start;

  /* Restrict the server to a single concurrent stream. */
  server_arg.key = GRPC_ARG_MAX_CONCURRENT_STREAMS;
  server_arg.type = GRPC_ARG_INTEGER;
  server_arg.value.integer = 1;
  server_args.num_args = 1;
  server_args.args = &server_arg;

  f = begin_test(config, "test_max_concurrent_streams", NULL, &server_args);
  cqv = cq_verifier_create(f.cq);

  grpc_metadata_array_init(&request_metadata_recv);
  grpc_metadata_array_init(&initial_metadata_recv1);
  grpc_metadata_array_init(&trailing_metadata_recv1);
  grpc_metadata_array_init(&initial_metadata_recv2);
  grpc_metadata_array_init(&trailing_metadata_recv2);
  grpc_call_details_init(&call_details);

  /* perform a ping-pong to ensure that settings have had a chance to round
     trip */
  simple_request_body(f);
  /* perform another one to make sure that the one stream case still works */
  simple_request_body(f);

  /* start two requests - ensuring that the second is not accepted until
     the first completes */
  deadline = n_seconds_time(1000);
  c1 = grpc_channel_create_call(f.client, NULL, GRPC_PROPAGATE_DEFAULTS, f.cq,
                                "/alpha", "foo.test.google.fr:1234", deadline,
                                NULL);
  GPR_ASSERT(c1);
  c2 = grpc_channel_create_call(f.client, NULL, GRPC_PROPAGATE_DEFAULTS, f.cq,
                                "/beta", "foo.test.google.fr:1234", deadline,
                                NULL);
  GPR_ASSERT(c2);

  GPR_ASSERT(GRPC_CALL_OK == grpc_server_request_call(
                                 f.server, &s1, &call_details,
                                 &request_metadata_recv, f.cq, f.cq, tag(101)));

  /* c1 batch: send initial metadata + half-close (tag 301). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_CLOSE_FROM_CLIENT;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(c1, ops, (size_t)(op - ops), tag(301), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  /* c1 batch: receive status + initial metadata (tag 302). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_RECV_STATUS_ON_CLIENT;
  op->data.recv_status_on_client.trailing_metadata = &trailing_metadata_recv1;
  op->data.recv_status_on_client.status = &status1;
  op->data.recv_status_on_client.status_details = &details1;
  op->data.recv_status_on_client.status_details_capacity = &details_capacity1;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_INITIAL_METADATA;
  op->data.recv_initial_metadata = &initial_metadata_recv1;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(c1, ops, (size_t)(op - ops), tag(302), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  /* c2 batch: send initial metadata + half-close (tag 401). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_CLOSE_FROM_CLIENT;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(c2, ops, (size_t)(op - ops), tag(401), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  /* c2 batch: receive status + initial metadata (tag 402). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_RECV_STATUS_ON_CLIENT;
  op->data.recv_status_on_client.trailing_metadata = &trailing_metadata_recv2;
  op->data.recv_status_on_client.status = &status2;
  op->data.recv_status_on_client.status_details = &details2;
  op->data.recv_status_on_client.status_details_capacity = &details_capacity2;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_INITIAL_METADATA;
  /* BUGFIX: previously aliased initial_metadata_recv1 (copy/paste from the
     c1 batch), leaving initial_metadata_recv2 initialized but unused. */
  op->data.recv_initial_metadata = &initial_metadata_recv2;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(c2, ops, (size_t)(op - ops), tag(402), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  /* Wait until the server sees one call and exactly one of the two client
     sends completes - only one stream may be admitted. */
  got_client_start = 0;
  got_server_start = 0;
  live_call = -1;
  while (!got_client_start || !got_server_start) {
    ev = grpc_completion_queue_next(f.cq, GRPC_TIMEOUT_SECONDS_TO_DEADLINE(3),
                                    NULL);
    GPR_ASSERT(ev.type == GRPC_OP_COMPLETE);
    GPR_ASSERT(ev.success);
    if (ev.tag == tag(101)) {
      GPR_ASSERT(!got_server_start);
      got_server_start = 1;
    } else {
      GPR_ASSERT(!got_client_start);
      GPR_ASSERT(ev.tag == tag(301) || ev.tag == tag(401));
      /* The /alpha or /beta calls started above could be invoked (but NOT
       * both);
       * check this here */
      /* We'll get tag 303 or 403, we want 300, 400 */
      live_call = ((int)(intptr_t)ev.tag) - 1;
      got_client_start = 1;
    }
  }
  GPR_ASSERT(live_call == 300 || live_call == 400);

  /* Server finishes the first (live) call with UNIMPLEMENTED (tag 102). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_CLOSE_ON_SERVER;
  op->data.recv_close_on_server.cancelled = &was_cancelled;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_STATUS_FROM_SERVER;
  op->data.send_status_from_server.trailing_metadata_count = 0;
  op->data.send_status_from_server.status = GRPC_STATUS_UNIMPLEMENTED;
  op->data.send_status_from_server.status_details = "xyz";
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(s1, ops, (size_t)(op - ops), tag(102), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  CQ_EXPECT_COMPLETION(cqv, tag(102), 1);
  CQ_EXPECT_COMPLETION(cqv, tag(live_call + 2), 1);
  /* first request is finished, we should be able to start the second */
  live_call = (live_call == 300) ? 400 : 300;
  CQ_EXPECT_COMPLETION(cqv, tag(live_call + 1), 1);
  cq_verify(cqv);

  GPR_ASSERT(GRPC_CALL_OK == grpc_server_request_call(
                                 f.server, &s2, &call_details,
                                 &request_metadata_recv, f.cq, f.cq, tag(201)));
  CQ_EXPECT_COMPLETION(cqv, tag(201), 1);
  cq_verify(cqv);

  /* Server finishes the second call the same way (tag 202). */
  memset(ops, 0, sizeof(ops));
  op = ops;
  op->op = GRPC_OP_SEND_INITIAL_METADATA;
  op->data.send_initial_metadata.count = 0;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_RECV_CLOSE_ON_SERVER;
  op->data.recv_close_on_server.cancelled = &was_cancelled;
  op->flags = 0;
  op->reserved = NULL;
  op++;
  op->op = GRPC_OP_SEND_STATUS_FROM_SERVER;
  op->data.send_status_from_server.trailing_metadata_count = 0;
  op->data.send_status_from_server.status = GRPC_STATUS_UNIMPLEMENTED;
  op->data.send_status_from_server.status_details = "xyz";
  op->flags = 0;
  op->reserved = NULL;
  op++;
  error = grpc_call_start_batch(s2, ops, (size_t)(op - ops), tag(202), NULL);
  GPR_ASSERT(GRPC_CALL_OK == error);

  CQ_EXPECT_COMPLETION(cqv, tag(live_call + 2), 1);
  CQ_EXPECT_COMPLETION(cqv, tag(202), 1);
  cq_verify(cqv);

  cq_verifier_destroy(cqv);

  grpc_call_destroy(c1);
  grpc_call_destroy(s1);
  grpc_call_destroy(c2);
  grpc_call_destroy(s2);

  gpr_free(details1);
  gpr_free(details2);
  grpc_metadata_array_destroy(&initial_metadata_recv1);
  grpc_metadata_array_destroy(&trailing_metadata_recv1);
  grpc_metadata_array_destroy(&initial_metadata_recv2);
  grpc_metadata_array_destroy(&trailing_metadata_recv2);
  grpc_metadata_array_destroy(&request_metadata_recv);
  grpc_call_details_destroy(&call_details);

  end_test(&f);
  config.tear_down_data(&f);
}
/* Public test entry point: runs the max-concurrent-streams scenario against
   the given end2end fixture configuration. */
void max_concurrent_streams(grpc_end2end_test_config config) {
  test_max_concurrent_streams(config);
}
void max_concurrent_streams_pre_init(void) {}
|
import argparse
import os
from typing import Text
from datetime import datetime
import json
import shutil
import tensorflow as tf
import numpy as np
from tabnet.models.classify import TabNetClassifier
from tabnet.datasets.covertype import get_dataset, get_data
from tabnet.callbacks import TensorBoardWithLR, LRFinder
from tabnet.schedules import DecayWithWarmupSchedule
from tabnet.utils import set_seed
# Working directories used by the training script.
TMPDIR = ".tmp"    # scratch space for the best-model checkpoint
LOGDIR = ".logs"   # TensorBoard log root
OUTDIR = ".outs"   # default root for exported models and test metrics
DATA_PATH = "data/covtype.csv"  # default location of the Covertype CSV

# Default hyper-parameters for training TabNet on Forest Covertype.
# NOTE(review): these look like the Covertype settings from the TabNet
# paper (arXiv:1908.07442) - confirm before citing them as such.
CONFIGS = {
    "feature_dim": 64,            # width of the feature transformer
    "output_dim": 64,             # width of the decision output
    "num_features": 54,           # Covertype has 54 input columns
    "sparsity_coefficient": 0.0001,
    "batch_size": 16384,
    "bn_virtual_bs": 512,         # ghost batch-norm virtual batch size
    "bn_momentum": 0.7,
    "n_steps": 5,                 # number of TabNet decision steps
    "relaxation_factor": 1.5,
    "n_classes": 7,
    "learning_rate": 0.02,
    "min_learning_rate": 1e-6,
    "decay_steps": 500,
    "decay_rate": 0.95,
    "total_steps": 130000,        # used to derive epochs when --epochs unset
    "clipnorm": 2.0,
    "dp": 0.2,                    # dropout rate
    "seed": 42,
}
def clean_tmp_dir():
    """Reset TMPDIR to a fresh, empty directory."""
    try:
        shutil.rmtree(TMPDIR)
    except FileNotFoundError:
        pass  # nothing to remove on the first run
    os.makedirs(TMPDIR)
def run_lrfinder(
    ds: tf.data.Dataset, model: tf.keras.Model, optimizer, lossf, steps_per_epoch: int,
) -> None:
    """Run a one-epoch learning-rate range test via the LRFinder callback.

    The callback sweeps the learning rate up to ``max_lr`` over
    ``steps_per_epoch`` batches; ``optimizer`` and ``lossf`` are accepted
    for interface symmetry with training but are not used directly here.
    """
    finder = LRFinder(num_steps=steps_per_epoch, max_lr=1)
    model.fit(ds, epochs=1, steps_per_epoch=steps_per_epoch, callbacks=[finder])
def train(
    run_name: Text,
    data_path: Text,
    out_dir: Text,
    bn_momentum: float,
    bn_virtual_bs: int,
    clipnorm: float,
    decay_rate: float,
    decay_steps: int,
    learning_rate: float,
    sparsity_coefficient: float,
    epochs: int,
    cleanup: bool,
    warmup: int,
    dp: float,
    seed: int,
    do_lr_finder: bool,
):
    """Train and evaluate a TabNet classifier on the Covertype dataset.

    Args:
        run_name: Run identifier used for the log/output sub-directories
            (may be None, in which case logs go to a timestamped directory).
        data_path: Path to the covtype CSV file.
        out_dir: Base output directory for the saved model and test metrics.
        bn_momentum: Momentum for the model's batch-normalization layers.
        bn_virtual_bs: Virtual ("ghost") batch size for batch normalization.
        clipnorm: Gradient-norm clipping value passed to Adam.
        decay_rate: Multiplicative learning-rate decay factor.
        decay_steps: Steps between applications of the decay.
        learning_rate: Initial learning rate.
        sparsity_coefficient: Weight of TabNet's sparsity regularizer.
        epochs: Epoch count; if None, derived from CONFIGS["total_steps"].
        cleanup: If True, wipe the run's output directory before training.
        warmup: Number of LR warmup steps (falsy disables warmup).
        dp: Dropout rate; values <= 0 disable dropout.
        seed: Random seed for reproducibility.
        do_lr_finder: If True, only run the LR range finder and return.
    """
    set_seed(seed)
    clean_tmp_dir()

    if cleanup:
        # NOTE(review): out_dir is namespaced by run_name only when --cleanup
        # is given; without it the model is saved to the base out_dir.
        # Confirm this asymmetry is intended.
        out_dir = os.path.join(out_dir, run_name)
        if os.path.exists(out_dir):
            shutil.rmtree(out_dir)

    df_tr, df_val, df_test = get_data(data_path)
    ds_tr = get_dataset(
        df_tr, shuffle=True, batch_size=CONFIGS["batch_size"], seed=seed
    )
    ds_val = get_dataset(
        df_val, shuffle=False, batch_size=CONFIGS["batch_size"], drop_remainder=False
    )
    ds_test = get_dataset(
        df_test, shuffle=False, batch_size=CONFIGS["batch_size"], drop_remainder=False
    )

    # Cast to int: np.floor/np.ceil return numpy floats, and Keras expects
    # integer steps_per_epoch/validation_steps/steps values.
    num_train_steps = int(np.floor(len(df_tr) / CONFIGS["batch_size"]))
    num_valid_steps = int(np.ceil(len(df_val) / CONFIGS["batch_size"]))
    num_test_steps = int(np.ceil(len(df_test) / CONFIGS["batch_size"]))

    model = TabNetClassifier(
        num_features=CONFIGS["num_features"],
        feature_dim=CONFIGS["feature_dim"],
        output_dim=CONFIGS["output_dim"],
        n_classes=CONFIGS["n_classes"],
        n_step=CONFIGS["n_steps"],
        relaxation_factor=CONFIGS["relaxation_factor"],
        sparsity_coefficient=sparsity_coefficient,
        bn_momentum=bn_momentum,
        # BUGFIX: use the bn_virtual_bs argument; previously the CLI flag was
        # silently ignored in favor of CONFIGS["bn_virtual_bs"] (identical
        # behavior when the flag is left at its default).
        bn_virtual_divider=int(CONFIGS["batch_size"] / bn_virtual_bs),
        dp=dp if dp > 0 else None,
    )
    model.build((None, CONFIGS["num_features"]))
    model.summary()

    # Select the learning-rate schedule.
    if warmup:
        lr = DecayWithWarmupSchedule(
            learning_rate, CONFIGS["min_learning_rate"], warmup, decay_rate, decay_steps
        )
    elif do_lr_finder:
        # The LR finder sweeps the rate itself, so start from a constant.
        lr = learning_rate
    else:
        lr = tf.keras.optimizers.schedules.ExponentialDecay(
            learning_rate,
            decay_steps=decay_steps,
            decay_rate=decay_rate,
            staircase=False,
        )

    optimizer = tf.keras.optimizers.Adam(learning_rate=lr, clipnorm=clipnorm)
    lossf = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
    model.compile(
        optimizer,
        loss=lossf,
        metrics=[tf.keras.metrics.SparseCategoricalAccuracy(name="accuracy")],
    )

    if do_lr_finder:
        run_lrfinder(ds_tr, model, optimizer, lossf, num_train_steps)
        return

    # Derive the epoch count from the total-steps budget when not given.
    epochs = (
        int(np.ceil(CONFIGS["total_steps"] / num_train_steps))
        if epochs is None
        else epochs
    )

    log_dir = (
        os.path.join(LOGDIR, datetime.strftime(datetime.now(), "%Y-%m-%d-%H-%M-%S"))
        if run_name is None
        else os.path.join(LOGDIR, run_name)
    )
    if os.path.exists(log_dir):
        shutil.rmtree(log_dir)

    checkpoint_path = os.path.join(TMPDIR, "checkpoint")
    callbacks = [
        TensorBoardWithLR(log_dir=log_dir, write_graph=True, profile_batch=0),
        tf.keras.callbacks.ModelCheckpoint(
            filepath=checkpoint_path,
            monitor="val_accuracy",
            verbose=1,
            mode="max",
            save_best_only=True,
        ),
    ]

    # train
    model.fit(
        ds_tr,
        epochs=epochs,
        validation_data=ds_val,
        steps_per_epoch=num_train_steps,
        validation_steps=num_valid_steps,
        callbacks=callbacks,
    )
    # Restore the best checkpoint (by validation accuracy) before exporting.
    model.load_weights(checkpoint_path)
    model.save_to_directory(out_dir)

    # evaluate
    metrics = model.evaluate(ds_test, steps=num_test_steps, return_dict=True)
    with open(os.path.join(out_dir, "test_results.json"), "w") as f:
        json.dump(metrics, f)
    print(metrics)
# example: python benchmarks/covertype.py --run_name w200_dp0.4 --epochs 1500 --warmup 200 --dp 0.4
if __name__ == "__main__":
    # CLI wrapper around train(); defaults come from CONFIGS.
    parser = argparse.ArgumentParser("TabNet Covertype Training")
    parser.add_argument("--run_name", default=None, type=str)
    parser.add_argument("--data_path", default=DATA_PATH, type=str)
    parser.add_argument("--out_dir", default=OUTDIR, type=str)
    parser.add_argument("--bn_momentum", default=CONFIGS["bn_momentum"], type=float)
    parser.add_argument("--bn_virtual_bs", default=CONFIGS["bn_virtual_bs"], type=int)
    parser.add_argument("--clipnorm", default=CONFIGS["clipnorm"], type=float)
    parser.add_argument("--decay_rate", default=CONFIGS["decay_rate"], type=float)
    parser.add_argument("--decay_steps", default=CONFIGS["decay_steps"], type=int)
    # BUGFIX: was type=int, which made any fractional value on the command
    # line (e.g. --learning_rate 0.01) raise a ValueError.
    parser.add_argument("--learning_rate", default=CONFIGS["learning_rate"], type=float)
    parser.add_argument("--dp", default=CONFIGS["dp"], type=float)
    parser.add_argument("--seed", default=CONFIGS["seed"], type=int)
    parser.add_argument(
        "--sparsity_coefficient", default=CONFIGS["sparsity_coefficient"], type=float
    )
    parser.add_argument("--epochs", default=None, type=int)
    parser.add_argument(
        "--cleanup",
        action="store_true",
        help="Cleanup the output folder before starting the training.",
    )
    parser.add_argument("--warmup", default=None, type=int)
    parser.add_argument(
        "--do_lr_finder", action="store_true", help="Run only the LR finder"
    )
    args = parser.parse_args()
    train(
        args.run_name,
        args.data_path,
        args.out_dir,
        args.bn_momentum,
        args.bn_virtual_bs,
        args.clipnorm,
        args.decay_rate,
        args.decay_steps,
        args.learning_rate,
        args.sparsity_coefficient,
        args.epochs,
        args.cleanup,
        args.warmup,
        args.dp,
        args.seed,
        args.do_lr_finder,
    )
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements Randomized LayerNormalization, a Batch Normalization substitute.
For every item in a batch and for every layer, we calculate the mean and
variance across the spatial dimensions, and multiply them by Gaussian noise with
a mean equal to 1.0 (at training time only). This improved the results compared
to batch normalization - more in https://arxiv.org/abs/1904.04998.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v1 as tf
def normalize(x, is_train, name='bn', stddev=0.5):
  """Normalizes `x` per item and randomly perturbs the statistics in training.

  The mean and variance are computed over the spatial dimensions of each
  batch item; at training time both are multiplied by truncated-Gaussian
  noise centered at 1.0.

  Args:
    x: tf.Tensor to normalize, of shape [B, H, W, C].
    is_train: A boolean, True at training mode.
    name: A string, a name scope.
    stddev: Standard deviation of the Gaussian noise. Defaults to 0.5 because
      this is the largest value where the noise is guaranteed to be a
      non-negative multiplicative factor.

  Returns:
    A tf.Tensor of shape [B, H, W, C], the normalized tensor.
  """
  with tf.variable_scope(name, None, [x]):
    # Learnable per-channel offset and scale.
    channels = x.shape.as_list()[-1:]
    beta = tf.get_variable(
        'beta', shape=channels, initializer=tf.initializers.zeros())
    gamma = tf.get_variable(
        'gamma', shape=channels, initializer=tf.initializers.ones())
    # Per-item statistics over the spatial axes (H, W).
    mu, var = tf.nn.moments(x, [1, 2], keep_dims=True)
    if is_train:
      # Truncated normal keeps the factor within 1 +/- 2*stddev, so it stays
      # non-negative for stddev <= 0.5.
      mu *= 1.0 + tf.random.truncated_normal(tf.shape(mu), stddev=stddev)
      var *= 1.0 + tf.random.truncated_normal(tf.shape(var), stddev=stddev)
    normalized = tf.nn.batch_normalization(
        x, mu, var, offset=beta, scale=gamma, variance_epsilon=1e-3)
    normalized.set_shape(x.shape)
    return normalized
|