hexsha
stringlengths
40
40
size
int64
2
1.02M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
4
245
max_stars_repo_name
stringlengths
6
130
max_stars_repo_head_hexsha
stringlengths
40
40
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
4
245
max_issues_repo_name
stringlengths
6
130
max_issues_repo_head_hexsha
stringlengths
40
40
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringlengths
24
24
max_issues_repo_issues_event_max_datetime
stringlengths
24
24
max_forks_repo_path
stringlengths
4
245
max_forks_repo_name
stringlengths
6
130
max_forks_repo_head_hexsha
stringlengths
40
40
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
2
1.02M
avg_line_length
float64
1
417k
max_line_length
int64
1
987k
alphanum_fraction
float64
0
1
content_no_comment
stringlengths
0
1.01M
is_comment_constant_removed
bool
1 class
is_sharp_comment_removed
bool
1 class
f71cc7dd876392c1eb5462cd7fd83e0f8c22bec2
19,039
py
Python
convlab2/policy/larl/multiwoz/latent_dialog/enc2dec/decoders.py
ljw23/ConvLab-2
13d48ea0e441701bd66100689b6c25b561f15525
[ "Apache-2.0" ]
339
2020-03-04T09:43:22.000Z
2022-03-26T17:27:38.000Z
convlab2/policy/larl/multiwoz/latent_dialog/enc2dec/decoders.py
ljw23/ConvLab-2
13d48ea0e441701bd66100689b6c25b561f15525
[ "Apache-2.0" ]
122
2020-04-12T04:19:06.000Z
2022-03-23T14:20:57.000Z
convlab2/policy/larl/multiwoz/latent_dialog/enc2dec/decoders.py
ljw23/ConvLab-2
13d48ea0e441701bd66100689b6c25b561f15525
[ "Apache-2.0" ]
138
2020-02-18T16:48:04.000Z
2022-03-26T17:27:43.000Z
import torch as th import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torch.autograd import Variable import numpy as np from convlab2.policy.larl.multiwoz.latent_dialog.enc2dec.base_modules import BaseRNN from convlab2.policy.larl.multiwoz.latent_dialog.utils import cast_type, LONG, FLOAT from convlab2.policy.larl.multiwoz.latent_dialog.corpora import DECODING_MASKED_TOKENS, EOS TEACH_FORCE = 'teacher_forcing' TEACH_GEN = 'teacher_gen' GEN = 'gen' GEN_VALID = 'gen_valid' class Attention(nn.Module): def __init__(self, dec_cell_size, ctx_cell_size, attn_mode, project): super(Attention, self).__init__() self.dec_cell_size = dec_cell_size self.ctx_cell_size = ctx_cell_size self.attn_mode = attn_mode if project: self.linear_out = nn.Linear( dec_cell_size+ctx_cell_size, dec_cell_size) else: self.linear_out = None if attn_mode == 'general': self.dec_w = nn.Linear(dec_cell_size, ctx_cell_size) elif attn_mode == 'cat': self.dec_w = nn.Linear(dec_cell_size, dec_cell_size) self.attn_w = nn.Linear(ctx_cell_size, dec_cell_size) self.query_w = nn.Linear(dec_cell_size, 1) def forward(self, output, context): # output: (batch_size, output_seq_len, dec_cell_size) # context: (batch_size, max_ctx_len, ctx_cell_size) batch_size = output.size(0) max_ctx_len = context.size(1) if self.attn_mode == 'dot': # (batch_size, output_seq_len, max_ctx_len) attn = th.bmm(output, context.transpose(1, 2)) elif self.attn_mode == 'general': # (batch_size, output_seq_len, ctx_cell_size) mapped_output = self.dec_w(output) # (batch_size, output_seq_len, max_ctx_len) attn = th.bmm(mapped_output, context.transpose(1, 2)) elif self.attn_mode == 'cat': # (batch_size, output_seq_len, dec_cell_size) mapped_output = self.dec_w(output) # (batch_size, max_ctx_len, dec_cell_size) mapped_attn = self.attn_w(context) # (batch_size, output_seq_len, max_ctx_len, dec_cell_size) tiled_output = mapped_output.unsqueeze( 2).repeat(1, 1, max_ctx_len, 1) # (batch_size, 1, max_ctx_len, 
dec_cell_size) tiled_attn = mapped_attn.unsqueeze(1) # (batch_size, output_seq_len, max_ctx_len, dec_cell_size) fc1 = F.tanh(tiled_output+tiled_attn) # (batch_size, otuput_seq_len, max_ctx_len) attn = self.query_w(fc1).squeeze(-1) else: raise ValueError('Unknown attention mode') # TODO mask # if self.mask is not None: # (batch_size, output_seq_len, max_ctx_len) attn = F.softmax(attn.view(-1, max_ctx_len), dim=1).view(batch_size, -1, max_ctx_len) # (batch_size, output_seq_len, ctx_cell_size) mix = th.bmm(attn, context) # (batch_size, output_seq_len, dec_cell_size+ctx_cell_size) combined = th.cat((mix, output), dim=2) if self.linear_out is None: return combined, attn else: output = F.tanh( self.linear_out(combined.view(-1, self.dec_cell_size+self.ctx_cell_size))).view( batch_size, -1, self.dec_cell_size) # (batch_size, output_seq_len, dec_cell_size) return output, attn class DecoderRNN(BaseRNN): def __init__(self, input_dropout_p, rnn_cell, input_size, hidden_size, num_layers, output_dropout_p, bidirectional, vocab_size, use_attn, ctx_cell_size, attn_mode, sys_id, eos_id, use_gpu, max_dec_len, embedding=None): super(DecoderRNN, self).__init__(input_dropout_p=input_dropout_p, rnn_cell=rnn_cell, input_size=input_size, hidden_size=hidden_size, num_layers=num_layers, output_dropout_p=output_dropout_p, bidirectional=bidirectional) # TODO embedding is None or not if embedding is None: self.embedding = nn.Embedding(vocab_size, input_size) else: self.embedding = embedding # share parameters between encoder and decoder # self.rnn = ctx_encoder.rnn # self.FC = nn.Linear(input_size, utt_encoder.output_size) self.use_attn = use_attn if self.use_attn: self.attention = Attention(dec_cell_size=hidden_size, ctx_cell_size=ctx_cell_size, attn_mode=attn_mode, project=True) self.dec_cell_size = hidden_size self.output_size = vocab_size self.project = nn.Linear(self.dec_cell_size, self.output_size) self.log_softmax = F.log_softmax self.sys_id = sys_id self.eos_id = eos_id self.use_gpu = 
use_gpu self.max_dec_len = max_dec_len def forward(self, batch_size, dec_inputs, dec_init_state, attn_context, mode, gen_type, beam_size, goal_hid=None): # dec_inputs: (batch_size, response_size-1) # attn_context: (batch_size, max_ctx_len, ctx_cell_size) # goal_hid: (batch_size, goal_nhid) ret_dict = dict() if self.use_attn: ret_dict[DecoderRNN.KEY_ATTN_SCORE] = list() if mode == GEN: dec_inputs = None if gen_type != 'beam': beam_size = 1 if dec_inputs is not None: decoder_input = dec_inputs else: # prepare the BOS inputs with th.no_grad(): bos_var = Variable(th.LongTensor([self.sys_id])) bos_var = cast_type(bos_var, LONG, self.use_gpu) decoder_input = bos_var.expand( batch_size*beam_size, 1) # (batch_size, 1) if mode == GEN and gen_type == 'beam': # TODO if beam search, repeat the initial states of the RNN pass else: decoder_hidden_state = dec_init_state # list of logprob | max_dec_len*(batch_size, 1, vocab_size) prob_outputs = [] symbol_outputs = [] # list of word ids | max_dec_len*(batch_size, 1) # back_pointers = [] # lengths = blabla... 
def decode(step, cum_sum, step_output, step_attn): prob_outputs.append(step_output) step_output_slice = step_output.squeeze( 1) # (batch_size, vocab_size) if self.use_attn: ret_dict[DecoderRNN.KEY_ATTN_SCORE].append(step_attn) if gen_type == 'greedy': _, symbols = step_output_slice.topk(1) # (batch_size, 1) elif gen_type == 'sample': # TODO FIXME # symbols = self.gumbel_max(step_output_slice) pass elif gen_type == 'beam': # TODO pass else: raise ValueError('Unsupported decoding mode') symbol_outputs.append(symbols) return cum_sum, symbols if mode == TEACH_FORCE: prob_outputs, decoder_hidden_state, attn = self.forward_step( input_var=decoder_input, hidden_state=decoder_hidden_state, encoder_outputs=attn_context, goal_hid=goal_hid) else: # do free running here cum_sum = None for step in range(self.max_dec_len): # Input: # decoder_input: (batch_size, 1) # decoder_hidden_state: tuple: (h, c) # attn_context: (batch_size, max_ctx_len, ctx_cell_size) # goal_hid: (batch_size, goal_nhid) # Output: # decoder_output: (batch_size, 1, vocab_size) # decoder_hidden_state: tuple: (h, c) # step_attn: (batch_size, 1, max_ctx_len) decoder_output, decoder_hidden_state, step_attn = self.forward_step( decoder_input, decoder_hidden_state, attn_context, goal_hid=goal_hid) cum_sum, symbols = decode( step, cum_sum, decoder_output, step_attn) decoder_input = symbols # (batch_size, max_dec_len, vocab_size) prob_outputs = th.cat(prob_outputs, dim=1) # back tracking to recover the 1-best in beam search # if gen_type == 'beam': ret_dict[DecoderRNN.KEY_SEQUENCE] = symbol_outputs # prob_outputs: (batch_size, max_dec_len, vocab_size) # decoder_hidden_state: tuple: (h, c) # ret_dict[DecoderRNN.KEY_ATTN_SCORE]: max_dec_len*(batch_size, 1, max_ctx_len) # ret_dict[DecoderRNN.KEY_SEQUENCE]: max_dec_len*(batch_size, 1) return prob_outputs, decoder_hidden_state, ret_dict def forward_step(self, input_var, hidden_state, encoder_outputs, goal_hid): # input_var: (batch_size, response_size-1 i.e. 
output_seq_len) # hidden_state: tuple: (h, c) # encoder_outputs: (batch_size, max_ctx_len, ctx_cell_size) # goal_hid: (batch_size, goal_nhid) batch_size, output_seq_len = input_var.size() # (batch_size, output_seq_len, embedding_dim) embedded = self.embedding(input_var) # add goals if goal_hid is not None: # (batch_size, 1, goal_nhid) goal_hid = goal_hid.view(goal_hid.size(0), 1, goal_hid.size(1)) # (batch_size, output_seq_len, goal_nhid) goal_rep = goal_hid.repeat(1, output_seq_len, 1) # (batch_size, output_seq_len, embedding_dim+goal_nhid) embedded = th.cat([embedded, goal_rep], dim=2) embedded = self.input_dropout(embedded) # ############ # embedded = self.FC(embedded.view(-1, embedded.size(-1))).view(batch_size, output_seq_len, -1) # output: (batch_size, output_seq_len, dec_cell_size) # hidden: tuple: (h, c) output, hidden_s = self.rnn(embedded, hidden_state) attn = None if self.use_attn: # output: (batch_size, output_seq_len, dec_cell_size) # encoder_outputs: (batch_size, max_ctx_len, ctx_cell_size) # attn: (batch_size, output_seq_len, max_ctx_len) output, attn = self.attention(output, encoder_outputs) # (batch_size*output_seq_len, vocab_size) logits = self.project(output.contiguous().view(-1, self.dec_cell_size)) prediction = self.log_softmax(logits, dim=logits.dim( )-1).view(batch_size, output_seq_len, -1) # (batch_size, output_seq_len, vocab_size) return prediction, hidden_s, attn # special for rl def _step(self, input_var, hidden_state, encoder_outputs, goal_hid): # input_var: (1, 1) # hidden_state: tuple: (h, c) # encoder_outputs: (1, max_dlg_len, dlg_cell_size) # goal_hid: (1, goal_nhid) batch_size, output_seq_len = input_var.size() embedded = self.embedding(input_var) # (1, 1, embedding_dim) if goal_hid is not None: goal_hid = goal_hid.view(goal_hid.size( 0), 1, goal_hid.size(1)) # (1, 1, goal_nhid) goal_rep = goal_hid.repeat( 1, output_seq_len, 1) # (1, 1, goal_nhid) # (1, 1, embedding_dim+goal_nhid) embedded = th.cat([embedded, goal_rep], dim=2) 
embedded = self.input_dropout(embedded) # ############ # embedded = self.FC(embedded.view(-1, embedded.size(-1))).view(batch_size, output_seq_len, -1) # output: (1, 1, dec_cell_size) # hidden: tuple: (h, c) output, hidden_s = self.rnn(embedded, hidden_state) attn = None if self.use_attn: # output: (1, 1, dec_cell_size) # encoder_outputs: (1, max_dlg_len, dlg_cell_size) # attn: (1, 1, max_dlg_len) output, attn = self.attention(output, encoder_outputs) # (1*1, vocab_size) logits = self.project(output.view(-1, self.dec_cell_size)) prediction = logits.view( batch_size, output_seq_len, -1) # (1, 1, vocab_size) # prediction = self.log_softmax(logits, dim=logits.dim()-1).view(batch_size, output_seq_len, -1) # (batch_size, output_seq_len, vocab_size) return prediction, hidden_s # special for rl def write(self, input_var, hidden_state, encoder_outputs, max_words, vocab, stop_tokens, goal_hid=None, mask=True, decoding_masked_tokens=DECODING_MASKED_TOKENS): # input_var: (1, 1) # hidden_state: tuple: (h, c) # encoder_outputs: max_dlg_len*(1, 1, dlg_cell_size) # goal_hid: (1, goal_nhid) logprob_outputs = [] # list of logprob | max_dec_len*(1, ) symbol_outputs = [] # list of word ids | max_dec_len*(1, ) decoder_input = input_var decoder_hidden_state = hidden_state if type(encoder_outputs) is list: # (1, max_dlg_len, dlg_cell_size) encoder_outputs = th.cat(encoder_outputs, 1) # print('encoder_outputs.size() = {}'.format(encoder_outputs.size())) if mask: special_token_mask = Variable(th.FloatTensor( [-999. if token in decoding_masked_tokens else 0. 
for token in vocab])) special_token_mask = cast_type( special_token_mask, FLOAT, self.use_gpu) # (vocab_size, ) def _sample(dec_output, num_i): # dec_output: (1, 1, vocab_size), need to softmax and log_softmax dec_output = dec_output.view(-1) # (vocab_size, ) # TODO temperature prob = F.softmax(dec_output/0.6, dim=0) # (vocab_size, ) logprob = F.log_softmax(dec_output, dim=0) # (vocab_size, ) symbol = prob.multinomial(num_samples=1).detach() # (1, ) # _, symbol = prob.topk(1) # (1, ) _, tmp_symbol = prob.topk(1) # (1, ) # print('multinomial symbol = {}, prob = {}'.format(symbol, prob[symbol.item()])) # print('topk symbol = {}, prob = {}'.format(tmp_symbol, prob[tmp_symbol.item()])) logprob = logprob.gather(0, symbol) # (1, ) return logprob, symbol for i in range(max_words): decoder_output, decoder_hidden_state = self._step( decoder_input, decoder_hidden_state, encoder_outputs, goal_hid) # disable special tokens from being generated in a normal turn if mask: decoder_output += special_token_mask.expand(1, 1, -1) logprob, symbol = _sample(decoder_output, i) logprob_outputs.append(logprob) symbol_outputs.append(symbol) decoder_input = symbol.view(1, -1) if vocab[symbol.item()] in stop_tokens: break assert len(logprob_outputs) == len(symbol_outputs) # logprob_list = [t.item() for t in logprob_outputs] logprob_list = logprob_outputs symbol_list = [t.item() for t in symbol_outputs] return logprob_list, symbol_list # For MultiWoz RL def forward_rl(self, batch_size, dec_init_state, attn_context, vocab, max_words, goal_hid=None, mask=True, temp=0.1): # prepare the BOS inputs with th.no_grad(): bos_var = Variable(th.LongTensor([self.sys_id])) bos_var = cast_type(bos_var, LONG, self.use_gpu) decoder_input = bos_var.expand(batch_size, 1) # (1, 1) decoder_hidden_state = dec_init_state # tuple: (h, c) encoder_outputs = attn_context # (1, ctx_len, ctx_cell_size) logprob_outputs = [] # list of logprob | max_dec_len*(1, ) symbol_outputs = [] # list of word ids | max_dec_len*(1, ) if 
mask: special_token_mask = Variable(th.FloatTensor( [-999. if token in DECODING_MASKED_TOKENS else 0. for token in vocab])) special_token_mask = cast_type( special_token_mask, FLOAT, self.use_gpu) # (vocab_size, ) def _sample(dec_output, num_i): # dec_output: (1, 1, vocab_size), need to softmax and log_softmax # (batch_size, vocab_size, ) dec_output = dec_output.view(batch_size, -1) # (batch_size, vocab_size, ) prob = F.softmax(dec_output/temp, dim=1) # (batch_size, vocab_size, ) logprob = F.log_softmax(dec_output, dim=1) symbol = prob.multinomial( num_samples=1).detach() # (batch_size, 1) # _, symbol = prob.topk(1) # (1, ) _, tmp_symbol = prob.topk(1) # (1, ) # print('multinomial symbol = {}, prob = {}'.format(symbol, prob[symbol.item()])) # print('topk symbol = {}, prob = {}'.format(tmp_symbol, prob[tmp_symbol.item()])) logprob = logprob.gather(1, symbol) # (1, ) return logprob, symbol stopped_samples = set() for i in range(max_words): decoder_output, decoder_hidden_state = self._step( decoder_input, decoder_hidden_state, encoder_outputs, goal_hid) # disable special tokens from being generated in a normal turn if mask: decoder_output += special_token_mask.expand(1, 1, -1) logprob, symbol = _sample(decoder_output, i) logprob_outputs.append(logprob) symbol_outputs.append(symbol) decoder_input = symbol.view(batch_size, -1) for b_id in range(batch_size): if vocab[symbol[b_id].item()] == EOS: stopped_samples.add(b_id) if len(stopped_samples) == batch_size: break assert len(logprob_outputs) == len(symbol_outputs) symbol_outputs = th.cat( symbol_outputs, dim=1).cpu().data.numpy().tolist() logprob_outputs = th.cat(logprob_outputs, dim=1) logprob_list = [] symbol_list = [] for b_id in range(batch_size): b_logprob = [] b_symbol = [] for t_id in range(logprob_outputs.shape[1]): symbol = symbol_outputs[b_id][t_id] if vocab[symbol] == EOS and t_id != 0: break b_symbol.append(symbol_outputs[b_id][t_id]) b_logprob.append(logprob_outputs[b_id][t_id]) 
logprob_list.append(b_logprob) symbol_list.append(b_symbol) # TODO backward compatible, if batch_size == 1, we remove the nested structure if batch_size == 1: logprob_list = logprob_list[0] symbol_list = symbol_list[0] return logprob_list, symbol_list
43.270455
147
0.59625
import torch as th import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torch.autograd import Variable import numpy as np from convlab2.policy.larl.multiwoz.latent_dialog.enc2dec.base_modules import BaseRNN from convlab2.policy.larl.multiwoz.latent_dialog.utils import cast_type, LONG, FLOAT from convlab2.policy.larl.multiwoz.latent_dialog.corpora import DECODING_MASKED_TOKENS, EOS TEACH_FORCE = 'teacher_forcing' TEACH_GEN = 'teacher_gen' GEN = 'gen' GEN_VALID = 'gen_valid' class Attention(nn.Module): def __init__(self, dec_cell_size, ctx_cell_size, attn_mode, project): super(Attention, self).__init__() self.dec_cell_size = dec_cell_size self.ctx_cell_size = ctx_cell_size self.attn_mode = attn_mode if project: self.linear_out = nn.Linear( dec_cell_size+ctx_cell_size, dec_cell_size) else: self.linear_out = None if attn_mode == 'general': self.dec_w = nn.Linear(dec_cell_size, ctx_cell_size) elif attn_mode == 'cat': self.dec_w = nn.Linear(dec_cell_size, dec_cell_size) self.attn_w = nn.Linear(ctx_cell_size, dec_cell_size) self.query_w = nn.Linear(dec_cell_size, 1) def forward(self, output, context): batch_size = output.size(0) max_ctx_len = context.size(1) if self.attn_mode == 'dot': attn = th.bmm(output, context.transpose(1, 2)) elif self.attn_mode == 'general': mapped_output = self.dec_w(output) attn = th.bmm(mapped_output, context.transpose(1, 2)) elif self.attn_mode == 'cat': mapped_output = self.dec_w(output) mapped_attn = self.attn_w(context) tiled_output = mapped_output.unsqueeze( 2).repeat(1, 1, max_ctx_len, 1) tiled_attn = mapped_attn.unsqueeze(1) fc1 = F.tanh(tiled_output+tiled_attn) attn = self.query_w(fc1).squeeze(-1) else: raise ValueError('Unknown attention mode') attn = F.softmax(attn.view(-1, max_ctx_len), dim=1).view(batch_size, -1, max_ctx_len) mix = th.bmm(attn, context) combined = th.cat((mix, output), dim=2) if self.linear_out is None: return combined, attn else: output = F.tanh( self.linear_out(combined.view(-1, 
self.dec_cell_size+self.ctx_cell_size))).view( batch_size, -1, self.dec_cell_size) return output, attn class DecoderRNN(BaseRNN): def __init__(self, input_dropout_p, rnn_cell, input_size, hidden_size, num_layers, output_dropout_p, bidirectional, vocab_size, use_attn, ctx_cell_size, attn_mode, sys_id, eos_id, use_gpu, max_dec_len, embedding=None): super(DecoderRNN, self).__init__(input_dropout_p=input_dropout_p, rnn_cell=rnn_cell, input_size=input_size, hidden_size=hidden_size, num_layers=num_layers, output_dropout_p=output_dropout_p, bidirectional=bidirectional) if embedding is None: self.embedding = nn.Embedding(vocab_size, input_size) else: self.embedding = embedding self.use_attn = use_attn if self.use_attn: self.attention = Attention(dec_cell_size=hidden_size, ctx_cell_size=ctx_cell_size, attn_mode=attn_mode, project=True) self.dec_cell_size = hidden_size self.output_size = vocab_size self.project = nn.Linear(self.dec_cell_size, self.output_size) self.log_softmax = F.log_softmax self.sys_id = sys_id self.eos_id = eos_id self.use_gpu = use_gpu self.max_dec_len = max_dec_len def forward(self, batch_size, dec_inputs, dec_init_state, attn_context, mode, gen_type, beam_size, goal_hid=None): ret_dict = dict() if self.use_attn: ret_dict[DecoderRNN.KEY_ATTN_SCORE] = list() if mode == GEN: dec_inputs = None if gen_type != 'beam': beam_size = 1 if dec_inputs is not None: decoder_input = dec_inputs else: with th.no_grad(): bos_var = Variable(th.LongTensor([self.sys_id])) bos_var = cast_type(bos_var, LONG, self.use_gpu) decoder_input = bos_var.expand( batch_size*beam_size, 1) if mode == GEN and gen_type == 'beam': pass else: decoder_hidden_state = dec_init_state prob_outputs = [] symbol_outputs = [] def decode(step, cum_sum, step_output, step_attn): prob_outputs.append(step_output) step_output_slice = step_output.squeeze( 1) if self.use_attn: ret_dict[DecoderRNN.KEY_ATTN_SCORE].append(step_attn) if gen_type == 'greedy': _, symbols = step_output_slice.topk(1) elif gen_type 
== 'sample': pass elif gen_type == 'beam': pass else: raise ValueError('Unsupported decoding mode') symbol_outputs.append(symbols) return cum_sum, symbols if mode == TEACH_FORCE: prob_outputs, decoder_hidden_state, attn = self.forward_step( input_var=decoder_input, hidden_state=decoder_hidden_state, encoder_outputs=attn_context, goal_hid=goal_hid) else: cum_sum = None for step in range(self.max_dec_len): decoder_output, decoder_hidden_state, step_attn = self.forward_step( decoder_input, decoder_hidden_state, attn_context, goal_hid=goal_hid) cum_sum, symbols = decode( step, cum_sum, decoder_output, step_attn) decoder_input = symbols prob_outputs = th.cat(prob_outputs, dim=1) ret_dict[DecoderRNN.KEY_SEQUENCE] = symbol_outputs return prob_outputs, decoder_hidden_state, ret_dict def forward_step(self, input_var, hidden_state, encoder_outputs, goal_hid): batch_size, output_seq_len = input_var.size() embedded = self.embedding(input_var) if goal_hid is not None: goal_hid = goal_hid.view(goal_hid.size(0), 1, goal_hid.size(1)) goal_rep = goal_hid.repeat(1, output_seq_len, 1) embedded = th.cat([embedded, goal_rep], dim=2) embedded = self.input_dropout(embedded) den_state) attn = None if self.use_attn: output, attn = self.attention(output, encoder_outputs) logits = self.project(output.contiguous().view(-1, self.dec_cell_size)) prediction = self.log_softmax(logits, dim=logits.dim( )-1).view(batch_size, output_seq_len, -1) return prediction, hidden_s, attn def _step(self, input_var, hidden_state, encoder_outputs, goal_hid): batch_size, output_seq_len = input_var.size() embedded = self.embedding(input_var) if goal_hid is not None: goal_hid = goal_hid.view(goal_hid.size( 0), 1, goal_hid.size(1)) goal_rep = goal_hid.repeat( 1, output_seq_len, 1) embedded = th.cat([embedded, goal_rep], dim=2) embedded = self.input_dropout(embedded) den_state) attn = None if self.use_attn: output, attn = self.attention(output, encoder_outputs) logits = self.project(output.view(-1, 
self.dec_cell_size)) prediction = logits.view( batch_size, output_seq_len, -1) def write(self, input_var, hidden_state, encoder_outputs, max_words, vocab, stop_tokens, goal_hid=None, mask=True, decoding_masked_tokens=DECODING_MASKED_TOKENS): logprob_outputs = [] symbol_outputs = [] decoder_input = input_var decoder_hidden_state = hidden_state if type(encoder_outputs) is list: encoder_outputs = th.cat(encoder_outputs, 1) if mask: special_token_mask = Variable(th.FloatTensor( [-999. if token in decoding_masked_tokens else 0. for token in vocab])) special_token_mask = cast_type( special_token_mask, FLOAT, self.use_gpu) def _sample(dec_output, num_i): dec_output = dec_output.view(-1) prob = F.softmax(dec_output/0.6, dim=0) logprob = F.log_softmax(dec_output, dim=0) symbol = prob.multinomial(num_samples=1).detach() _, tmp_symbol = prob.topk(1) logprob = logprob.gather(0, symbol) return logprob, symbol for i in range(max_words): decoder_output, decoder_hidden_state = self._step( decoder_input, decoder_hidden_state, encoder_outputs, goal_hid) if mask: decoder_output += special_token_mask.expand(1, 1, -1) logprob, symbol = _sample(decoder_output, i) logprob_outputs.append(logprob) symbol_outputs.append(symbol) decoder_input = symbol.view(1, -1) if vocab[symbol.item()] in stop_tokens: break assert len(logprob_outputs) == len(symbol_outputs) logprob_list = logprob_outputs symbol_list = [t.item() for t in symbol_outputs] return logprob_list, symbol_list def forward_rl(self, batch_size, dec_init_state, attn_context, vocab, max_words, goal_hid=None, mask=True, temp=0.1): with th.no_grad(): bos_var = Variable(th.LongTensor([self.sys_id])) bos_var = cast_type(bos_var, LONG, self.use_gpu) decoder_input = bos_var.expand(batch_size, 1) decoder_hidden_state = dec_init_state encoder_outputs = attn_context logprob_outputs = [] symbol_outputs = [] if mask: special_token_mask = Variable(th.FloatTensor( [-999. if token in DECODING_MASKED_TOKENS else 0. 
for token in vocab])) special_token_mask = cast_type( special_token_mask, FLOAT, self.use_gpu) def _sample(dec_output, num_i): dec_output = dec_output.view(batch_size, -1) prob = F.softmax(dec_output/temp, dim=1) logprob = F.log_softmax(dec_output, dim=1) symbol = prob.multinomial( num_samples=1).detach() _, tmp_symbol = prob.topk(1) logprob = logprob.gather(1, symbol) return logprob, symbol stopped_samples = set() for i in range(max_words): decoder_output, decoder_hidden_state = self._step( decoder_input, decoder_hidden_state, encoder_outputs, goal_hid) if mask: decoder_output += special_token_mask.expand(1, 1, -1) logprob, symbol = _sample(decoder_output, i) logprob_outputs.append(logprob) symbol_outputs.append(symbol) decoder_input = symbol.view(batch_size, -1) for b_id in range(batch_size): if vocab[symbol[b_id].item()] == EOS: stopped_samples.add(b_id) if len(stopped_samples) == batch_size: break assert len(logprob_outputs) == len(symbol_outputs) symbol_outputs = th.cat( symbol_outputs, dim=1).cpu().data.numpy().tolist() logprob_outputs = th.cat(logprob_outputs, dim=1) logprob_list = [] symbol_list = [] for b_id in range(batch_size): b_logprob = [] b_symbol = [] for t_id in range(logprob_outputs.shape[1]): symbol = symbol_outputs[b_id][t_id] if vocab[symbol] == EOS and t_id != 0: break b_symbol.append(symbol_outputs[b_id][t_id]) b_logprob.append(logprob_outputs[b_id][t_id]) logprob_list.append(b_logprob) symbol_list.append(b_symbol) if batch_size == 1: logprob_list = logprob_list[0] symbol_list = symbol_list[0] return logprob_list, symbol_list
true
true
f71cc8fe00c0b5c8796f9ec6ac11c85930433e40
4,318
py
Python
WebMirror/management/UrlManage.py
awesome-archive/ReadableWebProxy
360104694a21bc14c7756f29205c95823387e30b
[ "BSD-3-Clause" ]
193
2016-08-02T22:04:35.000Z
2022-03-09T20:45:41.000Z
WebMirror/management/UrlManage.py
awesome-archive/ReadableWebProxy
360104694a21bc14c7756f29205c95823387e30b
[ "BSD-3-Clause" ]
533
2016-08-23T20:48:23.000Z
2022-03-28T15:55:13.000Z
WebMirror/management/UrlManage.py
awesome-archive/ReadableWebProxy
360104694a21bc14c7756f29205c95823387e30b
[ "BSD-3-Clause" ]
19
2015-08-13T18:01:08.000Z
2021-07-12T17:13:09.000Z
import calendar import datetime import json import os import os.path import shutil import tqdm import traceback from concurrent.futures import ThreadPoolExecutor import urllib.error import urllib.parse from sqlalchemy import and_ from sqlalchemy import or_ import sqlalchemy.exc if __name__ == "__main__": import logSetup logSetup.initLogging() import common.database as db import common.Exceptions import common.management.util import common.management.file_cleanup import common.management.WebMirrorManage import WebMirror.processor.RssProcessor import flags import pprint import config from config import C_RAW_RESOURCE_DIR import WebMirror.OutputFilters.rss.FeedDataParser import WebMirror.OutputFilters.util.feedNameLut import WebRequest import WebMirror.Engine def exposed_fix_lndb_urls(): ''' Scan the qidian feed items, and extract the book url segments which are not in the feedparser url-seg -> title map. Given those segments, then do a HTTP fetch, and pull out the page title. Finally, print that information in a nice table for updating the scraper func. 
''' with db.session_context() as sess: pages = sess.query(db.WebPages) \ .filter(db.WebPages.netloc == "lndb.info") \ .all() print(pages) # feed_url = feed_item.urls[0].feed_url # pfunc = feed_item.get_func() # missing = [] # for release in feed_item.releases: # item = {} # item['title'] = release.title # item['guid'] = release.contentid # item['linkUrl'] = release.contenturl # item['feedUrl'] = feed_url # item['srcname'] = "wat" # item['published'] = "wat" # ret = pfunc(item) # if not ret: # missing.append(release.contenturl) # urls = {} # for url in missing: # root, _ = url.rsplit("/", 1) # urls[root] = url # wg = WebRequest.WebGetRobust() # lines = [] # for root, url in urls.items(): # urlfrag = root.split("www")[-1] # meta = common.management.util.get_page_title(wg, url) # title = meta['title'] # outstr = " ('www{}/', '{}', 'translated'),".format(urlfrag, title) # lines.append(outstr) # for outstr in lines: # print(outstr) def exposed_fix_nu_duplicate_url_segments(): ''' So the novelupdate scrape borked somewhere, and is generating duplicate URLs. Gah. 
''' with db.session_context() as sess: print("Querying for rows") res = sess.query(db.NuReleaseItem.id, db.NuReleaseItem.outbound_wrapper).all() print("Found %s nu outbound wrappers" % len(res)) total = 0 bad = 0 urlmap = {} fix_list = [] print("Parsing URLs") for dbid, url in res: total += 1 if url.count("http") > 1: bad += 1 fix_list.append((dbid, url)) # print(dbid, url) else: urlmap[url] = dbid print("Found %s links, %s of which are invalid" % (total, bad)) count = 0 with db.session_context() as sess: for dbid, url in tqdm.tqdm(fix_list, desc="Fixing duplicate NU urls."): actual_url, _ = url.split("http://") if actual_url in urlmap: res = sess.query(db.NuResolvedOutbound).filter_by(parent=dbid).update({"parent" : urlmap[actual_url]}) res = sess.query(db.NuReleaseItem).filter_by(id=dbid).delete() else: res = sess.query(db.NuReleaseItem).filter_by(id=dbid).update({"outbound_wrapper" : actual_url}) urlmap[url] = dbid count += 1 if count > 2500: count = 0 sess.commit() # res = sess.query(db.NuReleaseItem.id, db.NuReleaseItem.outbound_wrapper).all() sess.commit() # print(dbid, curl) def exposed_scan_for_masked_urls(): ''' Do a streaming iteration over the rows in the database, and run them through the url filtering mechanism to see if any are actually not wanted. ''' engine = WebMirror.Engine.SiteArchiver(None, None, None) with db.session_context() as sess: print("Querying for rows") iterable = sess.query(db.WebPages.id, db.WebPages.url, db.WebPages.netloc) \ .order_by(db.WebPages.netloc) \ .yield_per(1000) rows = 0 skipped = [] for rid, url, netloc in tqdm.tqdm(iterable): ret = engine.external_link_check(netloc, url) if not ret: skipped.append((netloc, url, rid)) rows += 1 print("Found %s rows. Saving" % rows) with open("delete_netlocs.json", "w") as fp: json.dump(skipped, fp, indent=4) print("Saved to output json")
23.988889
112
0.684576
import calendar import datetime import json import os import os.path import shutil import tqdm import traceback from concurrent.futures import ThreadPoolExecutor import urllib.error import urllib.parse from sqlalchemy import and_ from sqlalchemy import or_ import sqlalchemy.exc if __name__ == "__main__": import logSetup logSetup.initLogging() import common.database as db import common.Exceptions import common.management.util import common.management.file_cleanup import common.management.WebMirrorManage import WebMirror.processor.RssProcessor import flags import pprint import config from config import C_RAW_RESOURCE_DIR import WebMirror.OutputFilters.rss.FeedDataParser import WebMirror.OutputFilters.util.feedNameLut import WebRequest import WebMirror.Engine def exposed_fix_lndb_urls(): with db.session_context() as sess: pages = sess.query(db.WebPages) \ .filter(db.WebPages.netloc == "lndb.info") \ .all() print(pages) def exposed_fix_nu_duplicate_url_segments(): with db.session_context() as sess: print("Querying for rows") res = sess.query(db.NuReleaseItem.id, db.NuReleaseItem.outbound_wrapper).all() print("Found %s nu outbound wrappers" % len(res)) total = 0 bad = 0 urlmap = {} fix_list = [] print("Parsing URLs") for dbid, url in res: total += 1 if url.count("http") > 1: bad += 1 fix_list.append((dbid, url)) else: urlmap[url] = dbid print("Found %s links, %s of which are invalid" % (total, bad)) count = 0 with db.session_context() as sess: for dbid, url in tqdm.tqdm(fix_list, desc="Fixing duplicate NU urls."): actual_url, _ = url.split("http://") if actual_url in urlmap: res = sess.query(db.NuResolvedOutbound).filter_by(parent=dbid).update({"parent" : urlmap[actual_url]}) res = sess.query(db.NuReleaseItem).filter_by(id=dbid).delete() else: res = sess.query(db.NuReleaseItem).filter_by(id=dbid).update({"outbound_wrapper" : actual_url}) urlmap[url] = dbid count += 1 if count > 2500: count = 0 sess.commit() sess.commit() def exposed_scan_for_masked_urls(): engine = 
WebMirror.Engine.SiteArchiver(None, None, None) with db.session_context() as sess: print("Querying for rows") iterable = sess.query(db.WebPages.id, db.WebPages.url, db.WebPages.netloc) \ .order_by(db.WebPages.netloc) \ .yield_per(1000) rows = 0 skipped = [] for rid, url, netloc in tqdm.tqdm(iterable): ret = engine.external_link_check(netloc, url) if not ret: skipped.append((netloc, url, rid)) rows += 1 print("Found %s rows. Saving" % rows) with open("delete_netlocs.json", "w") as fp: json.dump(skipped, fp, indent=4) print("Saved to output json")
true
true
f71cc93f4fb121302f3d3b609755c636dc186814
53,235
py
Python
zerver/views/message_fetch.py
CatarinaSMorais/zulip
e943d717b84291397328bd4dc578c04eed21885e
[ "Apache-2.0" ]
1
2021-08-10T07:31:27.000Z
2021-08-10T07:31:27.000Z
zerver/views/message_fetch.py
CatarinaSMorais/zulip
e943d717b84291397328bd4dc578c04eed21885e
[ "Apache-2.0" ]
1
2021-08-05T14:46:02.000Z
2021-08-05T14:46:02.000Z
zerver/views/message_fetch.py
CatarinaSMorais/zulip
e943d717b84291397328bd4dc578c04eed21885e
[ "Apache-2.0" ]
1
2021-08-05T14:27:13.000Z
2021-08-05T14:27:13.000Z
import re from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union import orjson from django.conf import settings from django.contrib.auth.models import AnonymousUser from django.core.exceptions import ValidationError from django.db import connection from django.http import HttpRequest, HttpResponse from django.utils.html import escape as escape_html from django.utils.translation import gettext as _ from sqlalchemy import func from sqlalchemy.dialects import postgresql from sqlalchemy.engine import Connection, RowProxy from sqlalchemy.sql import ( ClauseElement, ColumnElement, FromClause, Select, alias, and_, column, join, literal, literal_column, not_, or_, select, table, union_all, ) from sqlalchemy.types import Boolean, Integer, Text from zerver.context_processors import get_valid_realm_from_request from zerver.decorator import REQ, has_request_variables from zerver.lib.actions import recipient_for_user_profiles from zerver.lib.addressee import get_user_profiles, get_user_profiles_by_ids from zerver.lib.exceptions import ErrorCode, JsonableError, MissingAuthenticationError from zerver.lib.message import get_first_visible_message_id, messages_for_ids from zerver.lib.narrow import is_web_public_compatible, is_web_public_narrow from zerver.lib.response import json_error, json_success from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection from zerver.lib.streams import ( can_access_stream_history_by_id, can_access_stream_history_by_name, get_public_streams_queryset, get_stream_by_narrow_operand_access_unchecked, get_web_public_streams_queryset, ) from zerver.lib.topic import DB_TOPIC_NAME, MATCH_TOPIC, topic_column_sa, topic_match_sa from zerver.lib.topic_mutes import exclude_topic_mutes from zerver.lib.types import Validator from zerver.lib.utils import statsd from zerver.lib.validator import ( check_bool, check_dict, check_int, check_list, check_required_string, check_string, check_string_or_int, check_string_or_int_list, 
to_non_negative_int, ) from zerver.models import ( Realm, Recipient, Stream, Subscription, UserMessage, UserProfile, get_active_streams, get_user_by_id_in_realm_including_cross_realm, get_user_including_cross_realm, ) LARGER_THAN_MAX_MESSAGE_ID = 10000000000000000 MAX_MESSAGES_PER_FETCH = 5000 class BadNarrowOperator(JsonableError): code = ErrorCode.BAD_NARROW data_fields = ["desc"] def __init__(self, desc: str) -> None: self.desc: str = desc @staticmethod def msg_format() -> str: return _("Invalid narrow operator: {desc}") ConditionTransform = Callable[[ClauseElement], ClauseElement] OptionalNarrowListT = Optional[List[Dict[str, Any]]] # These delimiters will not appear in rendered messages or HTML-escaped topics. TS_START = "<ts-match>" TS_STOP = "</ts-match>" def ts_locs_array( config: "ColumnElement[str]", text: "ColumnElement[str]", tsquery: "ColumnElement[object]", ) -> "ColumnElement[List[List[int]]]": options = f"HighlightAll = TRUE, StartSel = {TS_START}, StopSel = {TS_STOP}" delimited = func.ts_headline(config, text, tsquery, options) parts = func.unnest(func.string_to_array(delimited, TS_START)).alias() part = column(parts.name, Text) part_len = func.length(part) - len(TS_STOP) match_pos = func.sum(part_len).over(rows=(None, -1)) + len(TS_STOP) match_len = func.strpos(part, TS_STOP) - 1 ret = func.array( select( [ postgresql.array([match_pos, match_len]), # type: ignore[call-overload] # https://github.com/dropbox/sqlalchemy-stubs/issues/188 ] ) .select_from(parts) .offset(1) .as_scalar(), ) return ret # When you add a new operator to this, also update zerver/lib/narrow.py class NarrowBuilder: """ Build up a SQLAlchemy query to find messages matching a narrow. """ # This class has an important security invariant: # # None of these methods ever *add* messages to a query's result. 
# # That is, the `add_term` method, and its helpers the `by_*` methods, # are passed a Select object representing a query for messages; they may # call some methods on it, and then they return a resulting Select # object. Things these methods may do to the queries they handle # include # * add conditions to filter out rows (i.e., messages), with `query.where` # * add columns for more information on the same message, with `query.column` # * add a join for more information on the same message # # Things they may not do include # * anything that would pull in additional rows, or information on # other messages. def __init__( self, user_profile: Optional[UserProfile], msg_id_column: "ColumnElement[int]", realm: Realm, is_web_public_query: bool = False, ) -> None: self.user_profile = user_profile self.msg_id_column = msg_id_column self.realm = realm self.is_web_public_query = is_web_public_query def add_term(self, query: Select, term: Dict[str, Any]) -> Select: """ Extend the given query to one narrowed by the given term, and return the result. This method satisfies an important security property: the returned query never includes a message that the given query didn't. In particular, if the given query will only find messages that a given user can legitimately see, then so will the returned query. """ # To maintain the security property, we hold all the `by_*` # methods to the same criterion. See the class's block comment # for details. # We have to be careful here because we're letting users call a method # by name! The prefix 'by_' prevents it from colliding with builtin # Python __magic__ stuff. 
operator = term["operator"] operand = term["operand"] negated = term.get("negated", False) method_name = "by_" + operator.replace("-", "_") method = getattr(self, method_name, None) if method is None: raise BadNarrowOperator("unknown operator " + operator) if negated: maybe_negate = not_ else: maybe_negate = lambda cond: cond return method(query, operand, maybe_negate) def by_has(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: if operand not in ["attachment", "image", "link"]: raise BadNarrowOperator("unknown 'has' operand " + operand) col_name = "has_" + operand cond = column(col_name, Boolean) return query.where(maybe_negate(cond)) def by_in(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: # This operator does not support is_web_public_query. assert not self.is_web_public_query assert self.user_profile is not None if operand == "home": conditions = exclude_muting_conditions(self.user_profile, []) return query.where(and_(*conditions)) elif operand == "all": return query raise BadNarrowOperator("unknown 'in' operand " + operand) def by_is(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: # This operator class does not support is_web_public_query. 
assert not self.is_web_public_query assert self.user_profile is not None if operand == "private": cond = column("flags", Integer).op("&")(UserMessage.flags.is_private.mask) != 0 return query.where(maybe_negate(cond)) elif operand == "starred": cond = column("flags", Integer).op("&")(UserMessage.flags.starred.mask) != 0 return query.where(maybe_negate(cond)) elif operand == "unread": cond = column("flags", Integer).op("&")(UserMessage.flags.read.mask) == 0 return query.where(maybe_negate(cond)) elif operand == "mentioned": cond1 = column("flags", Integer).op("&")(UserMessage.flags.mentioned.mask) != 0 cond2 = column("flags", Integer).op("&")(UserMessage.flags.wildcard_mentioned.mask) != 0 cond = or_(cond1, cond2) return query.where(maybe_negate(cond)) elif operand == "alerted": cond = column("flags", Integer).op("&")(UserMessage.flags.has_alert_word.mask) != 0 return query.where(maybe_negate(cond)) raise BadNarrowOperator("unknown 'is' operand " + operand) _alphanum = frozenset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789") def _pg_re_escape(self, pattern: str) -> str: """ Escape user input to place in a regex Python's re.escape escapes Unicode characters in a way which PostgreSQL fails on, '\u03bb' to '\\\u03bb'. This function will correctly escape them for PostgreSQL, '\u03bb' to '\\u03bb'. """ s = list(pattern) for i, c in enumerate(s): if c not in self._alphanum: if ord(c) >= 128: # convert the character to hex PostgreSQL regex will take # \uXXXX s[i] = f"\\u{ord(c):0>4x}" else: s[i] = "\\" + c return "".join(s) def by_stream( self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform ) -> Select: try: # Because you can see your own message history for # private streams you are no longer subscribed to, we # need get_stream_by_narrow_operand_access_unchecked here. 
stream = get_stream_by_narrow_operand_access_unchecked(operand, self.realm) if self.is_web_public_query and not stream.is_web_public: raise BadNarrowOperator("unknown web-public stream " + str(operand)) except Stream.DoesNotExist: raise BadNarrowOperator("unknown stream " + str(operand)) if self.realm.is_zephyr_mirror_realm: # MIT users expect narrowing to "social" to also show messages to # /^(un)*social(.d)*$/ (unsocial, ununsocial, social.d, ...). # In `ok_to_include_history`, we assume that a non-negated # `stream` term for a public stream will limit the query to # that specific stream. So it would be a bug to hit this # codepath after relying on this term there. But all streams in # a Zephyr realm are private, so that doesn't happen. assert not stream.is_public() m = re.search(r"^(?:un)*(.+?)(?:\.d)*$", stream.name, re.IGNORECASE) # Since the regex has a `.+` in it and "" is invalid as a # stream name, this will always match assert m is not None base_stream_name = m.group(1) matching_streams = get_active_streams(self.realm).filter( name__iregex=fr"^(un)*{self._pg_re_escape(base_stream_name)}(\.d)*$" ) recipient_ids = [matching_stream.recipient_id for matching_stream in matching_streams] cond = column("recipient_id", Integer).in_(recipient_ids) return query.where(maybe_negate(cond)) recipient = stream.recipient cond = column("recipient_id", Integer) == recipient.id return query.where(maybe_negate(cond)) def by_streams(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: if operand == "public": # Get all both subscribed and non subscribed public streams # but exclude any private subscribed streams. 
recipient_queryset = get_public_streams_queryset(self.realm) elif operand == "web-public": recipient_queryset = get_web_public_streams_queryset(self.realm) else: raise BadNarrowOperator("unknown streams operand " + operand) recipient_ids = recipient_queryset.values_list("recipient_id", flat=True).order_by("id") cond = column("recipient_id", Integer).in_(recipient_ids) return query.where(maybe_negate(cond)) def by_topic(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: if self.realm.is_zephyr_mirror_realm: # MIT users expect narrowing to topic "foo" to also show messages to /^foo(.d)*$/ # (foo, foo.d, foo.d.d, etc) m = re.search(r"^(.*?)(?:\.d)*$", operand, re.IGNORECASE) # Since the regex has a `.*` in it, this will always match assert m is not None base_topic = m.group(1) # Additionally, MIT users expect the empty instance and # instance "personal" to be the same. if base_topic in ("", "personal", '(instance "")'): cond: ClauseElement = or_( topic_match_sa(""), topic_match_sa(".d"), topic_match_sa(".d.d"), topic_match_sa(".d.d.d"), topic_match_sa(".d.d.d.d"), topic_match_sa("personal"), topic_match_sa("personal.d"), topic_match_sa("personal.d.d"), topic_match_sa("personal.d.d.d"), topic_match_sa("personal.d.d.d.d"), topic_match_sa('(instance "")'), topic_match_sa('(instance "").d'), topic_match_sa('(instance "").d.d'), topic_match_sa('(instance "").d.d.d'), topic_match_sa('(instance "").d.d.d.d'), ) else: # We limit `.d` counts, since PostgreSQL has much better # query planning for this than they do for a regular # expression (which would sometimes table scan). 
cond = or_( topic_match_sa(base_topic), topic_match_sa(base_topic + ".d"), topic_match_sa(base_topic + ".d.d"), topic_match_sa(base_topic + ".d.d.d"), topic_match_sa(base_topic + ".d.d.d.d"), ) return query.where(maybe_negate(cond)) cond = topic_match_sa(operand) return query.where(maybe_negate(cond)) def by_sender( self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform ) -> Select: try: if isinstance(operand, str): sender = get_user_including_cross_realm(operand, self.realm) else: sender = get_user_by_id_in_realm_including_cross_realm(operand, self.realm) except UserProfile.DoesNotExist: raise BadNarrowOperator("unknown user " + str(operand)) cond = column("sender_id", Integer) == literal(sender.id) return query.where(maybe_negate(cond)) def by_near(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: return query def by_id( self, query: Select, operand: Union[int, str], maybe_negate: ConditionTransform ) -> Select: if not str(operand).isdigit(): raise BadNarrowOperator("Invalid message ID") cond = self.msg_id_column == literal(operand) return query.where(maybe_negate(cond)) def by_pm_with( self, query: Select, operand: Union[str, Iterable[int]], maybe_negate: ConditionTransform ) -> Select: # This operator does not support is_web_public_query. assert not self.is_web_public_query assert self.user_profile is not None try: if isinstance(operand, str): email_list = operand.split(",") user_profiles = get_user_profiles( emails=email_list, realm=self.realm, ) else: """ This is where we handle passing a list of user IDs for the narrow, which is the preferred/cleaner API. 
""" user_profiles = get_user_profiles_by_ids( user_ids=operand, realm=self.realm, ) recipient = recipient_for_user_profiles( user_profiles=user_profiles, forwarded_mirror_message=False, forwarder_user_profile=None, sender=self.user_profile, allow_deactivated=True, ) except (JsonableError, ValidationError): raise BadNarrowOperator("unknown user in " + str(operand)) # Group DM if recipient.type == Recipient.HUDDLE: cond = column("recipient_id", Integer) == recipient.id return query.where(maybe_negate(cond)) # 1:1 PM other_participant = None # Find if another person is in PM for user in user_profiles: if user.id != self.user_profile.id: other_participant = user # PM with another person if other_participant: # We need bidirectional messages PM with another person. # But Recipient.PERSONAL objects only encode the person who # received the message, and not the other participant in # the thread (the sender), we need to do a somewhat # complex query to get messages between these two users # with either of them as the sender. self_recipient_id = self.user_profile.recipient_id cond = or_( and_( column("sender_id", Integer) == other_participant.id, column("recipient_id", Integer) == self_recipient_id, ), and_( column("sender_id", Integer) == self.user_profile.id, column("recipient_id", Integer) == recipient.id, ), ) return query.where(maybe_negate(cond)) # PM with self cond = and_( column("sender_id", Integer) == self.user_profile.id, column("recipient_id", Integer) == recipient.id, ) return query.where(maybe_negate(cond)) def by_group_pm_with( self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform ) -> Select: # This operator does not support is_web_public_query. 
assert not self.is_web_public_query assert self.user_profile is not None try: if isinstance(operand, str): narrow_profile = get_user_including_cross_realm(operand, self.realm) else: narrow_profile = get_user_by_id_in_realm_including_cross_realm(operand, self.realm) except UserProfile.DoesNotExist: raise BadNarrowOperator("unknown user " + str(operand)) self_recipient_ids = [ recipient_tuple["recipient_id"] for recipient_tuple in Subscription.objects.filter( user_profile=self.user_profile, recipient__type=Recipient.HUDDLE, ).values("recipient_id") ] narrow_recipient_ids = [ recipient_tuple["recipient_id"] for recipient_tuple in Subscription.objects.filter( user_profile=narrow_profile, recipient__type=Recipient.HUDDLE, ).values("recipient_id") ] recipient_ids = set(self_recipient_ids) & set(narrow_recipient_ids) cond = column("recipient_id", Integer).in_(recipient_ids) return query.where(maybe_negate(cond)) def by_search(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select: if settings.USING_PGROONGA: return self._by_search_pgroonga(query, operand, maybe_negate) else: return self._by_search_tsearch(query, operand, maybe_negate) def _by_search_pgroonga( self, query: Select, operand: str, maybe_negate: ConditionTransform ) -> Select: match_positions_character = func.pgroonga_match_positions_character query_extract_keywords = func.pgroonga_query_extract_keywords operand_escaped = func.escape_html(operand) keywords = query_extract_keywords(operand_escaped) query = query.column( match_positions_character(column("rendered_content", Text), keywords).label( "content_matches" ) ) query = query.column( match_positions_character(func.escape_html(topic_column_sa()), keywords).label( "topic_matches" ) ) condition = column("search_pgroonga").op("&@~")(operand_escaped) return query.where(maybe_negate(condition)) def _by_search_tsearch( self, query: Select, operand: str, maybe_negate: ConditionTransform ) -> Select: tsquery = 
func.plainto_tsquery(literal("zulip.english_us_search"), literal(operand)) query = query.column( ts_locs_array( literal("zulip.english_us_search"), column("rendered_content", Text), tsquery ).label("content_matches") ) # We HTML-escape the topic in PostgreSQL to avoid doing a server round-trip query = query.column( ts_locs_array( literal("zulip.english_us_search"), func.escape_html(topic_column_sa()), tsquery ).label("topic_matches") ) # Do quoted string matching. We really want phrase # search here so we can ignore punctuation and do # stemming, but there isn't a standard phrase search # mechanism in PostgreSQL for term in re.findall(r'"[^"]+"|\S+', operand): if term[0] == '"' and term[-1] == '"': term = term[1:-1] term = "%" + connection.ops.prep_for_like_query(term) + "%" cond = or_(column("content", Text).ilike(term), topic_column_sa().ilike(term)) query = query.where(maybe_negate(cond)) cond = column("search_tsvector", postgresql.TSVECTOR).op("@@")(tsquery) return query.where(maybe_negate(cond)) def highlight_string(text: str, locs: Iterable[Tuple[int, int]]) -> str: highlight_start = '<span class="highlight">' highlight_stop = "</span>" pos = 0 result = "" in_tag = False for loc in locs: (offset, length) = loc prefix_start = pos prefix_end = offset match_start = offset match_end = offset + length prefix = text[prefix_start:prefix_end] match = text[match_start:match_end] for character in prefix + match: if character == "<": in_tag = True elif character == ">": in_tag = False if in_tag: result += prefix result += match else: result += prefix result += highlight_start result += match result += highlight_stop pos = match_end result += text[pos:] return result def get_search_fields( rendered_content: str, topic_name: str, content_matches: Iterable[Tuple[int, int]], topic_matches: Iterable[Tuple[int, int]], ) -> Dict[str, str]: return { "match_content": highlight_string(rendered_content, content_matches), MATCH_TOPIC: highlight_string(escape_html(topic_name), 
topic_matches), } def narrow_parameter(json: str) -> OptionalNarrowListT: data = orjson.loads(json) if not isinstance(data, list): raise ValueError("argument is not a list") if len(data) == 0: # The "empty narrow" should be None, and not [] return None def convert_term(elem: Union[Dict[str, Any], List[str]]) -> Dict[str, Any]: # We have to support a legacy tuple format. if isinstance(elem, list): if len(elem) != 2 or any(not isinstance(x, str) for x in elem): raise ValueError("element is not a string pair") return dict(operator=elem[0], operand=elem[1]) if isinstance(elem, dict): # Make sure to sync this list to frontend also when adding a new operator. # that supports user IDs. Relevant code is located in static/js/message_fetch.js # in handle_operators_supporting_id_based_api function where you will need to update # operators_supporting_id, or operators_supporting_ids array. operators_supporting_id = ["sender", "group-pm-with", "stream"] operators_supporting_ids = ["pm-with"] operators_non_empty_operand = {"search"} operator = elem.get("operator", "") if operator in operators_supporting_id: operand_validator: Validator[object] = check_string_or_int elif operator in operators_supporting_ids: operand_validator = check_string_or_int_list elif operator in operators_non_empty_operand: operand_validator = check_required_string else: operand_validator = check_string validator = check_dict( required_keys=[ ("operator", check_string), ("operand", operand_validator), ], optional_keys=[ ("negated", check_bool), ], ) try: validator("elem", elem) except ValidationError as error: raise JsonableError(error.message) # whitelist the fields we care about for now return dict( operator=elem["operator"], operand=elem["operand"], negated=elem.get("negated", False), ) raise ValueError("element is not a dictionary") return list(map(convert_term, data)) def ok_to_include_history( narrow: OptionalNarrowListT, user_profile: Optional[UserProfile], is_web_public_query: bool ) -> bool: # 
There are occasions where we need to find Message rows that # have no corresponding UserMessage row, because the user is # reading a public stream that might include messages that # were sent while the user was not subscribed, but which they are # allowed to see. We have to be very careful about constructing # queries in those situations, so this function should return True # only if we are 100% sure that we're gonna add a clause to the # query that narrows to a particular public stream on the user's realm. # If we screw this up, then we can get into a nasty situation of # polluting our narrow results with messages from other realms. # For web-public queries, we are always returning history. The # analogues of the below stream access checks for whether streams # have is_web_public set and banning is operators in this code # path are done directly in NarrowBuilder. if is_web_public_query: assert user_profile is None return True assert user_profile is not None include_history = False if narrow is not None: for term in narrow: if term["operator"] == "stream" and not term.get("negated", False): operand: Union[str, int] = term["operand"] if isinstance(operand, str): include_history = can_access_stream_history_by_name(user_profile, operand) else: include_history = can_access_stream_history_by_id(user_profile, operand) elif ( term["operator"] == "streams" and term["operand"] == "public" and not term.get("negated", False) and user_profile.can_access_public_streams() ): include_history = True # Disable historical messages if the user is narrowing on anything # that's a property on the UserMessage table. There cannot be # historical messages in these cases anyway. 
for term in narrow: if term["operator"] == "is": include_history = False return include_history def get_stream_from_narrow_access_unchecked( narrow: OptionalNarrowListT, realm: Realm ) -> Optional[Stream]: if narrow is not None: for term in narrow: if term["operator"] == "stream": return get_stream_by_narrow_operand_access_unchecked(term["operand"], realm) return None def exclude_muting_conditions( user_profile: UserProfile, narrow: OptionalNarrowListT ) -> List[ClauseElement]: conditions = [] stream_id = None try: # Note: It is okay here to not check access to stream # because we are only using the stream id to exclude data, # not to include results. stream = get_stream_from_narrow_access_unchecked(narrow, user_profile.realm) if stream is not None: stream_id = stream.id except Stream.DoesNotExist: pass # Stream-level muting only applies when looking at views that # include multiple streams, since we do want users to be able to # browser messages within a muted stream. if stream_id is None: rows = Subscription.objects.filter( user_profile=user_profile, active=True, is_muted=True, recipient__type=Recipient.STREAM, ).values("recipient_id") muted_recipient_ids = [row["recipient_id"] for row in rows] if len(muted_recipient_ids) > 0: # Only add the condition if we have muted streams to simplify/avoid warnings. condition = not_(column("recipient_id", Integer).in_(muted_recipient_ids)) conditions.append(condition) conditions = exclude_topic_mutes(conditions, user_profile, stream_id) # Muted user logic for hiding messages is implemented entirely # client-side. This is by design, as it allows UI to hint that # muted messages exist where their absence might make conversation # difficult to understand. As a result, we do not need to consider # muted users in this server-side logic for returning messages to # clients. (We could in theory exclude PMs from muted users, but # they're likely to be sufficiently rare to not be worth extra # logic/testing here). 
return conditions def get_base_query_for_search( user_profile: Optional[UserProfile], need_message: bool, need_user_message: bool ) -> Tuple[Select, "ColumnElement[int]"]: # Handle the simple case where user_message isn't involved first. if not need_user_message: assert need_message query = select([column("id", Integer).label("message_id")], None, table("zerver_message")) inner_msg_id_col: ColumnElement[int] inner_msg_id_col = literal_column("zerver_message.id", Integer) # type: ignore[assignment] # https://github.com/dropbox/sqlalchemy-stubs/pull/189 return (query, inner_msg_id_col) assert user_profile is not None if need_message: query = select( [column("message_id"), column("flags", Integer)], column("user_profile_id") == literal(user_profile.id), join( table("zerver_usermessage"), table("zerver_message"), literal_column("zerver_usermessage.message_id", Integer) == literal_column("zerver_message.id", Integer), ), ) inner_msg_id_col = column("message_id", Integer) return (query, inner_msg_id_col) query = select( [column("message_id"), column("flags", Integer)], column("user_profile_id") == literal(user_profile.id), table("zerver_usermessage"), ) inner_msg_id_col = column("message_id", Integer) return (query, inner_msg_id_col) def add_narrow_conditions( user_profile: Optional[UserProfile], inner_msg_id_col: "ColumnElement[int]", query: Select, narrow: OptionalNarrowListT, is_web_public_query: bool, realm: Realm, ) -> Tuple[Select, bool]: is_search = False # for now if narrow is None: return (query, is_search) # Build the query for the narrow builder = NarrowBuilder(user_profile, inner_msg_id_col, realm, is_web_public_query) search_operands = [] # As we loop through terms, builder does most of the work to extend # our query, but we need to collect the search operands and handle # them after the loop. 
    # Collect search operands separately; all other terms extend the
    # query immediately via the NarrowBuilder.
    for term in narrow:
        if term["operator"] == "search":
            search_operands.append(term["operand"])
        else:
            query = builder.add_term(query, term)

    if search_operands:
        # Combine all search terms into a single full-text search, and
        # pull in the columns needed to build match highlights.
        is_search = True
        query = query.column(topic_column_sa()).column(column("rendered_content", Text))
        search_term = dict(
            operator="search",
            operand=" ".join(search_operands),
        )
        query = builder.add_term(query, search_term)

    return (query, is_search)


def find_first_unread_anchor(
    sa_conn: Connection, user_profile: Optional[UserProfile], narrow: OptionalNarrowListT
) -> int:
    """Compute the message ID of the oldest unread message within the
    given narrow, to be used as the anchor for a "first_unread" fetch.

    Returns LARGER_THAN_MAX_MESSAGE_ID when there are no unread
    messages (or for anonymous users, for whom everything is treated
    as read).
    """
    # For anonymous web users, all messages are treated as read, and so
    # always return LARGER_THAN_MAX_MESSAGE_ID.
    if user_profile is None:
        return LARGER_THAN_MAX_MESSAGE_ID

    # We always need UserMessage in our query, because it has the unread
    # flag for the user.
    need_user_message = True

    # Because we will need to call exclude_muting_conditions, unless
    # the user hasn't muted anything, we will need to include Message
    # in our query.  It may be worth eventually adding an optimization
    # for the case of a user who hasn't muted anything to avoid the
    # join in that case, but it's low priority.
    need_message = True

    query, inner_msg_id_col = get_base_query_for_search(
        user_profile=user_profile,
        need_message=need_message,
        need_user_message=need_user_message,
    )

    query, is_search = add_narrow_conditions(
        user_profile=user_profile,
        inner_msg_id_col=inner_msg_id_col,
        query=query,
        narrow=narrow,
        is_web_public_query=False,
        realm=user_profile.realm,
    )

    # Unread == the read bit in the UserMessage flags bitmask is unset.
    condition = column("flags", Integer).op("&")(UserMessage.flags.read.mask) == 0

    # We exclude messages on muted topics when finding the first unread
    # message in this narrow
    muting_conditions = exclude_muting_conditions(user_profile, narrow)
    if muting_conditions:
        condition = and_(condition, *muting_conditions)

    # Only the single oldest matching unread message is needed.
    first_unread_query = query.where(condition)
    first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
    first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
    if len(first_unread_result) > 0:
        anchor = first_unread_result[0][0]
    else:
        anchor = LARGER_THAN_MAX_MESSAGE_ID
    return anchor


def parse_anchor_value(anchor_val: Optional[str], use_first_unread_anchor: bool) -> Optional[int]:
    """Given the anchor and use_first_unread_anchor parameters passed by
    the client, computes what anchor value the client requested,
    handling backwards-compatibility and the various string-valued
    fields.  We encode use_first_unread_anchor as anchor=None.
    """
    if use_first_unread_anchor:
        # Backwards-compatibility: Before we added support for the
        # special string-typed anchor values, clients would pass
        # anchor=None and use_first_unread_anchor=True to indicate
        # what is now expressed as anchor="first_unread".
        return None
    if anchor_val is None:
        # Throw an exception if neither an anchor argument nor
        # use_first_unread_anchor was specified.
        raise JsonableError(_("Missing 'anchor' argument."))
    if anchor_val == "oldest":
        return 0
    if anchor_val == "newest":
        return LARGER_THAN_MAX_MESSAGE_ID
    if anchor_val == "first_unread":
        return None
    try:
        # We don't use `.isnumeric()` to support negative numbers for
        # anchor.  We don't recommend it in the API (if you want the
        # very first message, use 0 or 1), but it used to be supported
        # and was used by the web app, so we need to continue
        # supporting it for backwards-compatibility
        anchor = int(anchor_val)
        if anchor < 0:
            return 0
        elif anchor > LARGER_THAN_MAX_MESSAGE_ID:
            return LARGER_THAN_MAX_MESSAGE_ID
        return anchor
    except ValueError:
        raise JsonableError(_("Invalid anchor"))


@has_request_variables
def get_messages_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    anchor_val: Optional[str] = REQ("anchor", default=None),
    num_before: int = REQ(converter=to_non_negative_int),
    num_after: int = REQ(converter=to_non_negative_int),
    narrow: OptionalNarrowListT = REQ("narrow", converter=narrow_parameter, default=None),
    use_first_unread_anchor_val: bool = REQ(
        "use_first_unread_anchor", json_validator=check_bool, default=False
    ),
    client_gravatar: bool = REQ(json_validator=check_bool, default=False),
    apply_markdown: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    """Main message-fetching endpoint (GET /messages): returns up to
    num_before + num_after + 1 messages around the requested anchor,
    filtered by the given narrow.  Supports both authenticated users
    and anonymous web-public queries.
    """
    anchor = parse_anchor_value(anchor_val, use_first_unread_anchor_val)
    if num_before + num_after > MAX_MESSAGES_PER_FETCH:
        return json_error(
            _("Too many messages requested (maximum {}).").format(
                MAX_MESSAGES_PER_FETCH,
            )
        )

    if not maybe_user_profile.is_authenticated:
        # If user is not authenticated, clients must include
        # `streams:web-public` in their narrow query to indicate this
        # is a web-public query.  This helps differentiate between
        # cases of web-public queries (where we should return the
        # web-public results only) and clients with buggy
        # authentication code (where we should return an auth error).
        if not is_web_public_narrow(narrow):
            raise MissingAuthenticationError()
        assert narrow is not None
        if not is_web_public_compatible(narrow):
            raise MissingAuthenticationError()

        realm = get_valid_realm_from_request(request)
        # We use None to indicate unauthenticated requests as it's more
        # readable than using AnonymousUser, and the lack of Django
        # stubs means that mypy can't check AnonymousUser well.
        user_profile: Optional[UserProfile] = None
        is_web_public_query = True
    else:
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile
        assert user_profile is not None
        realm = user_profile.realm
        is_web_public_query = False

    assert realm is not None

    if (
        is_web_public_query
        or realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE
    ):
        # If email addresses are only available to administrators,
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    include_history = ok_to_include_history(narrow, user_profile, is_web_public_query)
    if include_history:
        # The initial query in this case doesn't use `zerver_usermessage`,
        # and isn't yet limited to messages the user is entitled to see!
        #
        # This is OK only because we've made sure this is a narrow that
        # will cause us to limit the query appropriately elsewhere.
        # See `ok_to_include_history` for details.
        #
        # Note that is_web_public_query=True goes here, since
        # include_history is semantically correct for is_web_public_query.
        need_message = True
        need_user_message = False
    elif narrow is None:
        # We need to limit to messages the user has received, but we don't actually
        # need any fields from Message
        need_message = False
        need_user_message = True
    else:
        need_message = True
        need_user_message = True

    query: FromClause
    query, inner_msg_id_col = get_base_query_for_search(
        user_profile=user_profile,
        need_message=need_message,
        need_user_message=need_user_message,
    )

    query, is_search = add_narrow_conditions(
        user_profile=user_profile,
        inner_msg_id_col=inner_msg_id_col,
        query=query,
        narrow=narrow,
        realm=realm,
        is_web_public_query=is_web_public_query,
    )

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term["operator"] == "is":
                verbose_operators.append("is:" + term["operand"])
            else:
                verbose_operators.append(term["operator"])
        request._log_data["extra"] = "[{}]".format(",".join(verbose_operators))

    sa_conn = get_sqlalchemy_connection()

    if anchor is None:
        # `anchor=None` corresponds to the anchor="first_unread" parameter.
        anchor = find_first_unread_anchor(
            sa_conn,
            user_profile,
            narrow,
        )

    anchored_to_left = anchor == 0

    # Set value that will be used to short circuit the after_query
    # altogether and avoid needless conditions in the before_query.
    anchored_to_right = anchor >= LARGER_THAN_MAX_MESSAGE_ID
    if anchored_to_right:
        num_after = 0

    first_visible_message_id = get_first_visible_message_id(realm)
    query = limit_query_to_range(
        query=query,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        id_col=inner_msg_id_col,
        first_visible_message_id=first_visible_message_id,
    )

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id", Integer).asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_messages */")
    rows = list(sa_conn.execute(query).fetchall())

    query_info = post_process_limited_query(
        rows=rows,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        first_visible_message_id=first_visible_message_id,
    )

    rows = query_info["rows"]

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    message_ids: List[int] = []
    user_message_flags: Dict[int, List[str]] = {}
    if is_web_public_query:
        # For web-public users, we treat all historical messages as read.
        for row in rows:
            message_id = row[0]
            message_ids.append(message_id)
            user_message_flags[message_id] = ["read"]
    elif include_history:
        assert user_profile is not None
        message_ids = [row[0] for row in rows]

        # TODO: This could be done with an outer join instead of two queries
        um_rows = UserMessage.objects.filter(user_profile=user_profile, message_id__in=message_ids)
        user_message_flags = {um.message_id: um.flags_list() for um in um_rows}

        # Messages the user has no UserMessage row for are "historical":
        # visible via stream history but never delivered to this user.
        for message_id in message_ids:
            if message_id not in user_message_flags:
                user_message_flags[message_id] = ["read", "historical"]
    else:
        # The UserMessage flags bitmask was fetched as the second column.
        for row in rows:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = UserMessage.flags_list_for_flags(flags)
            message_ids.append(message_id)

    search_fields: Dict[int, Dict[str, str]] = {}
    if is_search:
        for row in rows:
            message_id = row[0]
            # The search columns were appended last by add_narrow_conditions.
            (topic_name, rendered_content, content_matches, topic_matches) = row[-4:]
            try:
                search_fields[message_id] = get_search_fields(
                    rendered_content, topic_name, content_matches, topic_matches
                )
            except UnicodeDecodeError as err:  # nocoverage
                # No coverage for this block since it should be
                # impossible, and we plan to remove it once we've
                # debugged the case that makes it happen.
                raise Exception(str(err), message_id, narrow)

    message_list = messages_for_ids(
        message_ids=message_ids,
        user_message_flags=user_message_flags,
        search_fields=search_fields,
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
        allow_edit_history=realm.allow_edit_history,
    )

    statsd.incr("loaded_old_messages", len(message_list))

    ret = dict(
        messages=message_list,
        result="success",
        msg="",
        found_anchor=query_info["found_anchor"],
        found_oldest=query_info["found_oldest"],
        found_newest=query_info["found_newest"],
        history_limited=query_info["history_limited"],
        anchor=anchor,
    )
    return json_success(ret)


def limit_query_to_range(
    query: Select,
    num_before: int,
    num_after: int,
    anchor: int,
    anchored_to_left: bool,
    anchored_to_right: bool,
    id_col: "ColumnElement[int]",
    first_visible_message_id: int,
) -> FromClause:
    """
    This code is actually generic enough that we could move it to a
    library, but our only caller for now is message search.
    """
    need_before_query = (not anchored_to_left) and (num_before > 0)
    need_after_query = (not anchored_to_right) and (num_after > 0)

    need_both_sides = need_before_query and need_after_query

    # The semantics of our flags are as follows:
    #
    # num_before = number of rows < anchor
    # num_after = number of rows > anchor
    #
    # But we also want the row where id == anchor (if it exists),
    # and we don't want to union up to 3 queries.  So in some cases
    # we do things like `after_limit = num_after + 1` to grab the
    # anchor row in the "after" query.
    #
    # Note that in some cases, if the anchor row isn't found, we
    # actually may fetch an extra row at one of the extremes.
    if need_both_sides:
        before_anchor = anchor - 1
        after_anchor = max(anchor, first_visible_message_id)
        before_limit = num_before
        after_limit = num_after + 1
    elif need_before_query:
        before_anchor = anchor
        before_limit = num_before
        if not anchored_to_right:
            before_limit += 1
    elif need_after_query:
        after_anchor = max(anchor, first_visible_message_id)
        after_limit = num_after + 1

    if need_before_query:
        before_query = query

        if not anchored_to_right:
            before_query = before_query.where(id_col <= before_anchor)

        before_query = before_query.order_by(id_col.desc())
        before_query = before_query.limit(before_limit)

    if need_after_query:
        after_query = query

        if not anchored_to_left:
            after_query = after_query.where(id_col >= after_anchor)

        after_query = after_query.order_by(id_col.asc())
        after_query = after_query.limit(after_limit)

    if need_both_sides:
        return union_all(before_query.self_group(), after_query.self_group())
    elif need_before_query:
        return before_query
    elif need_after_query:
        return after_query
    else:
        # If we don't have either a before_query or after_query, it's because
        # some combination of num_before/num_after/anchor are zero or
        # use_first_unread_anchor logic found no unread messages.
        #
        # The most likely reason is somebody is doing an id search, so searching
        # for something like `message_id = 42` is exactly what we want.  In other
        # cases, which could possibly be buggy API clients, at least we will
        # return at most one row here.
        return query.where(id_col == anchor)


def post_process_limited_query(
    rows: Sequence[Union[RowProxy, Sequence[Any]]],
    num_before: int,
    num_after: int,
    anchor: int,
    anchored_to_left: bool,
    anchored_to_right: bool,
    first_visible_message_id: int,
) -> Dict[str, Any]:
    """Trim the raw query rows to the requested window and compute the
    found_anchor/found_oldest/found_newest/history_limited metadata
    that clients use to know whether results are complete.
    """
    # Our queries may have fetched extra rows if they added
    # "headroom" to the limits, but we want to truncate those
    # rows.
    #
    # Also, in cases where we had non-zero values of num_before or
    # num_after, we want to know found_oldest and found_newest, so
    # that the clients will know that they got complete results.

    if first_visible_message_id > 0:
        visible_rows: Sequence[Union[RowProxy, Sequence[Any]]] = [
            r for r in rows if r[0] >= first_visible_message_id
        ]
    else:
        visible_rows = rows

    rows_limited = len(visible_rows) != len(rows)

    if anchored_to_right:
        num_after = 0
        before_rows = visible_rows[:]
        anchor_rows = []
        after_rows = []
    else:
        before_rows = [r for r in visible_rows if r[0] < anchor]
        anchor_rows = [r for r in visible_rows if r[0] == anchor]
        after_rows = [r for r in visible_rows if r[0] > anchor]

    if num_before:
        before_rows = before_rows[-1 * num_before :]

    if num_after:
        after_rows = after_rows[:num_after]

    visible_rows = [*before_rows, *anchor_rows, *after_rows]

    found_anchor = len(anchor_rows) == 1
    found_oldest = anchored_to_left or (len(before_rows) < num_before)
    found_newest = anchored_to_right or (len(after_rows) < num_after)
    # BUG: history_limited is incorrectly False in the event that we had
    # to bump `anchor` up due to first_visible_message_id, and there
    # were actually older messages.  This may be a rare event in the
    # context where history_limited is relevant, because it can only
    # happen in one-sided queries with no num_before (see tests tagged
    # BUG in PostProcessTest for examples), and we don't generally do
    # those from the UI, so this might be OK for now.
    #
    # The correct fix for this probably involves e.g. making a
    # `before_query` when we increase `anchor` just to confirm whether
    # messages were hidden.
    history_limited = rows_limited and found_oldest

    return dict(
        rows=visible_rows,
        found_anchor=found_anchor,
        found_newest=found_newest,
        found_oldest=found_oldest,
        history_limited=history_limited,
    )


@has_request_variables
def messages_in_narrow_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    msg_ids: List[int] = REQ(json_validator=check_list(check_int)),
    narrow: OptionalNarrowListT = REQ(converter=narrow_parameter),
) -> HttpResponse:
    """Given a list of message IDs the user has received, return search
    highlight data for those of them that match the given narrow.
    """
    first_visible_message_id = get_first_visible_message_id(user_profile.realm)
    msg_ids = [message_id for message_id in msg_ids if message_id >= first_visible_message_id]
    # This query is limited to messages the user has access to because they
    # actually received them, as reflected in `zerver_usermessage`.
    query = select(
        [column("message_id", Integer), topic_column_sa(), column("rendered_content", Text)],
        and_(
            column("user_profile_id", Integer) == literal(user_profile.id),
            column("message_id", Integer).in_(msg_ids),
        ),
        join(
            table("zerver_usermessage"),
            table("zerver_message"),
            literal_column("zerver_usermessage.message_id", Integer)
            == literal_column("zerver_message.id", Integer),
        ),
    )

    builder = NarrowBuilder(user_profile, column("message_id", Integer), user_profile.realm)
    if narrow is not None:
        for term in narrow:
            query = builder.add_term(query, term)

    sa_conn = get_sqlalchemy_connection()
    query_result = list(sa_conn.execute(query).fetchall())

    search_fields = {}
    for row in query_result:
        message_id = row["message_id"]
        topic_name = row[DB_TOPIC_NAME]
        rendered_content = row["rendered_content"]
        # Rows only carry match columns when a search term was in the narrow.
        if "content_matches" in row:
            content_matches = row["content_matches"]
            topic_matches = row["topic_matches"]
        else:
            content_matches = topic_matches = []
        search_fields[str(message_id)] = get_search_fields(
            rendered_content,
            topic_name,
            content_matches,
            topic_matches,
        )

    return json_success({"messages": search_fields})
39.057227
154
0.64544
import re
from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Tuple, Union

import orjson
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import ValidationError
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.utils.html import escape as escape_html
from django.utils.translation import gettext as _
from sqlalchemy import func
from sqlalchemy.dialects import postgresql
from sqlalchemy.engine import Connection, RowProxy
from sqlalchemy.sql import (
    ClauseElement,
    ColumnElement,
    FromClause,
    Select,
    alias,
    and_,
    column,
    join,
    literal,
    literal_column,
    not_,
    or_,
    select,
    table,
    union_all,
)
from sqlalchemy.types import Boolean, Integer, Text

from zerver.context_processors import get_valid_realm_from_request
from zerver.decorator import REQ, has_request_variables
from zerver.lib.actions import recipient_for_user_profiles
from zerver.lib.addressee import get_user_profiles, get_user_profiles_by_ids
from zerver.lib.exceptions import ErrorCode, JsonableError, MissingAuthenticationError
from zerver.lib.message import get_first_visible_message_id, messages_for_ids
from zerver.lib.narrow import is_web_public_compatible, is_web_public_narrow
from zerver.lib.response import json_error, json_success
from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
from zerver.lib.streams import (
    can_access_stream_history_by_id,
    can_access_stream_history_by_name,
    get_public_streams_queryset,
    get_stream_by_narrow_operand_access_unchecked,
    get_web_public_streams_queryset,
)
from zerver.lib.topic import DB_TOPIC_NAME, MATCH_TOPIC, topic_column_sa, topic_match_sa
from zerver.lib.topic_mutes import exclude_topic_mutes
from zerver.lib.types import Validator
from zerver.lib.utils import statsd
from zerver.lib.validator import (
    check_bool,
    check_dict,
    check_int,
    check_list,
    check_required_string,
    check_string,
    check_string_or_int,
    check_string_or_int_list,
    to_non_negative_int,
)
from zerver.models import (
    Realm,
    Recipient,
    Stream,
    Subscription,
    UserMessage,
    UserProfile,
    get_active_streams,
    get_user_by_id_in_realm_including_cross_realm,
    get_user_including_cross_realm,
)

# Sentinel anchor value larger than any real message ID; used to mean
# "anchor at the very newest message".
LARGER_THAN_MAX_MESSAGE_ID = 10000000000000000

# Hard cap on num_before + num_after in a single GET /messages request.
MAX_MESSAGES_PER_FETCH = 5000


class BadNarrowOperator(JsonableError):
    """Raised when a narrow term has an unknown operator or invalid operand."""

    code = ErrorCode.BAD_NARROW
    data_fields = ["desc"]

    def __init__(self, desc: str) -> None:
        self.desc: str = desc

    @staticmethod
    def msg_format() -> str:
        return _("Invalid narrow operator: {desc}")


# A ConditionTransform either negates a SQL condition (not_) or passes it
# through unchanged; see NarrowBuilder.add_term.
ConditionTransform = Callable[[ClauseElement], ClauseElement]

OptionalNarrowListT = Optional[List[Dict[str, Any]]]

# These delimiters will not appear in rendered messages or HTML-escaped
# topics, so they are safe markers for ts_headline match boundaries.
TS_START = "<ts-match>"
TS_STOP = "</ts-match>"


def ts_locs_array(
    config: "ColumnElement[str]",
    text: "ColumnElement[str]",
    tsquery: "ColumnElement[object]",
) -> "ColumnElement[List[List[int]]]":
    """Return a SQL expression computing [offset, length] pairs for each
    full-text-search match of tsquery in text, using ts_headline with
    sentinel delimiters and then measuring the delimited segments."""
    options = f"HighlightAll = TRUE, StartSel = {TS_START}, StopSel = {TS_STOP}"
    delimited = func.ts_headline(config, text, tsquery, options)
    # Split on the start marker; each part (after the first) begins with a
    # match, terminated by the stop marker.
    parts = func.unnest(func.string_to_array(delimited, TS_START)).alias()
    part = column(parts.name, Text)
    part_len = func.length(part) - len(TS_STOP)
    # Running sum of preceding part lengths gives the match's offset.
    match_pos = func.sum(part_len).over(rows=(None, -1)) + len(TS_STOP)
    match_len = func.strpos(part, TS_STOP) - 1
    # NOTE(review): this expression was garbled in the source; reconstructed
    # to select the [pos, len] arrays from the `parts` alias, skipping the
    # text before the first match -- confirm against upstream.
    ret = func.array(
        select([postgresql.array([match_pos, match_len])])
        .select_from(parts)
        .offset(1)
        .as_scalar(),
    )
    return ret


class NarrowBuilder:
    # Build up a SQLAlchemy query to find messages matching a narrow.
    # (NOTE(review): the opening of this block comment appears to have been
    # lost in transit; summary line above reconstructed -- confirm upstream.)
    #
    # That is, the `add_term` method, and its helpers the `by_*` methods,
    # are passed a Select object representing a query for messages; they may
    # call some methods on it, and then they return a resulting Select
    # object.  Things these methods may do to the queries they handle
    # include
    #  * add conditions to filter out rows (i.e., messages), with `query.where`
    #  * add columns for more information on the same message, with `query.column`
    #  * add a join for more information on the same message
    #
    # Things they may not do include
    #  * anything that would pull in additional rows, or information on
    #    other messages.

    def __init__(
        self,
        user_profile: Optional[UserProfile],
        msg_id_column: "ColumnElement[int]",
        realm: Realm,
        is_web_public_query: bool = False,
    ) -> None:
        self.user_profile = user_profile
        self.msg_id_column = msg_id_column
        self.realm = realm
        self.is_web_public_query = is_web_public_query

    def add_term(self, query: Select, term: Dict[str, Any]) -> Select:
        """Dispatch one narrow term to the matching by_* method and return
        the extended query."""
        # To maintain the security property, we hold all the `by_*`
        # methods to the same criterion. See the class's block comment
        # for details.

        # We have to be careful here because we're letting users call a method
        # by name! The prefix 'by_' prevents it from colliding with builtin
        # Python __magic__ stuff.
        operator = term["operator"]
        operand = term["operand"]

        negated = term.get("negated", False)

        method_name = "by_" + operator.replace("-", "_")
        method = getattr(self, method_name, None)
        if method is None:
            raise BadNarrowOperator("unknown operator " + operator)

        if negated:
            maybe_negate = not_
        else:
            maybe_negate = lambda cond: cond

        return method(query, operand, maybe_negate)

    def by_has(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        if operand not in ["attachment", "image", "link"]:
            raise BadNarrowOperator("unknown 'has' operand " + operand)
        col_name = "has_" + operand
        cond = column(col_name, Boolean)
        return query.where(maybe_negate(cond))

    def by_in(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        # This operator does not support is_web_public_query.
        assert not self.is_web_public_query
        assert self.user_profile is not None

        if operand == "home":
            conditions = exclude_muting_conditions(self.user_profile, [])
            return query.where(and_(*conditions))
        elif operand == "all":
            return query

        raise BadNarrowOperator("unknown 'in' operand " + operand)

    def by_is(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        # This operator class does not support is_web_public_query.
        assert not self.is_web_public_query
        assert self.user_profile is not None

        # Each case tests a bit (or bits) of the UserMessage flags bitfield.
        if operand == "private":
            cond = column("flags", Integer).op("&")(UserMessage.flags.is_private.mask) != 0
            return query.where(maybe_negate(cond))
        elif operand == "starred":
            cond = column("flags", Integer).op("&")(UserMessage.flags.starred.mask) != 0
            return query.where(maybe_negate(cond))
        elif operand == "unread":
            cond = column("flags", Integer).op("&")(UserMessage.flags.read.mask) == 0
            return query.where(maybe_negate(cond))
        elif operand == "mentioned":
            cond1 = column("flags", Integer).op("&")(UserMessage.flags.mentioned.mask) != 0
            cond2 = column("flags", Integer).op("&")(UserMessage.flags.wildcard_mentioned.mask) != 0
            cond = or_(cond1, cond2)
            return query.where(maybe_negate(cond))
        elif operand == "alerted":
            cond = column("flags", Integer).op("&")(UserMessage.flags.has_alert_word.mask) != 0
            return query.where(maybe_negate(cond))
        raise BadNarrowOperator("unknown 'is' operand " + operand)

    _alphanum = frozenset("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")

    def _pg_re_escape(self, pattern: str) -> str:
        """Escape text for safe embedding in a PostgreSQL regex."""
        s = list(pattern)
        for i, c in enumerate(s):
            if c not in self._alphanum:
                if ord(c) >= 128:
                    # convert the character to hex PostgreSQL regex will take
                    # \uXXXX
                    s[i] = f"\\u{ord(c):0>4x}"
                else:
                    s[i] = "\\" + c
        return "".join(s)

    def by_stream(
        self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform
    ) -> Select:
        try:
            # Because you can see your own message history for
            # private streams you are no longer subscribed to, we
            # need get_stream_by_narrow_operand_access_unchecked here.
            stream = get_stream_by_narrow_operand_access_unchecked(operand, self.realm)

            if self.is_web_public_query and not stream.is_web_public:
                raise BadNarrowOperator("unknown web-public stream " + str(operand))
        except Stream.DoesNotExist:
            raise BadNarrowOperator("unknown stream " + str(operand))

        if self.realm.is_zephyr_mirror_realm:
            # MIT users expect narrowing to "social" to also show messages to
            # /^(un)*social(.d)*$/ (unsocial, ununsocial, social.d, ...).

            # In `ok_to_include_history`, we assume that a non-negated
            # `stream` term for a public stream will limit the query to
            # that specific stream. So it would be a bug to hit this
            # codepath after relying on this term there. But all streams in
            # a Zephyr realm are private, so that doesn't happen.
            assert not stream.is_public()

            m = re.search(r"^(?:un)*(.+?)(?:\.d)*$", stream.name, re.IGNORECASE)
            # Since the regex has a `.+` in it and "" is invalid as a
            # stream name, this will always match
            assert m is not None
            base_stream_name = m.group(1)

            matching_streams = get_active_streams(self.realm).filter(
                name__iregex=fr"^(un)*{self._pg_re_escape(base_stream_name)}(\.d)*$"
            )
            recipient_ids = [matching_stream.recipient_id for matching_stream in matching_streams]
            cond = column("recipient_id", Integer).in_(recipient_ids)
            return query.where(maybe_negate(cond))

        recipient = stream.recipient
        cond = column("recipient_id", Integer) == recipient.id
        return query.where(maybe_negate(cond))

    def by_streams(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        if operand == "public":
            # Get all both subscribed and non-subscribed public streams
            # but exclude any private subscribed streams.
            recipient_queryset = get_public_streams_queryset(self.realm)
        elif operand == "web-public":
            recipient_queryset = get_web_public_streams_queryset(self.realm)
        else:
            raise BadNarrowOperator("unknown streams operand " + operand)

        recipient_ids = recipient_queryset.values_list("recipient_id", flat=True).order_by("id")
        cond = column("recipient_id", Integer).in_(recipient_ids)
        return query.where(maybe_negate(cond))

    def by_topic(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        if self.realm.is_zephyr_mirror_realm:
            # MIT users expect narrowing to topic "foo" to also show messages
            # to /^foo(.d)*$/ (foo.d, foo.d.d, etc).
            m = re.search(r"^(.*?)(?:\.d)*$", operand, re.IGNORECASE)
            # Since the regex has a `.*` in it, this will always match
            assert m is not None
            base_topic = m.group(1)

            # Additionally, MIT users expect the empty instance and
            # instance "personal" to be the same.
            if base_topic in ("", "personal", '(instance "")'):
                cond: ClauseElement = or_(
                    topic_match_sa(""),
                    topic_match_sa(".d"),
                    topic_match_sa(".d.d"),
                    topic_match_sa(".d.d.d"),
                    topic_match_sa(".d.d.d.d"),
                    topic_match_sa("personal"),
                    topic_match_sa("personal.d"),
                    topic_match_sa("personal.d.d"),
                    topic_match_sa("personal.d.d.d"),
                    topic_match_sa("personal.d.d.d.d"),
                    topic_match_sa('(instance "")'),
                    topic_match_sa('(instance "").d'),
                    topic_match_sa('(instance "").d.d'),
                    topic_match_sa('(instance "").d.d.d'),
                    topic_match_sa('(instance "").d.d.d.d'),
                )
            else:
                # We limit `.d` counts, since PostgreSQL has much better
                # query planning for this than they do for a regular
                # expression (which would sometimes table scan).
                cond = or_(
                    topic_match_sa(base_topic),
                    topic_match_sa(base_topic + ".d"),
                    topic_match_sa(base_topic + ".d.d"),
                    topic_match_sa(base_topic + ".d.d.d"),
                    topic_match_sa(base_topic + ".d.d.d.d"),
                )
            return query.where(maybe_negate(cond))

        cond = topic_match_sa(operand)
        return query.where(maybe_negate(cond))

    def by_sender(
        self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform
    ) -> Select:
        try:
            if isinstance(operand, str):
                sender = get_user_including_cross_realm(operand, self.realm)
            else:
                sender = get_user_by_id_in_realm_including_cross_realm(operand, self.realm)
        except UserProfile.DoesNotExist:
            raise BadNarrowOperator("unknown user " + str(operand))

        cond = column("sender_id", Integer) == literal(sender.id)
        return query.where(maybe_negate(cond))

    def by_near(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        # Server-side no-op; presumably "near" anchoring is handled entirely
        # by the client -- NOTE(review): confirm against the web app code.
        return query

    def by_id(
        self, query: Select, operand: Union[int, str], maybe_negate: ConditionTransform
    ) -> Select:
        if not str(operand).isdigit():
            raise BadNarrowOperator("Invalid message ID")
        cond = self.msg_id_column == literal(operand)
        return query.where(maybe_negate(cond))

    def by_pm_with(
        self, query: Select, operand: Union[str, Iterable[int]], maybe_negate: ConditionTransform
    ) -> Select:
        # This operator does not support is_web_public_query.
        assert not self.is_web_public_query
        assert self.user_profile is not None

        try:
            if isinstance(operand, str):
                email_list = operand.split(",")
                user_profiles = get_user_profiles(
                    emails=email_list,
                    realm=self.realm,
                )
            else:
                """
                This is where we handle passing a list of user IDs for the narrow, which is the preferred/cleaner API.
                """
                user_profiles = get_user_profiles_by_ids(
                    user_ids=operand,
                    realm=self.realm,
                )

            recipient = recipient_for_user_profiles(
                user_profiles=user_profiles,
                forwarded_mirror_message=False,
                forwarder_user_profile=None,
                sender=self.user_profile,
                allow_deactivated=True,
            )
        except (JsonableError, ValidationError):
            raise BadNarrowOperator("unknown user in " + str(operand))

        # Group DM
        if recipient.type == Recipient.HUDDLE:
            cond = column("recipient_id", Integer) == recipient.id
            return query.where(maybe_negate(cond))

        # 1:1 PM
        other_participant = None

        # Find if another person is in PM
        for user in user_profiles:
            if user.id != self.user_profile.id:
                other_participant = user

        # PM with another person
        if other_participant:
            # We need bidirectional messages PM with another person.
            # But Recipient.PERSONAL objects only encode the person who
            # received the message, so we need the sender-side condition too.
            self_recipient_id = self.user_profile.recipient_id
            cond = or_(
                and_(
                    column("sender_id", Integer) == other_participant.id,
                    column("recipient_id", Integer) == self_recipient_id,
                ),
                and_(
                    column("sender_id", Integer) == self.user_profile.id,
                    column("recipient_id", Integer) == recipient.id,
                ),
            )
            return query.where(maybe_negate(cond))

        # PM with self
        cond = and_(
            column("sender_id", Integer) == self.user_profile.id,
            column("recipient_id", Integer) == recipient.id,
        )
        return query.where(maybe_negate(cond))

    def by_group_pm_with(
        self, query: Select, operand: Union[str, int], maybe_negate: ConditionTransform
    ) -> Select:
        # This operator does not support is_web_public_query.
        assert not self.is_web_public_query
        assert self.user_profile is not None

        try:
            if isinstance(operand, str):
                narrow_profile = get_user_including_cross_realm(operand, self.realm)
            else:
                narrow_profile = get_user_by_id_in_realm_including_cross_realm(operand, self.realm)
        except UserProfile.DoesNotExist:
            raise BadNarrowOperator("unknown user " + str(operand))

        self_recipient_ids = [
            recipient_tuple["recipient_id"]
            for recipient_tuple in Subscription.objects.filter(
                user_profile=self.user_profile,
                recipient__type=Recipient.HUDDLE,
            ).values("recipient_id")
        ]
        narrow_recipient_ids = [
            recipient_tuple["recipient_id"]
            for recipient_tuple in Subscription.objects.filter(
                user_profile=narrow_profile,
                recipient__type=Recipient.HUDDLE,
            ).values("recipient_id")
        ]

        # Group PMs whose huddle includes both the requesting user and the
        # narrowed-to user.
        recipient_ids = set(self_recipient_ids) & set(narrow_recipient_ids)
        cond = column("recipient_id", Integer).in_(recipient_ids)
        return query.where(maybe_negate(cond))

    def by_search(self, query: Select, operand: str, maybe_negate: ConditionTransform) -> Select:
        if settings.USING_PGROONGA:
            return self._by_search_pgroonga(query, operand, maybe_negate)
        else:
            return self._by_search_tsearch(query, operand, maybe_negate)

    def _by_search_pgroonga(
        self, query: Select, operand: str, maybe_negate: ConditionTransform
    ) -> Select:
        match_positions_character = func.pgroonga_match_positions_character
        query_extract_keywords = func.pgroonga_query_extract_keywords
        operand_escaped = func.escape_html(operand)
        keywords = query_extract_keywords(operand_escaped)
        query = query.column(
            match_positions_character(column("rendered_content", Text), keywords).label(
                "content_matches"
            )
        )
        query = query.column(
            match_positions_character(func.escape_html(topic_column_sa()), keywords).label(
                "topic_matches"
            )
        )
        condition = column("search_pgroonga").op("&@~")(operand_escaped)
        return query.where(maybe_negate(condition))

    def _by_search_tsearch(
        self, query: Select, operand: str, maybe_negate: ConditionTransform
    ) -> Select:
        tsquery = func.plainto_tsquery(literal("zulip.english_us_search"), literal(operand))
        query = query.column(
            ts_locs_array(
                literal("zulip.english_us_search"), column("rendered_content", Text), tsquery
            ).label("content_matches")
        )
        # We HTML-escape the topic in PostgreSQL to avoid doing a server
        # round-trip; highlight offsets are relative to the escaped text.
        query = query.column(
            ts_locs_array(
                literal("zulip.english_us_search"), func.escape_html(topic_column_sa()), tsquery
            ).label("topic_matches")
        )

        # Do quoted string matching.  We really want phrase
        # search here so we can ignore punctuation and do
        # stemming, but there isn't a standard phrase search
        # mechanism in PostgreSQL
        for term in re.findall(r'"[^"]+"|\S+', operand):
            if term[0] == '"' and term[-1] == '"':
                term = term[1:-1]
                term = "%" + connection.ops.prep_for_like_query(term) + "%"
                cond = or_(column("content", Text).ilike(term), topic_column_sa().ilike(term))
                query = query.where(maybe_negate(cond))

        cond = column("search_tsvector", postgresql.TSVECTOR).op("@@")(tsquery)
        return query.where(maybe_negate(cond))


def highlight_string(text: str, locs: Iterable[Tuple[int, int]]) -> str:
    """Wrap each (offset, length) match span in text with highlight markup,
    suppressing the markup when a match falls inside an HTML tag."""
    highlight_start = '<span class="highlight">'
    highlight_stop = "</span>"
    pos = 0
    result = ""
    in_tag = False

    for loc in locs:
        (offset, length) = loc

        prefix_start = pos
        prefix_end = offset
        match_start = offset
        match_end = offset + length

        prefix = text[prefix_start:prefix_end]
        match = text[match_start:match_end]

        # Track whether the match position sits inside an HTML tag; if so,
        # inserting <span> would corrupt the markup, so emit it unhighlighted.
        for character in prefix + match:
            if character == "<":
                in_tag = True
            elif character == ">":
                in_tag = False
        if in_tag:
            result += prefix
            result += match
        else:
            result += prefix
            result += highlight_start
            result += match
            result += highlight_stop
        pos = match_end

    result += text[pos:]
    return result


def get_search_fields(
    rendered_content: str,
    topic_name: str,
    content_matches: Iterable[Tuple[int, int]],
    topic_matches: Iterable[Tuple[int, int]],
) -> Dict[str, str]:
    """Build the highlighted content/topic fields for a search result row."""
    return {
        "match_content": highlight_string(rendered_content, content_matches),
        MATCH_TOPIC: highlight_string(escape_html(topic_name), topic_matches),
    }


def narrow_parameter(json: str) -> OptionalNarrowListT:
    """Parse and validate the `narrow` request parameter into a list of
    term dicts (operator/operand/negated), or None for the empty narrow."""
    data = orjson.loads(json)
    if not isinstance(data, list):
        raise ValueError("argument is not a list")
    if len(data) == 0:
        # The "empty narrow" should be None, and not []
        return None

    def convert_term(elem: Union[Dict[str, Any], List[str]]) -> Dict[str, Any]:

        # We have to support a legacy tuple format.
        if isinstance(elem, list):
            if len(elem) != 2 or any(not isinstance(x, str) for x in elem):
                raise ValueError("element is not a string pair")
            return dict(operator=elem[0], operand=elem[1])

        if isinstance(elem, dict):
            # Make sure to sync this list to frontend also when adding a new operator
            # that supports user IDs. Relevant code is located in static/js/message_fetch.js
            # in handle_operators_supporting_id_based_api function where you will need to update
            # operators_supporting_id, or operators_supporting_ids array.
            operators_supporting_id = ["sender", "group-pm-with", "stream"]
            operators_supporting_ids = ["pm-with"]
            operators_non_empty_operand = {"search"}

            operator = elem.get("operator", "")
            if operator in operators_supporting_id:
                operand_validator: Validator[object] = check_string_or_int
            elif operator in operators_supporting_ids:
                operand_validator = check_string_or_int_list
            elif operator in operators_non_empty_operand:
                operand_validator = check_required_string
            else:
                operand_validator = check_string

            validator = check_dict(
                required_keys=[
                    ("operator", check_string),
                    ("operand", operand_validator),
                ],
                optional_keys=[
                    ("negated", check_bool),
                ],
            )

            try:
                validator("elem", elem)
            except ValidationError as error:
                raise JsonableError(error.message)

            # whitelist the fields we care about for now
            return dict(
                operator=elem["operator"],
                operand=elem["operand"],
                negated=elem.get("negated", False),
            )

        raise ValueError("element is not a dictionary")

    return list(map(convert_term, data))


def ok_to_include_history(
    narrow: OptionalNarrowListT, user_profile: Optional[UserProfile], is_web_public_query: bool
) -> bool:
    # There are occasions where we need to find Message rows that
    # have no corresponding UserMessage row, because the user is
    # reading a public stream that might include messages that
    # were sent while the user was not subscribed, but which they are
    # allowed to see.  We have to be very careful about constructing
    # queries in those situations, so this function should return True
    # only if we are 100% sure that we're gonna add a clause to the
    # query that narrows to a particular public stream on the user's realm.
    # If we screw this up, then we can get into a nasty situation of
    # polluting our narrow results with messages from other realms.

    # For web-public queries, we are always returning history.  The
    # analogues of the below stream access checks for whether streams
    # have is_web_public set and banning is operators in this code
    # path are done directly in NarrowBuilder.
    if is_web_public_query:
        assert user_profile is None
        return True

    assert user_profile is not None

    include_history = False
    if narrow is not None:
        for term in narrow:
            if term["operator"] == "stream" and not term.get("negated", False):
                operand: Union[str, int] = term["operand"]
                if isinstance(operand, str):
                    include_history = can_access_stream_history_by_name(user_profile, operand)
                else:
                    include_history = can_access_stream_history_by_id(user_profile, operand)
            elif (
                term["operator"] == "streams"
                and term["operand"] == "public"
                and not term.get("negated", False)
                and user_profile.can_access_public_streams()
            ):
                include_history = True
        # Disable historical messages if the user is narrowing on anything
        # that's a property on the UserMessage table.  There cannot be
        # historical messages in these cases anyway.
        for term in narrow:
            if term["operator"] == "is":
                include_history = False

    return include_history


def get_stream_from_narrow_access_unchecked(
    narrow: OptionalNarrowListT, realm: Realm
) -> Optional[Stream]:
    """Return the Stream for the first `stream` term in narrow, without any
    access check (callers must only use this for exclusion, not inclusion)."""
    if narrow is not None:
        for term in narrow:
            if term["operator"] == "stream":
                return get_stream_by_narrow_operand_access_unchecked(term["operand"], realm)
    return None


def exclude_muting_conditions(
    user_profile: UserProfile, narrow: OptionalNarrowListT
) -> List[ClauseElement]:
    """Return SQL conditions excluding muted streams/topics for this user
    and narrow.  Only exclusion, so the unchecked stream lookup is safe."""
    conditions = []
    stream_id = None
    try:
        # Note: It is okay here to not check access to stream
        # because we are only using the stream id to exclude data,
        # not to include results.
        stream = get_stream_from_narrow_access_unchecked(narrow, user_profile.realm)
        if stream is not None:
            stream_id = stream.id
    except Stream.DoesNotExist:
        pass

    # Stream-level muting only applies when looking at views that
    # include multiple streams, since we do want users to be able to
    # browser messages within a muted stream.
    if stream_id is None:
        rows = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            is_muted=True,
            recipient__type=Recipient.STREAM,
        ).values("recipient_id")
        muted_recipient_ids = [row["recipient_id"] for row in rows]
        if len(muted_recipient_ids) > 0:
            # Only add the condition if we have muted streams to simplify/avoid warnings.
            condition = not_(column("recipient_id", Integer).in_(muted_recipient_ids))
            conditions.append(condition)

    conditions = exclude_topic_mutes(conditions, user_profile, stream_id)

    # Muted user logic for hiding messages is implemented entirely
    # client-side. This is by design, as it allows UI to hint that
    # muted messages exist where their absence might make conversation
    # difficult to understand. As a result, we do not need to consider
    # muted users in this server-side logic for returning messages to
    # clients. (We could in theory exclude PMs from muted users, but
    # they're likely to be sufficiently rare to not be worth extra
    # logic/testing here).

    return conditions


def get_base_query_for_search(
    user_profile: Optional[UserProfile], need_message: bool, need_user_message: bool
) -> Tuple[Select, "ColumnElement[int]"]:
    """Return the base SELECT (over zerver_message and/or zerver_usermessage,
    as required) plus the column holding the message ID."""
    # Handle the simple case where user_message isn't involved first.
    if not need_user_message:
        assert need_message
        query = select([column("id", Integer).label("message_id")], None, table("zerver_message"))
        inner_msg_id_col: ColumnElement[int]
        inner_msg_id_col = literal_column("zerver_message.id", Integer)  # type: ignore[assignment] # https://github.com/dropbox/sqlalchemy-stubs/pull/189
        return (query, inner_msg_id_col)

    assert user_profile is not None
    if need_message:
        query = select(
            [column("message_id"), column("flags", Integer)],
            column("user_profile_id") == literal(user_profile.id),
            join(
                table("zerver_usermessage"),
                table("zerver_message"),
                literal_column("zerver_usermessage.message_id", Integer)
                == literal_column("zerver_message.id", Integer),
            ),
        )
        inner_msg_id_col = column("message_id", Integer)
        return (query, inner_msg_id_col)

    query = select(
        [column("message_id"), column("flags", Integer)],
        column("user_profile_id") == literal(user_profile.id),
        table("zerver_usermessage"),
    )
    inner_msg_id_col = column("message_id", Integer)
    return (query, inner_msg_id_col)


def add_narrow_conditions(
    user_profile: Optional[UserProfile],
    inner_msg_id_col: "ColumnElement[int]",
    query: Select,
    narrow: OptionalNarrowListT,
    is_web_public_query: bool,
    realm: Realm,
) -> Tuple[Select, bool]:
    """Apply all narrow terms to query via NarrowBuilder; returns the new
    query and whether any full-text search term was present."""
    is_search = False  # for now

    if narrow is None:
        return (query, is_search)

    # Build the query for the narrow
    builder = NarrowBuilder(user_profile, inner_msg_id_col, realm, is_web_public_query)
    search_operands = []

    # As we loop through terms, builder does most of the work to extend
    # our query, but we need to collect the search operands and handle
    # them after the loop.
    for term in narrow:
        if term["operator"] == "search":
            search_operands.append(term["operand"])
        else:
            query = builder.add_term(query, term)

    if search_operands:
        # Combine all search terms into one, so they are matched together.
        is_search = True
        query = query.column(topic_column_sa()).column(column("rendered_content", Text))
        search_term = dict(
            operator="search",
            operand=" ".join(search_operands),
        )
        query = builder.add_term(query, search_term)

    return (query, is_search)


def find_first_unread_anchor(
    sa_conn: Connection, user_profile: Optional[UserProfile], narrow: OptionalNarrowListT
) -> int:
    """Return the ID of the user's oldest unread message in the narrow, or
    LARGER_THAN_MAX_MESSAGE_ID if everything is read."""
    # For anonymous web users, all messages are treated as read, and so
    # always return LARGER_THAN_MAX_MESSAGE_ID.
    if user_profile is None:
        return LARGER_THAN_MAX_MESSAGE_ID

    # We always need UserMessage in our query, because it has the unread
    # flag for the user.
    need_user_message = True

    # Because we will need to call exclude_muting_conditions, unless
    # the user hasn't muted anything, we will need to include Message
    # in our query.  It may be worth eventually adding an optimization
    # for the case of a user who hasn't muted anything to avoid the
    # join in that case, but it's low priority.
    need_message = True

    query, inner_msg_id_col = get_base_query_for_search(
        user_profile=user_profile,
        need_message=need_message,
        need_user_message=need_user_message,
    )

    query, is_search = add_narrow_conditions(
        user_profile=user_profile,
        inner_msg_id_col=inner_msg_id_col,
        query=query,
        narrow=narrow,
        is_web_public_query=False,
        realm=user_profile.realm,
    )

    condition = column("flags", Integer).op("&")(UserMessage.flags.read.mask) == 0

    # We exclude messages on muted topics when finding the first unread
    # message in this narrow
    muting_conditions = exclude_muting_conditions(user_profile, narrow)
    if muting_conditions:
        condition = and_(condition, *muting_conditions)

    first_unread_query = query.where(condition)
    first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
    first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
    if len(first_unread_result) > 0:
        anchor = first_unread_result[0][0]
    else:
        anchor = LARGER_THAN_MAX_MESSAGE_ID

    return anchor


def parse_anchor_value(anchor_val: Optional[str], use_first_unread_anchor: bool) -> Optional[int]:
    """Parses the anchor_val parameter to the /messages API endpoint.
    Returns None if the anchor is the magic keyword "first_unread"."""
    if use_first_unread_anchor:
        # Backwards-compatibility: Before we added support for the
        # special string-typed anchor values, clients would pass
        # anchor=None and use_first_unread_anchor=True to indicate
        # what is now expressed as anchor="first_unread".
        return None
    if anchor_val is None:
        # Throw an exception if neither an anchor argument not
        # use_first_unread_anchor was specified.
        raise JsonableError(_("Missing 'anchor' argument."))
    if anchor_val == "oldest":
        return 0
    if anchor_val == "newest":
        return LARGER_THAN_MAX_MESSAGE_ID
    if anchor_val == "first_unread":
        return None
    try:
        # We don't use `.isnumeric()` to support negative numbers for
        # anchor.  We don't recommend it in the API (if you want the
        # very first message, use 0 or 1), but it used to be supported
        # and was used by the web app, so we need to continue
        # supporting it for backwards-compatibility
        anchor = int(anchor_val)
        if anchor < 0:
            return 0
        elif anchor > LARGER_THAN_MAX_MESSAGE_ID:
            return LARGER_THAN_MAX_MESSAGE_ID
        return anchor
    except ValueError:
        raise JsonableError(_("Invalid anchor"))


@has_request_variables
def get_messages_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    anchor_val: Optional[str] = REQ("anchor", default=None),
    num_before: int = REQ(converter=to_non_negative_int),
    num_after: int = REQ(converter=to_non_negative_int),
    narrow: OptionalNarrowListT = REQ("narrow", converter=narrow_parameter, default=None),
    use_first_unread_anchor_val: bool = REQ(
        "use_first_unread_anchor", json_validator=check_bool, default=False
    ),
    client_gravatar: bool = REQ(json_validator=check_bool, default=False),
    apply_markdown: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    """Fetch a window of messages around an anchor, optionally filtered by
    a narrow; the main GET /messages endpoint."""
    anchor = parse_anchor_value(anchor_val, use_first_unread_anchor_val)
    if num_before + num_after > MAX_MESSAGES_PER_FETCH:
        return json_error(
            _("Too many messages requested (maximum {}).").format(
                MAX_MESSAGES_PER_FETCH,
            )
        )

    if not maybe_user_profile.is_authenticated:
        # If user is not authenticated, clients must include
        # `streams:web-public` in their narrow query to indicate this
        # is a web-public query.  This helps differentiate between
        # cases of web-public queries (where we should return the
        # web-public results only) and clients with buggy
        # authentication code (where we should return an auth error).
        if not is_web_public_narrow(narrow):
            raise MissingAuthenticationError()
        assert narrow is not None
        if not is_web_public_compatible(narrow):
            raise MissingAuthenticationError()

        realm = get_valid_realm_from_request(request)
        # We use None to indicate unauthenticated requests as it's more
        # readable than using AnonymousUser, and the lack of Django
        # stubs means that mypy can't check AnonymousUser well.
        user_profile: Optional[UserProfile] = None
        is_web_public_query = True
    else:
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile
        assert user_profile is not None
        realm = user_profile.realm
        is_web_public_query = False

    assert realm is not None

    if (
        is_web_public_query
        or realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE
    ):
        # If email addresses are only available to administrators,
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    include_history = ok_to_include_history(narrow, user_profile, is_web_public_query)
    if include_history:
        # The initial query in this case doesn't use `zerver_usermessage`,
        # and isn't yet limited to messages the user is entitled to see!
        #
        # This is OK only because we've made sure this is a narrow that
        # will cause us to limit the query appropriately elsewhere.
        # See `ok_to_include_history` for details.
        #
        # Note that is_web_public_query=True goes here, since
        # include_history is semantically correct for is_web_public_query.
        need_message = True
        need_user_message = False
    elif narrow is None:
        # We need to limit to messages the user has received, but we don't actually
        # need any fields from Message
        need_message = False
        need_user_message = True
    else:
        need_message = True
        need_user_message = True

    query: FromClause
    query, inner_msg_id_col = get_base_query_for_search(
        user_profile=user_profile,
        need_message=need_message,
        need_user_message=need_user_message,
    )

    query, is_search = add_narrow_conditions(
        user_profile=user_profile,
        inner_msg_id_col=inner_msg_id_col,
        query=query,
        narrow=narrow,
        realm=realm,
        is_web_public_query=is_web_public_query,
    )

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term["operator"] == "is":
                verbose_operators.append("is:" + term["operand"])
            else:
                verbose_operators.append(term["operator"])
        request._log_data["extra"] = "[{}]".format(",".join(verbose_operators))

    sa_conn = get_sqlalchemy_connection()

    if anchor is None:
        # `anchor=None` corresponds to the anchor="first_unread" parameter.
        anchor = find_first_unread_anchor(
            sa_conn,
            user_profile,
            narrow,
        )

    anchored_to_left = anchor == 0

    # Set value that will be used to short circuit the after_query
    # altogether and avoid needless conditions in the before_query.
    anchored_to_right = anchor >= LARGER_THAN_MAX_MESSAGE_ID
    if anchored_to_right:
        num_after = 0

    first_visible_message_id = get_first_visible_message_id(realm)
    query = limit_query_to_range(
        query=query,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        id_col=inner_msg_id_col,
        first_visible_message_id=first_visible_message_id,
    )

    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id", Integer).asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_messages */")
    rows = list(sa_conn.execute(query).fetchall())

    query_info = post_process_limited_query(
        rows=rows,
        num_before=num_before,
        num_after=num_after,
        anchor=anchor,
        anchored_to_left=anchored_to_left,
        anchored_to_right=anchored_to_right,
        first_visible_message_id=first_visible_message_id,
    )

    rows = query_info["rows"]

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    message_ids: List[int] = []
    user_message_flags: Dict[int, List[str]] = {}
    if is_web_public_query:
        # For web-public users, we treat all historical messages as read.
        for row in rows:
            message_id = row[0]
            message_ids.append(message_id)
            user_message_flags[message_id] = ["read"]
    elif include_history:
        assert user_profile is not None
        message_ids = [row[0] for row in rows]

        # TODO: This could be done with an outer join instead of two queries
        um_rows = UserMessage.objects.filter(user_profile=user_profile, message_id__in=message_ids)
        user_message_flags = {um.message_id: um.flags_list() for um in um_rows}

        for message_id in message_ids:
            if message_id not in user_message_flags:
                user_message_flags[message_id] = ["read", "historical"]
    else:
        for row in rows:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = UserMessage.flags_list_for_flags(flags)
            message_ids.append(message_id)

    search_fields: Dict[int, Dict[str, str]] = {}
    if is_search:
        for row in rows:
            message_id = row[0]
            # The last four columns are the extra search columns appended
            # by add_narrow_conditions / _by_search_*.
            (topic_name, rendered_content, content_matches, topic_matches) = row[-4:]
            try:
                search_fields[message_id] = get_search_fields(
                    rendered_content, topic_name, content_matches, topic_matches
                )
            except UnicodeDecodeError as err:  # nocoverage
                # No coverage for this block since it should be
                # impossible, and we plan to remove it once we've
                # debugged the case that makes it happen.
                raise Exception(str(err), message_id, narrow)

    message_list = messages_for_ids(
        message_ids=message_ids,
        user_message_flags=user_message_flags,
        search_fields=search_fields,
        apply_markdown=apply_markdown,
        client_gravatar=client_gravatar,
        allow_edit_history=realm.allow_edit_history,
    )

    statsd.incr("loaded_old_messages", len(message_list))

    ret = dict(
        messages=message_list,
        result="success",
        msg="",
        found_anchor=query_info["found_anchor"],
        found_oldest=query_info["found_oldest"],
        found_newest=query_info["found_newest"],
        history_limited=query_info["history_limited"],
        anchor=anchor,
    )
    return json_success(ret)


def limit_query_to_range(
    query: Select,
    num_before: int,
    num_after: int,
    anchor: int,
    anchored_to_left: bool,
    anchored_to_right: bool,
    id_col: "ColumnElement[int]",
    first_visible_message_id: int,
) -> FromClause:
    """
    Limit the query to the range of messages specified by num_before,
    num_after, and the anchor, using at most a UNION of a "before" and
    an "after" query.
    """
    need_before_query = (not anchored_to_left) and (num_before > 0)
    need_after_query = (not anchored_to_right) and (num_after > 0)

    need_both_sides = need_before_query and need_after_query

    # The semantics of our flags are as follows:
    #
    # num_before = number of rows < anchor
    # num_after = number of rows > anchor
    #
    # But we also want the row where id == anchor (if it exists),
    # and we don't want to union up to 3 queries.  So in some cases
    # we do things like `after_limit = num_after + 1` to grab the
    # anchor row in the "after" query.
    #
    # Note that in some cases, if the anchor row isn't found, we
    # actually may fetch an extra row at one of the extremes.
    if need_both_sides:
        before_anchor = anchor - 1
        after_anchor = max(anchor, first_visible_message_id)
        before_limit = num_before
        after_limit = num_after + 1
    elif need_before_query:
        before_anchor = anchor
        before_limit = num_before
        if not anchored_to_right:
            before_limit += 1
    elif need_after_query:
        after_anchor = max(anchor, first_visible_message_id)
        after_limit = num_after + 1

    if need_before_query:
        before_query = query

        if not anchored_to_right:
            before_query = before_query.where(id_col <= before_anchor)

        before_query = before_query.order_by(id_col.desc())
        before_query = before_query.limit(before_limit)

    if need_after_query:
        after_query = query

        if not anchored_to_left:
            after_query = after_query.where(id_col >= after_anchor)

        after_query = after_query.order_by(id_col.asc())
        after_query = after_query.limit(after_limit)

    if need_both_sides:
        return union_all(before_query.self_group(), after_query.self_group())
    elif need_before_query:
        return before_query
    elif need_after_query:
        return after_query
    else:
        # If we don't have either a before_query or after_query, it's because
        # some combination of num_before/num_after/anchor are zero or
        # use_first_unread_anchor logic found no unread messages.
        #
        # The most likely reason is somebody is doing an id search, so searching
        # for something like `message_id = 42` is exactly what we want.  In other
        # cases, which could possibly be buggy API clients, at least we will
        # return at most one row here.
        return query.where(id_col == anchor)


def post_process_limited_query(
    rows: Sequence[Union[RowProxy, Sequence[Any]]],
    num_before: int,
    num_after: int,
    anchor: int,
    anchored_to_left: bool,
    anchored_to_right: bool,
    first_visible_message_id: int,
) -> Dict[str, Any]:
    # Our queries may have fetched extra rows if they added
    # "headroom" to the limits, but we want to truncate those
    # rows.
    #
    # Also, in cases where we had non-zero values of num_before or
    # num_after, we want to know found_oldest and found_newest, so
    # that the clients will know that they got complete results.

    if first_visible_message_id > 0:
        visible_rows: Sequence[Union[RowProxy, Sequence[Any]]] = [
            r for r in rows if r[0] >= first_visible_message_id
        ]
    else:
        visible_rows = rows

    rows_limited = len(visible_rows) != len(rows)

    if anchored_to_right:
        num_after = 0
        before_rows = visible_rows[:]
        anchor_rows = []
        after_rows = []
    else:
        before_rows = [r for r in visible_rows if r[0] < anchor]
        anchor_rows = [r for r in visible_rows if r[0] == anchor]
        after_rows = [r for r in visible_rows if r[0] > anchor]

    if num_before:
        before_rows = before_rows[-1 * num_before :]

    if num_after:
        after_rows = after_rows[:num_after]

    visible_rows = [*before_rows, *anchor_rows, *after_rows]

    found_anchor = len(anchor_rows) == 1
    found_oldest = anchored_to_left or (len(before_rows) < num_before)
    found_newest = anchored_to_right or (len(after_rows) < num_after)
    # BUG: history_limited is incorrect False in the event that we had
    # to bump `anchor` up due to first_visible_message_id, and there
    # were actually older messages.  This may be a rare event in the
    # context where history_limited is relevant, because it can only
    # happen in one-sided queries with no num_before (see tests tagged
    # BUG in PostProcessTest for examples), and we don't generally do
    # those from the UI, so this might be OK for now.
    #
    # The correct fix for this probably involves e.g. making a
    # `before_query` when we increase `anchor` just to confirm whether
    # messages were hidden.
    history_limited = rows_limited and found_oldest

    return dict(
        rows=visible_rows,
        found_anchor=found_anchor,
        found_newest=found_newest,
        found_oldest=found_oldest,
        history_limited=history_limited,
    )


@has_request_variables
def messages_in_narrow_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    msg_ids: List[int] = REQ(json_validator=check_list(check_int)),
    narrow: OptionalNarrowListT = REQ(converter=narrow_parameter),
) -> HttpResponse:
    """Given a list of message IDs the user has received, return the subset
    matching the narrow, with search-highlighted content/topic fields."""
    first_visible_message_id = get_first_visible_message_id(user_profile.realm)
    msg_ids = [message_id for message_id in msg_ids if message_id >= first_visible_message_id]
    # This query is limited to messages the user has access to because they
    # actually received them, as reflected in `zerver_usermessage`.
    query = select(
        [column("message_id", Integer), topic_column_sa(), column("rendered_content", Text)],
        and_(
            column("user_profile_id", Integer) == literal(user_profile.id),
            column("message_id", Integer).in_(msg_ids),
        ),
        join(
            table("zerver_usermessage"),
            table("zerver_message"),
            literal_column("zerver_usermessage.message_id", Integer)
            == literal_column("zerver_message.id", Integer),
        ),
    )

    builder = NarrowBuilder(user_profile, column("message_id", Integer), user_profile.realm)
    if narrow is not None:
        for term in narrow:
            query = builder.add_term(query, term)

    sa_conn = get_sqlalchemy_connection()
    query_result = list(sa_conn.execute(query).fetchall())

    search_fields = {}
    for row in query_result:
        message_id = row["message_id"]
        topic_name = row[DB_TOPIC_NAME]
        rendered_content = row["rendered_content"]
        if "content_matches" in row:
            content_matches = row["content_matches"]
            topic_matches = row["topic_matches"]
        else:
            # No search term in the narrow: nothing to highlight.
            content_matches = topic_matches = []
        search_fields[str(message_id)] = get_search_fields(
            rendered_content,
            topic_name,
            content_matches,
            topic_matches,
        )

    return json_success({"messages": search_fields})
true
true
f71ccbfa3508cc2de142272d5a5eb12f86208da2
2,165
py
Python
Packs/FeedCyjax/Integrations/FeedCyjax/test_data/indicators.py
diCagri/content
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
[ "MIT" ]
799
2016-08-02T06:43:14.000Z
2022-03-31T11:10:11.000Z
Packs/FeedCyjax/Integrations/FeedCyjax/test_data/indicators.py
diCagri/content
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
[ "MIT" ]
9,317
2016-08-07T19:00:51.000Z
2022-03-31T21:56:04.000Z
Packs/FeedCyjax/Integrations/FeedCyjax/test_data/indicators.py
diCagri/content
c532c50b213e6dddb8ae6a378d6d09198e08fc9f
[ "MIT" ]
1,297
2016-08-04T13:59:00.000Z
2022-03-31T23:43:06.000Z
mocked_indicators = [ { "type": "URL", "industry_type": [ "IT", "online gaming", "Military" ], "value": "https://test.domainos.com?test=true&id=32423", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.cyjax.com/report/incident/view?id=68646", "ttp": [ "Remote Access Software", "Download New Code at Runtime", ], "geoip": { "city_name": "Donetsk", "location": { "lon": 37.7759, "lat": 47.9917 }, "country_code2": "UA", "country_name": "Ukraine" }, }, { "type": "FileHash-SHA1", "industry_type": [ "IT", "online gaming", "Military" ], "value": "1f49429f805663702acf221177dd0e99f6ba3f46", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.cyjax.com/report/incident/view?id=68646" }, { "type": "FileHash-SSDEEP", "industry_type": [ "IT", "online gaming", "Military" ], "value": "3072:Rl0zyy95JFokb1sUUBTHxg1htzj5hZrUrYq2r5HsBnWR0:Rl0Lrh1sBS1hLhZrVh5HsNWO", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.com/report/incident/view?id=68646" }, { "type": "IPv6", "industry_type": [ "IT", "online gaming", "Military" ], "value": "2001:da8:8000:6300:1c22:6545:295d:f55c", "handling_condition": "GREEN", "discovered_at": "2021-12-31T22:00:32+0000", "description": "Incident report with some test iocs", "source": "https://website.com/report/incident/view?id=68646" }, ]
32.313433
95
0.513164
mocked_indicators = [ { "type": "URL", "industry_type": [ "IT", "online gaming", "Military" ], "value": "https://test.domainos.com?test=true&id=32423", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.cyjax.com/report/incident/view?id=68646", "ttp": [ "Remote Access Software", "Download New Code at Runtime", ], "geoip": { "city_name": "Donetsk", "location": { "lon": 37.7759, "lat": 47.9917 }, "country_code2": "UA", "country_name": "Ukraine" }, }, { "type": "FileHash-SHA1", "industry_type": [ "IT", "online gaming", "Military" ], "value": "1f49429f805663702acf221177dd0e99f6ba3f46", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.cyjax.com/report/incident/view?id=68646" }, { "type": "FileHash-SSDEEP", "industry_type": [ "IT", "online gaming", "Military" ], "value": "3072:Rl0zyy95JFokb1sUUBTHxg1htzj5hZrUrYq2r5HsBnWR0:Rl0Lrh1sBS1hLhZrVh5HsNWO", "handling_condition": "GREEN", "discovered_at": "2020-12-31T14:18:26+0000", "description": "Incident report with some test iocs", "source": "https://website.com/report/incident/view?id=68646" }, { "type": "IPv6", "industry_type": [ "IT", "online gaming", "Military" ], "value": "2001:da8:8000:6300:1c22:6545:295d:f55c", "handling_condition": "GREEN", "discovered_at": "2021-12-31T22:00:32+0000", "description": "Incident report with some test iocs", "source": "https://website.com/report/incident/view?id=68646" }, ]
true
true
f71ccc3dfa925bc05b65ff8afe4da56a24f1736f
245
py
Python
Python Programming/06. Classes/01-Classes.py
luckyrabbit85/Python
ed134fd70b4a7b84b183b87b85ad5190f54c9526
[ "MIT" ]
1
2021-07-15T18:40:26.000Z
2021-07-15T18:40:26.000Z
Python Programming/06. Classes/01-Classes.py
luckyrabbit85/Python
ed134fd70b4a7b84b183b87b85ad5190f54c9526
[ "MIT" ]
null
null
null
Python Programming/06. Classes/01-Classes.py
luckyrabbit85/Python
ed134fd70b4a7b84b183b87b85ad5190f54c9526
[ "MIT" ]
null
null
null
# Class: blueprint for creating new objects # Object: instances of a class # Class: Human # Objects: John, Mary, Jack class Point: def draw(self): print("draw") point = Point() print(type(point)) print(isinstance(point, Point))
15.3125
43
0.677551
class Point: def draw(self): print("draw") point = Point() print(type(point)) print(isinstance(point, Point))
true
true
f71cccaab8c8334d17849d7af7fa89ed4b6eaf3b
7,967
py
Python
magnum/tests/unit/api/test_attr_validator.py
MatMaul/magnum
4d5fd80d89e38e98aff24f01b967a57d0adcd191
[ "Apache-2.0" ]
null
null
null
magnum/tests/unit/api/test_attr_validator.py
MatMaul/magnum
4d5fd80d89e38e98aff24f01b967a57d0adcd191
[ "Apache-2.0" ]
null
null
null
magnum/tests/unit/api/test_attr_validator.py
MatMaul/magnum
4d5fd80d89e38e98aff24f01b967a57d0adcd191
[ "Apache-2.0" ]
1
2020-09-09T14:35:08.000Z
2020-09-09T14:35:08.000Z
# Copyright 2015 EasyStack, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from glanceclient import exc as glance_exception import mock from novaclient import exceptions as nova_exc from magnum.api import attr_validator from magnum.common import exception from magnum.tests import base class TestAttrValidator(base.BaseTestCase): def test_validate_flavor_with_vaild_flavor(self): mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor' mock_flavor.id = 'test_flavor_id' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova attr_validator.validate_flavor(mock_os_cli, 'test_flavor') self.assertTrue(mock_nova.flavors.list.called) def test_validate_flavor_with_invaild_flavor(self): mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor_not_equal' mock_flavor.id = 'test_flavor_id_not_equal' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova self.assertRaises(exception.FlavorNotFound, attr_validator.validate_flavor, mock_os_cli, 'test_flavor') def test_validate_external_network_with_valid_network(self): mock_networks = {'networks': [{'name': 'test_ext_net', 'id': 'test_ext_net_id'}]} mock_neutron = mock.MagicMock() mock_neutron.list_networks.return_value = mock_networks mock_os_cli = mock.MagicMock() 
mock_os_cli.neutron.return_value = mock_neutron attr_validator.validate_external_network(mock_os_cli, 'test_ext_net') self.assertTrue(mock_neutron.list_networks.called) def test_validate_external_network_with_invalid_network(self): mock_networks = {'networks': [{'name': 'test_ext_net_not_equal', 'id': 'test_ext_net_id_not_equal'}]} mock_neutron = mock.MagicMock() mock_neutron.list_networks.return_value = mock_networks mock_os_cli = mock.MagicMock() mock_os_cli.neutron.return_value = mock_neutron self.assertRaises(exception.NetworkNotFound, attr_validator.validate_external_network, mock_os_cli, 'test_ext_net') def test_validate_keypair_with_valid_keypair(self): mock_keypair = mock.MagicMock() mock_keypair.id = 'test-keypair' mock_nova = mock.MagicMock() mock_nova.keypairs.get.return_value = mock_keypair mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova attr_validator.validate_keypair(mock_os_cli, 'test-keypair') def test_validate_keypair_with_invalid_keypair(self): mock_nova = mock.MagicMock() mock_nova.keypairs.get.side_effect = nova_exc.NotFound('test-keypair') mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova self.assertRaises(exception.KeyPairNotFound, attr_validator.validate_keypair, mock_os_cli, 'test_keypair') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_valid_image_by_name(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': 'fedora-atomic'} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() attr_validator.validate_image(mock_os_cli, 'fedora-21-atomic-5') self.assertTrue(mock_os_res.called) @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_valid_image_by_id(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': 'fedora-atomic'} mock_os_res.return_value = mock_image mock_os_cli = 
mock.MagicMock() attr_validator.validate_image(mock_os_cli, 'e33f0988-1730-405e-8401-30cbc8535302') self.assertTrue(mock_os_res.called) @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_nonexist_image_by_name(self, mock_os_res): mock_os_res.side_effect = exception.ResourceNotFound mock_os_cli = mock.MagicMock() self.assertRaises(exception.ImageNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_nonexist_image_by_id(self, mock_os_res): mock_os_res.side_effect = glance_exception.NotFound mock_os_cli = mock.MagicMock() self.assertRaises(exception.ImageNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_multi_images_same_name(self, mock_os_res): mock_os_res.side_effect = exception.Conflict mock_os_cli = mock.MagicMock() self.assertRaises(exception.Conflict, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_without_os_distro(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302'} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() self.assertRaises(exception.OSDistroFieldNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_empty_os_distro(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': ''} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() self.assertRaises(exception.OSDistroFieldNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.common.clients.OpenStackClients') def test_validate_os_resources_with_invalid_flavor(self, 
mock_os_cli): mock_baymodel = {'flavor_id': 'test_flavor'} mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor_not_equal' mock_flavor.id = 'test_flavor_id_not_equal' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli.nova.return_value = mock_nova mock_context = mock.MagicMock() self.assertRaises(exception.FlavorNotFound, attr_validator.validate_os_resources, mock_context, mock_baymodel)
46.319767
78
0.667629
from glanceclient import exc as glance_exception import mock from novaclient import exceptions as nova_exc from magnum.api import attr_validator from magnum.common import exception from magnum.tests import base class TestAttrValidator(base.BaseTestCase): def test_validate_flavor_with_vaild_flavor(self): mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor' mock_flavor.id = 'test_flavor_id' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova attr_validator.validate_flavor(mock_os_cli, 'test_flavor') self.assertTrue(mock_nova.flavors.list.called) def test_validate_flavor_with_invaild_flavor(self): mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor_not_equal' mock_flavor.id = 'test_flavor_id_not_equal' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova self.assertRaises(exception.FlavorNotFound, attr_validator.validate_flavor, mock_os_cli, 'test_flavor') def test_validate_external_network_with_valid_network(self): mock_networks = {'networks': [{'name': 'test_ext_net', 'id': 'test_ext_net_id'}]} mock_neutron = mock.MagicMock() mock_neutron.list_networks.return_value = mock_networks mock_os_cli = mock.MagicMock() mock_os_cli.neutron.return_value = mock_neutron attr_validator.validate_external_network(mock_os_cli, 'test_ext_net') self.assertTrue(mock_neutron.list_networks.called) def test_validate_external_network_with_invalid_network(self): mock_networks = {'networks': [{'name': 'test_ext_net_not_equal', 'id': 'test_ext_net_id_not_equal'}]} mock_neutron = mock.MagicMock() mock_neutron.list_networks.return_value = mock_networks mock_os_cli = mock.MagicMock() mock_os_cli.neutron.return_value = mock_neutron self.assertRaises(exception.NetworkNotFound, attr_validator.validate_external_network, 
mock_os_cli, 'test_ext_net') def test_validate_keypair_with_valid_keypair(self): mock_keypair = mock.MagicMock() mock_keypair.id = 'test-keypair' mock_nova = mock.MagicMock() mock_nova.keypairs.get.return_value = mock_keypair mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova attr_validator.validate_keypair(mock_os_cli, 'test-keypair') def test_validate_keypair_with_invalid_keypair(self): mock_nova = mock.MagicMock() mock_nova.keypairs.get.side_effect = nova_exc.NotFound('test-keypair') mock_os_cli = mock.MagicMock() mock_os_cli.nova.return_value = mock_nova self.assertRaises(exception.KeyPairNotFound, attr_validator.validate_keypair, mock_os_cli, 'test_keypair') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_valid_image_by_name(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': 'fedora-atomic'} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() attr_validator.validate_image(mock_os_cli, 'fedora-21-atomic-5') self.assertTrue(mock_os_res.called) @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_valid_image_by_id(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': 'fedora-atomic'} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() attr_validator.validate_image(mock_os_cli, 'e33f0988-1730-405e-8401-30cbc8535302') self.assertTrue(mock_os_res.called) @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_nonexist_image_by_name(self, mock_os_res): mock_os_res.side_effect = exception.ResourceNotFound mock_os_cli = mock.MagicMock() self.assertRaises(exception.ImageNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_nonexist_image_by_id(self, mock_os_res): 
mock_os_res.side_effect = glance_exception.NotFound mock_os_cli = mock.MagicMock() self.assertRaises(exception.ImageNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_multi_images_same_name(self, mock_os_res): mock_os_res.side_effect = exception.Conflict mock_os_cli = mock.MagicMock() self.assertRaises(exception.Conflict, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_without_os_distro(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302'} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() self.assertRaises(exception.OSDistroFieldNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.api.utils.get_openstack_resource') def test_validate_image_with_empty_os_distro(self, mock_os_res): mock_image = {'name': 'fedora-21-atomic-5', 'id': 'e33f0988-1730-405e-8401-30cbc8535302', 'os_distro': ''} mock_os_res.return_value = mock_image mock_os_cli = mock.MagicMock() self.assertRaises(exception.OSDistroFieldNotFound, attr_validator.validate_image, mock_os_cli, 'fedora-21-atomic-5') @mock.patch('magnum.common.clients.OpenStackClients') def test_validate_os_resources_with_invalid_flavor(self, mock_os_cli): mock_baymodel = {'flavor_id': 'test_flavor'} mock_flavor = mock.MagicMock() mock_flavor.name = 'test_flavor_not_equal' mock_flavor.id = 'test_flavor_id_not_equal' mock_flavors = [mock_flavor] mock_nova = mock.MagicMock() mock_nova.flavors.list.return_value = mock_flavors mock_os_cli.nova.return_value = mock_nova mock_context = mock.MagicMock() self.assertRaises(exception.FlavorNotFound, attr_validator.validate_os_resources, mock_context, mock_baymodel)
true
true
f71ccd96515f76c1d80e0d8132600385ef3f08bf
3,311
py
Python
huaweicloud-sdk-osm/huaweicloudsdkosm/v2/model/list_case_labels_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
64
2020-06-12T07:05:07.000Z
2022-03-30T03:32:50.000Z
huaweicloud-sdk-osm/huaweicloudsdkosm/v2/model/list_case_labels_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
11
2020-07-06T07:56:54.000Z
2022-01-11T11:14:40.000Z
huaweicloud-sdk-osm/huaweicloudsdkosm/v2/model/list_case_labels_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
24
2020-06-08T11:42:13.000Z
2022-03-04T06:44:08.000Z
# coding: utf-8 import re import six from huaweicloudsdkcore.sdk_response import SdkResponse from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization class ListCaseLabelsResponse(SdkResponse): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ sensitive_list = [] openapi_types = { 'case_label_list': 'list[CaseLabelInfo]' } attribute_map = { 'case_label_list': 'case_label_list' } def __init__(self, case_label_list=None): """ListCaseLabelsResponse - a model defined in huaweicloud sdk""" super(ListCaseLabelsResponse, self).__init__() self._case_label_list = None self.discriminator = None if case_label_list is not None: self.case_label_list = case_label_list @property def case_label_list(self): """Gets the case_label_list of this ListCaseLabelsResponse. 工单关联的标签列表 :return: The case_label_list of this ListCaseLabelsResponse. :rtype: list[CaseLabelInfo] """ return self._case_label_list @case_label_list.setter def case_label_list(self, case_label_list): """Sets the case_label_list of this ListCaseLabelsResponse. 工单关联的标签列表 :param case_label_list: The case_label_list of this ListCaseLabelsResponse. 
:type: list[CaseLabelInfo] """ self._case_label_list = case_label_list def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding("utf-8") return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) def __repr__(self): """For `print`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, ListCaseLabelsResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
28.791304
83
0.583812
import re import six from huaweicloudsdkcore.sdk_response import SdkResponse from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization class ListCaseLabelsResponse(SdkResponse): sensitive_list = [] openapi_types = { 'case_label_list': 'list[CaseLabelInfo]' } attribute_map = { 'case_label_list': 'case_label_list' } def __init__(self, case_label_list=None): super(ListCaseLabelsResponse, self).__init__() self._case_label_list = None self.discriminator = None if case_label_list is not None: self.case_label_list = case_label_list @property def case_label_list(self): return self._case_label_list @case_label_list.setter def case_label_list(self, case_label_list): self._case_label_list = case_label_list def to_dict(self): result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding("utf-8") return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, ListCaseLabelsResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
true
true
f71cce801e6baa714b9a75a9300c23ab0372565e
21,083
py
Python
deepfence_backend/tasks/task_scheduler.py
Jramirezg/ThreatMapper
af5fda3ff585f8728a7a0b48ae6818ed189e4dbf
[ "Apache-2.0" ]
null
null
null
deepfence_backend/tasks/task_scheduler.py
Jramirezg/ThreatMapper
af5fda3ff585f8728a7a0b48ae6818ed189e4dbf
[ "Apache-2.0" ]
null
null
null
deepfence_backend/tasks/task_scheduler.py
Jramirezg/ThreatMapper
af5fda3ff585f8728a7a0b48ae6818ed189e4dbf
[ "Apache-2.0" ]
null
null
null
import arrow from config.app import celery_app, app from models.container_image_registry import RegistryCredential from models.scheduler import Scheduler from models.setting import Setting from croniter import croniter from utils import constants import time from datetime import datetime from utils.helper import websocketio_channel_name_format, get_image_cve_status from config.redisconfig import redis from utils.esconn import ESConn from resource_models.node import Node from utils.reports import prepare_report_download, prepare_report_email_body from utils.response import set_response from flask import make_response import json import uuid from copy import deepcopy from utils.helper import get_all_scanned_node, get_all_scanned_images import pandas as pd import re @celery_app.task def task_scheduler(): with app.app_context(): curr_time = arrow.now(tz="+00:00").datetime.replace(minute=0, second=0, microsecond=0) scheduled_tasks = Scheduler.query.filter_by(is_enabled=True).all() if not scheduled_tasks: return for scheduled_task in scheduled_tasks: if croniter.match(scheduled_task.cron_expr, curr_time): run_node_task(scheduled_task.action, scheduled_task.nodes, scheduled_task.id, scheduled_task.cron_expr) def run_node_task(action, node_action_details, scheduler_id=None, cron_expr=None): with app.app_context(): curr_time = arrow.now(tz="+00:00").datetime if scheduler_id: try: scheduled_task = Scheduler.query.get(scheduler_id) scheduled_task.last_ran_at = curr_time scheduled_task.status = "running" scheduled_task.save() except Exception as ex: app.logger.error(ex) return def save_scheduled_task_status(status): if scheduler_id: try: scheduled_task = Scheduler.query.get(scheduler_id) scheduled_task.status = status scheduled_task.save() except Exception as ex: app.logger.error(ex) save_scheduled_task_status("In Progress") node_type = node_action_details["node_type"] df_id_to_scope_id_map = {} topology_data_df_format = {} registry_credential = None if node_type == 
constants.NODE_TYPE_REGISTRY_IMAGE: try: registry_credential = RegistryCredential.query.get( node_action_details["registry_images"]["registry_id"]) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) return else: if not node_action_details.get("node_id_list"): node_action_details["node_id_list"] = [] for i in range(3): try: redis_pipe = redis.pipeline() redis_pipe.hgetall(constants.DF_ID_TO_SCOPE_ID_REDIS_KEY_PREFIX + node_type.upper()) redis_pipe.get(websocketio_channel_name_format(node_type + "?format=deepfence")[1]) redis_resp = redis_pipe.execute() df_id_to_scope_id_map = redis_resp[0] if redis_resp[1]: topology_data_df_format = json.loads(redis_resp[1]) if topology_data_df_format and df_id_to_scope_id_map: break else: app.logger.error("topology data is empty, retrying") time.sleep(10) except Exception as ex: app.logger.error(ex) time.sleep(10) if action in [constants.NODE_ACTION_CVE_SCAN_START, constants.NODE_ACTION_SCHEDULE_CVE_SCAN]: if node_type == constants.NODE_TYPE_REGISTRY_IMAGE: from config.app import celery_app redis_lock_keys = [] redis_pipe = redis.pipeline() image_list_details_str = redis.get("{0}:{1}".format(constants.REGISTRY_IMAGES_CACHE_KEY_PREFIX, node_action_details["registry_images"][ "registry_id"])) if image_list_details_str: if node_action_details["registry_images"].get("all_registry_images", False): image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) image_df['timestamp'] = pd.to_datetime(image_df.pushed_at) sorted_df = image_df.sort_values(by=['timestamp'], ascending=False) df_unique_list = sorted_df["image_tag"].unique() df_unique = pd.DataFrame(data=df_unique_list, columns=["image_tag"]) sorted_df_by_image_tag = image_df.sort_values("image_tag") images_by_tags = df_unique.merge(sorted_df_by_image_tag, on=["image_tag"], how="outer")[ "image_name_with_tag"] node_action_details["registry_images"]["image_name_with_tag_list"] = images_by_tags elif 
node_action_details["registry_images"].get("only_new_images", False): image_dict = json.loads(image_list_details_str) all_registry_images = set([image["image_name_with_tag"] for image in image_dict['image_list']]) if cron_expr: pattern = '^0.*?\*/(\d).*?$' match = re.search(pattern, cron_expr) if match: days_interval = int(match.group(1)) else: days_interval = 1 images_need_to_be_scanned = all_registry_images - get_all_scanned_images(days_interval) node_action_details["registry_images"]["image_name_with_tag_list"] = list( images_need_to_be_scanned) elif node_action_details["registry_images"].get("registry_scan_type", None) == "latest_timestamp": image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) image_df['timestamp'] = pd.to_datetime(image_df.pushed_at) grouped = image_df.groupby(['image_name']).agg({"timestamp": max}).reset_index() latest_images_by_tags = image_df.merge(grouped, on=["image_name", "timestamp"], how="inner")[ 'image_name_with_tag'] node_action_details["registry_images"]["image_name_with_tag_list"] = latest_images_by_tags elif node_action_details["registry_images"].get("registry_scan_type", None) == "image_tags": if node_action_details["registry_images"].get("image_tags", []): image_tags = node_action_details["registry_images"].get("image_tags", []) image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) images_by_tags = image_df[image_df["image_tag"].isin(image_tags)]["image_name_with_tag"] node_action_details["registry_images"]["image_name_with_tag_list"] = images_by_tags else: node_action_details["registry_images"]["image_name_with_tag_list"] = [] for image_name_with_tag in node_action_details["registry_images"]["image_name_with_tag_list"]: lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, image_name_with_tag) redis_pipe.incr(lock_key) redis_lock_keys.append(lock_key) redis_resp = redis_pipe.execute() time.sleep(1) image_cve_status = 
get_image_cve_status() for i, image_name_with_tag in enumerate( node_action_details["registry_images"]["image_name_with_tag_list"]): try: if redis_resp[i] != 1: continue cve_status = image_cve_status.get(image_name_with_tag, {}).get("action", "") if cve_status: if cve_status == constants.CVE_SCAN_STATUS_QUEUED or cve_status in constants.CVE_SCAN_RUNNING_STATUS: continue datetime_now = datetime.now() scan_id = image_name_with_tag + "_" + datetime_now.strftime("%Y-%m-%dT%H:%M:%S") + ".000" body = { "masked": "false", "type": constants.CVE_SCAN_LOGS_INDEX, "scan_id": scan_id, "host": "", "@timestamp": datetime_now.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), "cve_scan_message": "", "action": constants.CVE_SCAN_STATUS_QUEUED, "host_name": "", "node_id": image_name_with_tag, "time_stamp": int(time.time() * 1000.0), "node_type": constants.NODE_TYPE_CONTAINER_IMAGE } ESConn.create_doc(constants.CVE_SCAN_LOGS_INDEX, body) scan_details = { "cve_node_id": image_name_with_tag, "scan_types": node_action_details["scan_type"], "registry_type": registry_credential.registry_type, "scan_id": scan_id, "credential_id": registry_credential.id} celery_task_id = "cve_scan:" + scan_id if node_action_details["registry_images"].get("priority", False): celery_app.send_task('tasks.vulnerability_scan_worker.vulnerability_scan', args=(), task_id=celery_task_id, kwargs={"scan_details": scan_details}, queue=constants.VULNERABILITY_SCAN_PRIORITY_QUEUE) else: celery_app.send_task('tasks.vulnerability_scan_worker.vulnerability_scan', args=(), task_id=celery_task_id, kwargs={"scan_details": scan_details}, queue=constants.VULNERABILITY_SCAN_QUEUE) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) time.sleep(2) redis_pipe = redis.pipeline() for lock_key in redis_lock_keys: redis.delete(lock_key) redis_pipe.execute() else: node_list = [] redis_lock_keys = [] redis_pipe = redis.pipeline() for node_id in node_action_details["node_id_list"]: try: node = Node(node_id, 
df_id_to_scope_id_map=df_id_to_scope_id_map, topology_data_df_format=topology_data_df_format) if node.type == constants.NODE_TYPE_HOST: lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, node.host_name) else: if not node.image_name_tag: continue lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, node.image_name_tag) if lock_key in redis_lock_keys: # If same image, different container, already selected, don't scan again continue redis_lock_keys.append(lock_key) redis_pipe.incr(lock_key) node_list.append(node) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) if not node_list: error_message = "No node available for scan" save_scheduled_task_status("Error: " + error_message) app.logger.error(error_message) return redis_resp = redis_pipe.execute() for i, node in enumerate(node_list): if redis_resp[i] != 1: continue try: node.cve_scan_start(node_action_details["scan_type"], priority=node_action_details.get("priority", False)) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) time.sleep(1) redis_pipe = redis.pipeline() for lock_key in redis_lock_keys: redis.delete(lock_key) redis_pipe.execute() elif action == constants.NODE_ACTION_CVE_SCAN_STOP: if node_type == constants.NODE_TYPE_REGISTRY_IMAGE: from config.app import celery_app if node_action_details["registry_images"].get("all_registry_images", False): image_list_details_str = redis.get("{0}:{1}".format(constants.REGISTRY_IMAGES_CACHE_KEY_PREFIX, node_action_details["registry_images"][ "registry_id"])) image_dict = json.loads(image_list_details_str) node_action_details["registry_images"]["image_name_with_tag_list"] = [image["image_name_with_tag"] for image in image_dict['image_list']] for image_name_with_tag in node_action_details["registry_images"]["image_name_with_tag_list"]: try: es_response = ESConn.search_by_and_clause(constants.CVE_SCAN_LOGS_INDEX, {"node_id": image_name_with_tag}, 0, size=1) 
latest_cve_scan_doc = {} cve_scan_list = es_response.get("hits", []) if cve_scan_list: cve_scan_doc = cve_scan_list[0] latest_cve_scan_doc = cve_scan_doc.get('_source', {}) latest_cve_scan_doc.update({'_id': cve_scan_doc.get('_id', "")}) if latest_cve_scan_doc: status = latest_cve_scan_doc.get("action", "") scan_id = latest_cve_scan_doc.get("scan_id", "") if (status in constants.CVE_SCAN_NOT_RUNNING_STATUS) or (not scan_id): continue elif status != constants.CVE_SCAN_STATUS_QUEUED: continue celery_task_id = "cve_scan:" + scan_id celery_app.control.revoke(celery_task_id, terminate=False) body = { "masked": "false", "type": constants.CVE_SCAN_LOGS_INDEX, "scan_id": scan_id, "cve_scan_message": "Scan stopped by user", "time_stamp": int(time.time() * 1000.0), "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ"), "host": "", "action": constants.CVE_SCAN_STATUS_STOPPED, "host_name": "", "node_id": latest_cve_scan_doc.get("node_id", ""), "node_type": constants.NODE_TYPE_CONTAINER_IMAGE } ESConn.create_doc(constants.CVE_SCAN_LOGS_INDEX, body) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) else: for node_id in node_action_details["node_id_list"]: try: node = Node(node_id, df_id_to_scope_id_map=df_id_to_scope_id_map, topology_data_df_format=topology_data_df_format) node.cve_scan_stop() except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) elif action == constants.NODE_ACTION_SCHEDULE_SEND_REPORT: domain_name = "" console_url_setting = Setting.query.filter_by(key="console_url").one_or_none() if console_url_setting and console_url_setting.value: domain_name = console_url_setting.value.get("value") report_id = uuid.uuid4() body = { "type": constants.REPORT_INDEX, "report_id": report_id, "status": "started", "masked": "false", "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") } ESConn.create_doc(constants.REPORT_INDEX, body, refresh="wait_for") if 
node_action_details.get('include_dead_nodes') is True: if node_type == 'host': if len(node_action_details['filters'].get('host_name', [])) == 0: node_action_details['filters']['host_name'] = get_all_scanned_node() from config.app import celery_app celery_app.send_task( 'tasks.common_worker.generate_report', args=(), kwargs={"report_id": report_id, "filters": node_action_details.get("filters", {}), "lucene_query_string": "", "number": node_action_details.get("duration", {}).get("number", 0), "time_unit": node_action_details.get("duration", {}).get("time_unit", "day"), "domain_name": domain_name, "resources": node_action_details.get("resources", {}), "file_type": node_action_details.get("file_type", "xlsx"), "node_type": node_type, "include_dead_nodes": node_action_details.get("include_dead_nodes", False), "report_email": node_action_details["report_email"]}) return set_response(data="Started") elif action == constants.NODE_ACTION_DOWNLOAD_REPORT: domain_name = "" console_url_setting = Setting.query.filter_by(key="console_url").one_or_none() if console_url_setting and console_url_setting.value: domain_name = console_url_setting.value.get("value") report_id = uuid.uuid4() body = { "type": constants.REPORT_INDEX, "report_id": report_id, "status": "started", "masked": "false", "duration": "", "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") } ESConn.create_doc(constants.REPORT_INDEX, body, refresh="wait_for") if node_action_details.get('include_dead_nodes') is True: if node_type == 'host': if len(node_action_details['filters'].get('host_name', [])) == 0: node_action_details['filters']['host_name'] = get_all_scanned_node() from config.app import celery_app celery_app.send_task( 'tasks.common_worker.generate_report', args=(), kwargs={"report_id": report_id, "filters": node_action_details.get("filters", {}), "lucene_query_string": "", "number": node_action_details.get("duration", {}).get("number", 0), "time_unit": node_action_details.get("duration", 
{}).get("time_unit", "d"), "domain_name": domain_name, "resources": node_action_details.get("resources", {}), "file_type": node_action_details.get("file_type", "xlsx"), "node_type": node_type, "include_dead_nodes": node_action_details.get("include_dead_nodes", False), "report_email": ""}) return set_response(data="Started") save_scheduled_task_status("Success")
61.287791
129
0.538206
import arrow from config.app import celery_app, app from models.container_image_registry import RegistryCredential from models.scheduler import Scheduler from models.setting import Setting from croniter import croniter from utils import constants import time from datetime import datetime from utils.helper import websocketio_channel_name_format, get_image_cve_status from config.redisconfig import redis from utils.esconn import ESConn from resource_models.node import Node from utils.reports import prepare_report_download, prepare_report_email_body from utils.response import set_response from flask import make_response import json import uuid from copy import deepcopy from utils.helper import get_all_scanned_node, get_all_scanned_images import pandas as pd import re @celery_app.task def task_scheduler(): with app.app_context(): curr_time = arrow.now(tz="+00:00").datetime.replace(minute=0, second=0, microsecond=0) scheduled_tasks = Scheduler.query.filter_by(is_enabled=True).all() if not scheduled_tasks: return for scheduled_task in scheduled_tasks: if croniter.match(scheduled_task.cron_expr, curr_time): run_node_task(scheduled_task.action, scheduled_task.nodes, scheduled_task.id, scheduled_task.cron_expr) def run_node_task(action, node_action_details, scheduler_id=None, cron_expr=None): with app.app_context(): curr_time = arrow.now(tz="+00:00").datetime if scheduler_id: try: scheduled_task = Scheduler.query.get(scheduler_id) scheduled_task.last_ran_at = curr_time scheduled_task.status = "running" scheduled_task.save() except Exception as ex: app.logger.error(ex) return def save_scheduled_task_status(status): if scheduler_id: try: scheduled_task = Scheduler.query.get(scheduler_id) scheduled_task.status = status scheduled_task.save() except Exception as ex: app.logger.error(ex) save_scheduled_task_status("In Progress") node_type = node_action_details["node_type"] df_id_to_scope_id_map = {} topology_data_df_format = {} registry_credential = None if node_type == 
constants.NODE_TYPE_REGISTRY_IMAGE: try: registry_credential = RegistryCredential.query.get( node_action_details["registry_images"]["registry_id"]) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) return else: if not node_action_details.get("node_id_list"): node_action_details["node_id_list"] = [] for i in range(3): try: redis_pipe = redis.pipeline() redis_pipe.hgetall(constants.DF_ID_TO_SCOPE_ID_REDIS_KEY_PREFIX + node_type.upper()) redis_pipe.get(websocketio_channel_name_format(node_type + "?format=deepfence")[1]) redis_resp = redis_pipe.execute() df_id_to_scope_id_map = redis_resp[0] if redis_resp[1]: topology_data_df_format = json.loads(redis_resp[1]) if topology_data_df_format and df_id_to_scope_id_map: break else: app.logger.error("topology data is empty, retrying") time.sleep(10) except Exception as ex: app.logger.error(ex) time.sleep(10) if action in [constants.NODE_ACTION_CVE_SCAN_START, constants.NODE_ACTION_SCHEDULE_CVE_SCAN]: if node_type == constants.NODE_TYPE_REGISTRY_IMAGE: from config.app import celery_app redis_lock_keys = [] redis_pipe = redis.pipeline() image_list_details_str = redis.get("{0}:{1}".format(constants.REGISTRY_IMAGES_CACHE_KEY_PREFIX, node_action_details["registry_images"][ "registry_id"])) if image_list_details_str: if node_action_details["registry_images"].get("all_registry_images", False): image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) image_df['timestamp'] = pd.to_datetime(image_df.pushed_at) sorted_df = image_df.sort_values(by=['timestamp'], ascending=False) df_unique_list = sorted_df["image_tag"].unique() df_unique = pd.DataFrame(data=df_unique_list, columns=["image_tag"]) sorted_df_by_image_tag = image_df.sort_values("image_tag") images_by_tags = df_unique.merge(sorted_df_by_image_tag, on=["image_tag"], how="outer")[ "image_name_with_tag"] node_action_details["registry_images"]["image_name_with_tag_list"] = images_by_tags elif 
node_action_details["registry_images"].get("only_new_images", False): image_dict = json.loads(image_list_details_str) all_registry_images = set([image["image_name_with_tag"] for image in image_dict['image_list']]) if cron_expr: pattern = '^0.*?\*/(\d).*?$' match = re.search(pattern, cron_expr) if match: days_interval = int(match.group(1)) else: days_interval = 1 images_need_to_be_scanned = all_registry_images - get_all_scanned_images(days_interval) node_action_details["registry_images"]["image_name_with_tag_list"] = list( images_need_to_be_scanned) elif node_action_details["registry_images"].get("registry_scan_type", None) == "latest_timestamp": image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) image_df['timestamp'] = pd.to_datetime(image_df.pushed_at) grouped = image_df.groupby(['image_name']).agg({"timestamp": max}).reset_index() latest_images_by_tags = image_df.merge(grouped, on=["image_name", "timestamp"], how="inner")[ 'image_name_with_tag'] node_action_details["registry_images"]["image_name_with_tag_list"] = latest_images_by_tags elif node_action_details["registry_images"].get("registry_scan_type", None) == "image_tags": if node_action_details["registry_images"].get("image_tags", []): image_tags = node_action_details["registry_images"].get("image_tags", []) image_dict = json.loads(image_list_details_str) image_df = pd.DataFrame(image_dict['image_list']) images_by_tags = image_df[image_df["image_tag"].isin(image_tags)]["image_name_with_tag"] node_action_details["registry_images"]["image_name_with_tag_list"] = images_by_tags else: node_action_details["registry_images"]["image_name_with_tag_list"] = [] for image_name_with_tag in node_action_details["registry_images"]["image_name_with_tag_list"]: lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, image_name_with_tag) redis_pipe.incr(lock_key) redis_lock_keys.append(lock_key) redis_resp = redis_pipe.execute() time.sleep(1) image_cve_status = 
get_image_cve_status() for i, image_name_with_tag in enumerate( node_action_details["registry_images"]["image_name_with_tag_list"]): try: if redis_resp[i] != 1: continue cve_status = image_cve_status.get(image_name_with_tag, {}).get("action", "") if cve_status: if cve_status == constants.CVE_SCAN_STATUS_QUEUED or cve_status in constants.CVE_SCAN_RUNNING_STATUS: continue datetime_now = datetime.now() scan_id = image_name_with_tag + "_" + datetime_now.strftime("%Y-%m-%dT%H:%M:%S") + ".000" body = { "masked": "false", "type": constants.CVE_SCAN_LOGS_INDEX, "scan_id": scan_id, "host": "", "@timestamp": datetime_now.strftime("%Y-%m-%dT%H:%M:%S.%fZ"), "cve_scan_message": "", "action": constants.CVE_SCAN_STATUS_QUEUED, "host_name": "", "node_id": image_name_with_tag, "time_stamp": int(time.time() * 1000.0), "node_type": constants.NODE_TYPE_CONTAINER_IMAGE } ESConn.create_doc(constants.CVE_SCAN_LOGS_INDEX, body) scan_details = { "cve_node_id": image_name_with_tag, "scan_types": node_action_details["scan_type"], "registry_type": registry_credential.registry_type, "scan_id": scan_id, "credential_id": registry_credential.id} celery_task_id = "cve_scan:" + scan_id if node_action_details["registry_images"].get("priority", False): celery_app.send_task('tasks.vulnerability_scan_worker.vulnerability_scan', args=(), task_id=celery_task_id, kwargs={"scan_details": scan_details}, queue=constants.VULNERABILITY_SCAN_PRIORITY_QUEUE) else: celery_app.send_task('tasks.vulnerability_scan_worker.vulnerability_scan', args=(), task_id=celery_task_id, kwargs={"scan_details": scan_details}, queue=constants.VULNERABILITY_SCAN_QUEUE) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) time.sleep(2) redis_pipe = redis.pipeline() for lock_key in redis_lock_keys: redis.delete(lock_key) redis_pipe.execute() else: node_list = [] redis_lock_keys = [] redis_pipe = redis.pipeline() for node_id in node_action_details["node_id_list"]: try: node = Node(node_id, 
df_id_to_scope_id_map=df_id_to_scope_id_map, topology_data_df_format=topology_data_df_format) if node.type == constants.NODE_TYPE_HOST: lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, node.host_name) else: if not node.image_name_tag: continue lock_key = "{0}:{1}".format(constants.NODE_ACTION_CVE_SCAN_START, node.image_name_tag) if lock_key in redis_lock_keys: continue redis_lock_keys.append(lock_key) redis_pipe.incr(lock_key) node_list.append(node) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) if not node_list: error_message = "No node available for scan" save_scheduled_task_status("Error: " + error_message) app.logger.error(error_message) return redis_resp = redis_pipe.execute() for i, node in enumerate(node_list): if redis_resp[i] != 1: continue try: node.cve_scan_start(node_action_details["scan_type"], priority=node_action_details.get("priority", False)) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) time.sleep(1) redis_pipe = redis.pipeline() for lock_key in redis_lock_keys: redis.delete(lock_key) redis_pipe.execute() elif action == constants.NODE_ACTION_CVE_SCAN_STOP: if node_type == constants.NODE_TYPE_REGISTRY_IMAGE: from config.app import celery_app if node_action_details["registry_images"].get("all_registry_images", False): image_list_details_str = redis.get("{0}:{1}".format(constants.REGISTRY_IMAGES_CACHE_KEY_PREFIX, node_action_details["registry_images"][ "registry_id"])) image_dict = json.loads(image_list_details_str) node_action_details["registry_images"]["image_name_with_tag_list"] = [image["image_name_with_tag"] for image in image_dict['image_list']] for image_name_with_tag in node_action_details["registry_images"]["image_name_with_tag_list"]: try: es_response = ESConn.search_by_and_clause(constants.CVE_SCAN_LOGS_INDEX, {"node_id": image_name_with_tag}, 0, size=1) latest_cve_scan_doc = {} cve_scan_list = es_response.get("hits", []) if 
cve_scan_list: cve_scan_doc = cve_scan_list[0] latest_cve_scan_doc = cve_scan_doc.get('_source', {}) latest_cve_scan_doc.update({'_id': cve_scan_doc.get('_id', "")}) if latest_cve_scan_doc: status = latest_cve_scan_doc.get("action", "") scan_id = latest_cve_scan_doc.get("scan_id", "") if (status in constants.CVE_SCAN_NOT_RUNNING_STATUS) or (not scan_id): continue elif status != constants.CVE_SCAN_STATUS_QUEUED: continue celery_task_id = "cve_scan:" + scan_id celery_app.control.revoke(celery_task_id, terminate=False) body = { "masked": "false", "type": constants.CVE_SCAN_LOGS_INDEX, "scan_id": scan_id, "cve_scan_message": "Scan stopped by user", "time_stamp": int(time.time() * 1000.0), "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ"), "host": "", "action": constants.CVE_SCAN_STATUS_STOPPED, "host_name": "", "node_id": latest_cve_scan_doc.get("node_id", ""), "node_type": constants.NODE_TYPE_CONTAINER_IMAGE } ESConn.create_doc(constants.CVE_SCAN_LOGS_INDEX, body) except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) else: for node_id in node_action_details["node_id_list"]: try: node = Node(node_id, df_id_to_scope_id_map=df_id_to_scope_id_map, topology_data_df_format=topology_data_df_format) node.cve_scan_stop() except Exception as ex: save_scheduled_task_status("Error: " + str(ex)) app.logger.error(ex) elif action == constants.NODE_ACTION_SCHEDULE_SEND_REPORT: domain_name = "" console_url_setting = Setting.query.filter_by(key="console_url").one_or_none() if console_url_setting and console_url_setting.value: domain_name = console_url_setting.value.get("value") report_id = uuid.uuid4() body = { "type": constants.REPORT_INDEX, "report_id": report_id, "status": "started", "masked": "false", "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") } ESConn.create_doc(constants.REPORT_INDEX, body, refresh="wait_for") if node_action_details.get('include_dead_nodes') is True: if node_type == 'host': if 
len(node_action_details['filters'].get('host_name', [])) == 0: node_action_details['filters']['host_name'] = get_all_scanned_node() from config.app import celery_app celery_app.send_task( 'tasks.common_worker.generate_report', args=(), kwargs={"report_id": report_id, "filters": node_action_details.get("filters", {}), "lucene_query_string": "", "number": node_action_details.get("duration", {}).get("number", 0), "time_unit": node_action_details.get("duration", {}).get("time_unit", "day"), "domain_name": domain_name, "resources": node_action_details.get("resources", {}), "file_type": node_action_details.get("file_type", "xlsx"), "node_type": node_type, "include_dead_nodes": node_action_details.get("include_dead_nodes", False), "report_email": node_action_details["report_email"]}) return set_response(data="Started") elif action == constants.NODE_ACTION_DOWNLOAD_REPORT: domain_name = "" console_url_setting = Setting.query.filter_by(key="console_url").one_or_none() if console_url_setting and console_url_setting.value: domain_name = console_url_setting.value.get("value") report_id = uuid.uuid4() body = { "type": constants.REPORT_INDEX, "report_id": report_id, "status": "started", "masked": "false", "duration": "", "@timestamp": datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ") } ESConn.create_doc(constants.REPORT_INDEX, body, refresh="wait_for") if node_action_details.get('include_dead_nodes') is True: if node_type == 'host': if len(node_action_details['filters'].get('host_name', [])) == 0: node_action_details['filters']['host_name'] = get_all_scanned_node() from config.app import celery_app celery_app.send_task( 'tasks.common_worker.generate_report', args=(), kwargs={"report_id": report_id, "filters": node_action_details.get("filters", {}), "lucene_query_string": "", "number": node_action_details.get("duration", {}).get("number", 0), "time_unit": node_action_details.get("duration", {}).get("time_unit", "d"), "domain_name": domain_name, "resources": 
node_action_details.get("resources", {}), "file_type": node_action_details.get("file_type", "xlsx"), "node_type": node_type, "include_dead_nodes": node_action_details.get("include_dead_nodes", False), "report_email": ""}) return set_response(data="Started") save_scheduled_task_status("Success")
true
true
f71ccea17261d0989d135fd953f856682a8dd848
1,284
py
Python
python-2-apps/fn_bluecoat_recategorization/setup.py
JayDi11a/Geralds-IBM-SOAR-Integrations
0e0eb18adbaf3a266e1dc5a316df7cd5a93f88d0
[ "MIT" ]
null
null
null
python-2-apps/fn_bluecoat_recategorization/setup.py
JayDi11a/Geralds-IBM-SOAR-Integrations
0e0eb18adbaf3a266e1dc5a316df7cd5a93f88d0
[ "MIT" ]
1
2022-03-06T00:10:13.000Z
2022-03-06T00:10:13.000Z
python-2-apps/fn_bluecoat_recategorization/setup.py
JayDi11a/Geralds-IBM-SOAR-Integrations
0e0eb18adbaf3a266e1dc5a316df7cd5a93f88d0
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Setup script for the 'fn_bluecoat_recategorization' Resilient Circuits package."""

from setuptools import setup, find_packages

setup(
    name='fn_bluecoat_recategorization',
    version='1.0.0',
    license='<<insert here>>',
    author='<<your name here>>',
    author_email='you@example.com',
    url='<<your company url>>',
    description="Resilient Circuits Components for 'fn_bluecoat_recategorization'",
    long_description="Resilient Circuits Components for 'fn_bluecoat_recategorization'",
    install_requires=[
        'resilient_circuits>=30.0.0'
    ],
    packages=find_packages(),
    include_package_data=True,
    platforms='any',
    classifiers=[
        'Programming Language :: Python',
    ],
    entry_points={
        # Register the function component with the Resilient Circuits framework.
        "resilient.circuits.components": [
            "BluecoatSiteReviewRecategorizationFunctionComponent = fn_bluecoat_recategorization.components.bluecoat_site_review_recategorization:FunctionComponent"
        ],
        # app.config section generator, customization data, and selftest hooks.
        "resilient.circuits.configsection": ["gen_config = fn_bluecoat_recategorization.util.config:config_section_data"],
        "resilient.circuits.customize": ["customize = fn_bluecoat_recategorization.util.customize:customization_data"],
        "resilient.circuits.selftest": ["selftest = fn_bluecoat_recategorization.util.selftest:selftest_function"]
    }
)
40.125
163
0.719626
from setuptools import setup, find_packages setup( name='fn_bluecoat_recategorization', version='1.0.0', license='<<insert here>>', author='<<your name here>>', author_email='you@example.com', url='<<your company url>>', description="Resilient Circuits Components for 'fn_bluecoat_recategorization'", long_description="Resilient Circuits Components for 'fn_bluecoat_recategorization'", install_requires=[ 'resilient_circuits>=30.0.0' ], packages=find_packages(), include_package_data=True, platforms='any', classifiers=[ 'Programming Language :: Python', ], entry_points={ "resilient.circuits.components": [ "BluecoatSiteReviewRecategorizationFunctionComponent = fn_bluecoat_recategorization.components.bluecoat_site_review_recategorization:FunctionComponent" ], "resilient.circuits.configsection": ["gen_config = fn_bluecoat_recategorization.util.config:config_section_data"], "resilient.circuits.customize": ["customize = fn_bluecoat_recategorization.util.customize:customization_data"], "resilient.circuits.selftest": ["selftest = fn_bluecoat_recategorization.util.selftest:selftest_function"] } )
true
true
f71ccec79ef122b387b104a8ce307bf488b1bd1c
15,996
py
Python
src/auxil/eeMad_run.py
mortcanty/EESARDocker
855b41a3da19f3b07b42438784309ab48fc7fe98
[ "MIT" ]
23
2018-11-11T02:43:42.000Z
2021-08-21T21:53:19.000Z
src/auxil/eeMad_run.py
mortcanty/EESARDocker
855b41a3da19f3b07b42438784309ab48fc7fe98
[ "MIT" ]
8
2020-03-24T16:25:42.000Z
2021-08-23T20:35:38.000Z
src/auxil/eeMad_run.py
mortcanty/EESARDocker
855b41a3da19f3b07b42438784309ab48fc7fe98
[ "MIT" ]
8
2019-12-20T13:33:47.000Z
2021-10-24T02:18:37.000Z
''' Created on 08.04.2019 @author: mort ipywidget interface to the GEE for IR-MAD ''' import ee, time, warnings, math import ipywidgets as widgets from IPython.display import display from ipyleaflet import (Map,DrawControl,TileLayer, basemaps,basemap_to_tiles, LayersControl, MeasureControl, FullScreenControl) from auxil.eeMad import imad,radcal from geopy.geocoders import photon ee.Initialize() geolocator = photon.Photon(timeout=10) warnings.filterwarnings("ignore", message="numpy.dtype size changed") warnings.filterwarnings("ignore", message="numpy.ufunc size changed") poly = ee.Geometry.MultiPolygon([]) # poly = ee.Geometry.Polygon([[6.30154, 50.948329], [6.293307, 50.877329], # [6.427091, 50.875595], [6.417486, 50.947464], # [6.30154, 50.948329]]) def chi2cdf(chi2,df): ''' Chi square cumulative distribution function ''' return ee.Image(chi2.divide(2)).gammainc(ee.Number(df).divide(2)) def makefeature(data): ''' for exporting as CSV to Drive ''' return ee.Feature(None, {'data': data}) def handle_draw(self, action, geo_json): global poly coords = geo_json['geometry']['coordinates'] if action == 'created': poly = ee.Geometry.MultiPolygon(poly.coordinates().add(coords)) w_preview.disabled = True w_export.disabled = True w_collect.disabled = False elif action == 'deleted': poly1 = ee.Geometry.MultiPolygon(coords) poly = poly.difference(poly1) if len(poly.coordinates().getInfo()) == 0: w_collect.disabled = True dc = DrawControl(polyline={},circle={}) dc.on_draw(handle_draw) # def GetTileLayerUrl(ee_image_object): # map_id = ee.Image(ee_image_object).getMapId() # tile_url_template = "https://earthengine.googleapis.com/map/{mapid}/{{z}}/{{x}}/{{y}}?token={token}" # return tile_url_template.format(**map_id) def GetTileLayerUrl(ee_image_object): map_id = ee.Image(ee_image_object).getMapId() return map_id["tile_fetcher"].url_format w_text = widgets.Textarea( layout = widgets.Layout(width='75%'), value = 'Algorithm output', rows = 4, disabled = False ) w_platform = 
widgets.RadioButtons( options=['SENTINEL/S2(VNIR/SWIR)','SENTINEL/S2(NIR/SWIR)','LANDSAT LC08','LANDSAT LE07','LANDSAT LT05'], value='SENTINEL/S2(VNIR/SWIR)', description='Platform:', disabled=False ) w_startdate1 = widgets.Text( value='2020-05-01', placeholder=' ', description='Start T1:', disabled=False ) w_enddate1 = widgets.Text( value='2020-07-01', placeholder=' ', description='End T1:', disabled=False ) w_startdate2 = widgets.Text( value='2020-08-01', placeholder=' ', description='Start T2:', disabled=False ) w_enddate2 = widgets.Text( value='2020-10-01', placeholder=' ', description='End T2:', disabled=False ) w_iterations = widgets.IntText( value=30, placeholder=' ', description='Max Iter:', disabled=False ) w_scale = widgets.IntText( value=30, placeholder=' ', description='Scale:', disabled=False ) w_exportname = widgets.Text( value='users/<username>/<path>', placeholder=' ', disabled=False ) w_location = widgets.Text( value='Jülich', placeholder=' ', description='', disabled=False ) w_goto = widgets.Button(description="GoTo",disabled=False) w_collect = widgets.Button(description="Collect",disabled=True) w_preview = widgets.Button(description="Preview",disabled=True) w_export = widgets.Button(description='Export to assets',disabled=True) w_dates1 = widgets.VBox([w_startdate1,w_enddate1,w_iterations]) w_dates2 = widgets.VBox([w_startdate2,w_enddate2,w_scale]) w_dates = widgets.HBox([w_platform,w_dates1,w_dates2]) w_exp = widgets.HBox([w_export,w_exportname]) w_go = widgets.HBox([w_collect,w_preview,w_exp]) w_txt = widgets.HBox([w_text,w_goto,w_location]) box = widgets.VBox([w_txt,w_dates,w_go]) def on_widget_change(b): w_preview.disabled = True w_export.disabled = True w_platform.observe(on_widget_change,names='value') w_startdate1.observe(on_widget_change,names='value') w_enddate1.observe(on_widget_change,names='value') w_startdate2.observe(on_widget_change,names='value') w_enddate2.observe(on_widget_change,names='value') def on_goto_button_clicked(b): 
try: location = geolocator.geocode(w_location.value) m.center = (location.latitude,location.longitude) m.zoom = 11 except Exception as e: print('Error: %s'%e) w_goto.on_click(on_goto_button_clicked) def on_collect_button_clicked(b): global result,m,collection,count, \ w_startdate1,w_enddate1,w_startdate2, \ w_platfform,w_enddate2,w_changemap, \ scale,nbands, \ image1,image2, \ madnames,coords,timestamp1,timestamp2 try: coords = ee.List(poly.bounds().coordinates().get(0)) w_text.value = 'collecting, please wait ...' cloudcover = 'CLOUD_COVER' scale = 30.0 rgb = ['B4','B5','B7'] if w_platform.value=='SENTINEL/S2(VNIR/SWIR)': collectionid = 'COPERNICUS/S2' scale = 10.0 bands = ['B2','B3','B4','B8'] rgb = ['B8','B4','B3'] cloudcover = 'CLOUDY_PIXEL_PERCENTAGE' elif w_platform.value=='SENTINEL/S2(NIR/SWIR)': collectionid = 'COPERNICUS/S2' scale = 20.0 bands = ['B5','B6','B7','B8A','B11','B12'] rgb = ['B5','B7','B11'] cloudcover = 'CLOUDY_PIXEL_PERCENTAGE' elif w_platform.value=='LANDSAT LC08': collectionid = 'LANDSAT/LC08/C01/T1_RT_TOA' bands = ['B2','B3','B4','B5','B6','B7'] rgb = ['B5','B6','B7'] elif w_platform.value=='LANDSAT LE07': collectionid = 'LANDSAT/LE07/C01/T1_RT_TOA' bands = ['B1','B2','B3','B4','B5','B7'] else: collectionid = 'LANDSAT/LT05/C01/T1_TOA' bands = ['B1','B2','B3','B4','B5','B7'] collection1 = ee.ImageCollection(collectionid) \ .filterBounds(ee.Geometry.Point(coords.get(0))) \ .filterBounds(ee.Geometry.Point(coords.get(1))) \ .filterBounds(ee.Geometry.Point(coords.get(2))) \ .filterBounds(ee.Geometry.Point(coords.get(3))) \ .filterDate(ee.Date(w_startdate1.value), ee.Date(w_enddate1.value)) \ .sort(cloudcover, True) count = collection1.size().getInfo() if count==0: raise ValueError('No images found for first time interval: '+collectionid) collection2 = ee.ImageCollection(collectionid) \ .filterBounds(ee.Geometry.Point(coords.get(0))) \ .filterBounds(ee.Geometry.Point(coords.get(1))) \ .filterBounds(ee.Geometry.Point(coords.get(2))) \ 
.filterBounds(ee.Geometry.Point(coords.get(3))) \ .filterDate(ee.Date(w_startdate2.value), ee.Date(w_enddate2.value)) \ .sort(cloudcover, True) count = collection2.size().getInfo() if count==0: raise ValueError('No images found for second time interval: '+collectionid) image1 = ee.Image(collection1.first()).select(bands) timestamp1 = ee.Date(image1.get('system:time_start')).getInfo() timestamp1 = time.gmtime(int(timestamp1['value'])/1000) timestamp1 = time.strftime('%c', timestamp1) systemid1 = image1.get('system:id').getInfo() cloudcover1 = image1.get(cloudcover).getInfo() image2 = ee.Image(collection2.first()).select(bands) timestamp2 = ee.Date(image2.get('system:time_start')).getInfo() timestamp2 = time.gmtime(int(timestamp2['value'])/1000) timestamp2 = time.strftime('%c', timestamp2) systemid2 = image2.get('system:id').getInfo() cloudcover2 = image2.get(cloudcover).getInfo() txt = 'Image1: %s \n'%systemid1 txt += 'Acquisition date: %s, Cloud cover: %f \n'%(timestamp1,cloudcover1) txt += 'Image2: %s \n'%systemid2 txt += 'Acquisition date: %s, Cloud cover: %f \n'%(timestamp2,cloudcover2) w_text.value = txt nbands = image1.bandNames().length() madnames = ['MAD'+str(i+1) for i in range(nbands.getInfo())] # co-register image2 = image2.register(image1,60) w_preview.disabled = False w_export.disabled = False # display first image if len(m.layers)>3: m.remove_layer(m.layers[3]) img = image1.clip(poly).select(rgb).rename('r','g','b') ps = img.reduceRegion(ee.Reducer.percentile([2,98]),maxPixels=1e10).getInfo() mn = [ps['r_p2'],ps['g_p2'],ps['b_p2']] mx = [ps['r_p98'],ps['g_p98'],ps['b_p98']] m.add_layer(TileLayer(url=GetTileLayerUrl(img.visualize(min=mn,max=mx)))) except Exception as e: w_text.value = 'Error: %s'%e w_collect.on_click(on_collect_button_clicked) def on_preview_button_clicked(b): global nbands try: w_text.value = 'iteration started, please wait ...\n' # iMAD inputlist = ee.List.sequence(1,w_iterations.value) first = ee.Dictionary({'done':ee.Number(0), 
'scale':ee.Number(w_scale.value), 'niter':ee.Number(0), 'image':image1.addBands(image2).clip(poly), 'allrhos': [ee.List.sequence(1,nbands)], 'chi2':ee.Image.constant(0), 'MAD':ee.Image.constant(0)}) result = ee.Dictionary(inputlist.iterate(imad,first)) MAD = ee.Image(result.get('MAD')).rename(madnames) niter = ee.Number(result.get('niter')).getInfo() # threshold nbands = MAD.bandNames().length() chi2 = ee.Image(result.get('chi2')).rename(['chi2']) pval = chi2cdf(chi2,nbands).subtract(1).multiply(-1) tst = pval.gt(ee.Image.constant(0.0001)) MAD = MAD.where(tst,ee.Image.constant(0)) allrhos = ee.Array(result.get('allrhos')).toList() txt = 'Canonical correlations: %s \nIterations: %i\n'%(str(allrhos.get(-1).getInfo()),niter) w_text.value += txt if len(m.layers)>3: m.remove_layer(m.layers[3]) MAD2 = MAD.select(1).rename('b') ps = MAD2.reduceRegion(ee.Reducer.percentile([1,99])).getInfo() mn = ps['b_p1'] mx = ps['b_p99'] m.add_layer(TileLayer(url=GetTileLayerUrl( MAD2.visualize(min=mn,max=mx)))) except Exception as e: w_text.value = 'Error: %s\n Retry collect/preview or export to assets'%e w_preview.on_click(on_preview_button_clicked) def on_export_button_clicked(b): global w_exportname, nbands try: # iMAD inputlist = ee.List.sequence(1,w_iterations.value) first = ee.Dictionary({'done':ee.Number(0), 'scale':ee.Number(w_scale.value), 'niter':ee.Number(0), 'image':image1.addBands(image2).clip(poly), 'allrhos': [ee.List.sequence(1,nbands)], 'chi2':ee.Image.constant(0), 'MAD':ee.Image.constant(0)}) result = ee.Dictionary(inputlist.iterate(imad,first)) MAD = ee.Image(result.get('MAD')).rename(madnames) # threshold chi2 = ee.Image(result.get('chi2')).rename(['chi2']) pval = chi2cdf(chi2,nbands).subtract(1).multiply(-1) tst = pval.gt(ee.Image.constant(0.0001)) MAD = MAD.where(tst,ee.Image.constant(0)) allrhos = ee.Array(result.get('allrhos')).toList().slice(1,-1) # radcal ncmask = chi2cdf(chi2,nbands).lt(ee.Image.constant(0.05)).rename(['invarpix']) inputlist1 = 
ee.List.sequence(0,nbands.subtract(1)) first = ee.Dictionary({'image':image1.addBands(image2), 'ncmask':ncmask, 'nbands':nbands, 'scale':ee.Number(w_scale.value), 'rect':poly, 'coeffs': ee.List([]), 'normalized':ee.Image()}) result1 = ee.Dictionary(inputlist1.iterate(radcal,first)) coeffs = ee.List(result1.get('coeffs')) sel = ee.List.sequence(1,nbands) normalized = ee.Image(result1.get('normalized')).select(sel) MADs = ee.Image.cat(MAD,chi2,ncmask,image1.clip(poly),image2.clip(poly),normalized) assexport = ee.batch.Export.image.toAsset(MADs, description='assetExportTask', assetId=w_exportname.value,scale=scale,maxPixels=1e9) assexport.start() assexportid = str(assexport.id) w_text.value= 'Exporting change map, chisqr, original images and normalized image to %s\n task id: %s'%(w_exportname.value,assexportid) except Exception as e: w_text.value = 'Error: %s'%e # export metadata to drive ninvar = ee.String(ncmask.reduceRegion(ee.Reducer.sum().unweighted(), scale=scale,maxPixels= 1e9).toArray().project([0])) metadata = ee.List(['IR-MAD: '+time.asctime(), 'Platform: '+w_platform.value, 'Asset export name: '+w_exportname.value, 'Timestamps: %s %s'%(timestamp1,timestamp2)]) \ .cat(['Canonical Correlations:']) \ .cat(allrhos) \ .cat(['Radiometric Normalization, Invariant Pixels:']) \ .cat([ninvar]) \ .cat(['Slope, Intercept, R:']) \ .cat(coeffs) fileNamePrefix=w_exportname.value.replace('/','-') gdexport = ee.batch.Export.table.toDrive(ee.FeatureCollection(metadata.map(makefeature)).merge(ee.Feature(poly)), description='driveExportTask_meta', folder = 'gee', fileNamePrefix=fileNamePrefix ) gdexport.start() w_text.value += '\n Exporting metadata to Drive/EarthEngineImages/%s\n task id: %s'%(fileNamePrefix,str(gdexport.id)) w_export.on_click(on_export_button_clicked) def run(): global m,center center = [51.0,6.4] osm = basemap_to_tiles(basemaps.OpenStreetMap.Mapnik) ews = basemap_to_tiles(basemaps.Esri.WorldStreetMap) ewi = basemap_to_tiles(basemaps.Esri.WorldImagery) dc = 
DrawControl(polyline={},circlemarker={}) dc.rectangle = {"shapeOptions": {"fillColor": "#0000ff","color": "#0000ff","fillOpacity": 0.05}} dc.polygon = {"shapeOptions": {"fillColor": "#0000ff","color": "#0000ff","fillOpacity": 0.05}} dc.on_draw(handle_draw) lc = LayersControl(position='topright') fs = FullScreenControl(position='topleft') mc = MeasureControl(position='topright',primary_length_unit = 'kilometers') m = Map(center=center, zoom=11, layout={'height':'500px'},layers=(ewi,ews,osm),controls=(mc,dc,lc,fs)) # m = Map(center=center, zoom=11, layout={'height':'500px'},controls=(lc,dc,fs,mc,sm_control)) display(m) return box
42.429708
157
0.582146
import ee, time, warnings, math import ipywidgets as widgets from IPython.display import display from ipyleaflet import (Map,DrawControl,TileLayer, basemaps,basemap_to_tiles, LayersControl, MeasureControl, FullScreenControl) from auxil.eeMad import imad,radcal from geopy.geocoders import photon ee.Initialize() geolocator = photon.Photon(timeout=10) warnings.filterwarnings("ignore", message="numpy.dtype size changed") warnings.filterwarnings("ignore", message="numpy.ufunc size changed") poly = ee.Geometry.MultiPolygon([]) def chi2cdf(chi2,df): return ee.Image(chi2.divide(2)).gammainc(ee.Number(df).divide(2)) def makefeature(data): return ee.Feature(None, {'data': data}) def handle_draw(self, action, geo_json): global poly coords = geo_json['geometry']['coordinates'] if action == 'created': poly = ee.Geometry.MultiPolygon(poly.coordinates().add(coords)) w_preview.disabled = True w_export.disabled = True w_collect.disabled = False elif action == 'deleted': poly1 = ee.Geometry.MultiPolygon(coords) poly = poly.difference(poly1) if len(poly.coordinates().getInfo()) == 0: w_collect.disabled = True dc = DrawControl(polyline={},circle={}) dc.on_draw(handle_draw) def GetTileLayerUrl(ee_image_object): map_id = ee.Image(ee_image_object).getMapId() return map_id["tile_fetcher"].url_format w_text = widgets.Textarea( layout = widgets.Layout(width='75%'), value = 'Algorithm output', rows = 4, disabled = False ) w_platform = widgets.RadioButtons( options=['SENTINEL/S2(VNIR/SWIR)','SENTINEL/S2(NIR/SWIR)','LANDSAT LC08','LANDSAT LE07','LANDSAT LT05'], value='SENTINEL/S2(VNIR/SWIR)', description='Platform:', disabled=False ) w_startdate1 = widgets.Text( value='2020-05-01', placeholder=' ', description='Start T1:', disabled=False ) w_enddate1 = widgets.Text( value='2020-07-01', placeholder=' ', description='End T1:', disabled=False ) w_startdate2 = widgets.Text( value='2020-08-01', placeholder=' ', description='Start T2:', disabled=False ) w_enddate2 = widgets.Text( value='2020-10-01', 
placeholder=' ', description='End T2:', disabled=False ) w_iterations = widgets.IntText( value=30, placeholder=' ', description='Max Iter:', disabled=False ) w_scale = widgets.IntText( value=30, placeholder=' ', description='Scale:', disabled=False ) w_exportname = widgets.Text( value='users/<username>/<path>', placeholder=' ', disabled=False ) w_location = widgets.Text( value='Jülich', placeholder=' ', description='', disabled=False ) w_goto = widgets.Button(description="GoTo",disabled=False) w_collect = widgets.Button(description="Collect",disabled=True) w_preview = widgets.Button(description="Preview",disabled=True) w_export = widgets.Button(description='Export to assets',disabled=True) w_dates1 = widgets.VBox([w_startdate1,w_enddate1,w_iterations]) w_dates2 = widgets.VBox([w_startdate2,w_enddate2,w_scale]) w_dates = widgets.HBox([w_platform,w_dates1,w_dates2]) w_exp = widgets.HBox([w_export,w_exportname]) w_go = widgets.HBox([w_collect,w_preview,w_exp]) w_txt = widgets.HBox([w_text,w_goto,w_location]) box = widgets.VBox([w_txt,w_dates,w_go]) def on_widget_change(b): w_preview.disabled = True w_export.disabled = True w_platform.observe(on_widget_change,names='value') w_startdate1.observe(on_widget_change,names='value') w_enddate1.observe(on_widget_change,names='value') w_startdate2.observe(on_widget_change,names='value') w_enddate2.observe(on_widget_change,names='value') def on_goto_button_clicked(b): try: location = geolocator.geocode(w_location.value) m.center = (location.latitude,location.longitude) m.zoom = 11 except Exception as e: print('Error: %s'%e) w_goto.on_click(on_goto_button_clicked) def on_collect_button_clicked(b): global result,m,collection,count, \ w_startdate1,w_enddate1,w_startdate2, \ w_platfform,w_enddate2,w_changemap, \ scale,nbands, \ image1,image2, \ madnames,coords,timestamp1,timestamp2 try: coords = ee.List(poly.bounds().coordinates().get(0)) w_text.value = 'collecting, please wait ...' 
cloudcover = 'CLOUD_COVER' scale = 30.0 rgb = ['B4','B5','B7'] if w_platform.value=='SENTINEL/S2(VNIR/SWIR)': collectionid = 'COPERNICUS/S2' scale = 10.0 bands = ['B2','B3','B4','B8'] rgb = ['B8','B4','B3'] cloudcover = 'CLOUDY_PIXEL_PERCENTAGE' elif w_platform.value=='SENTINEL/S2(NIR/SWIR)': collectionid = 'COPERNICUS/S2' scale = 20.0 bands = ['B5','B6','B7','B8A','B11','B12'] rgb = ['B5','B7','B11'] cloudcover = 'CLOUDY_PIXEL_PERCENTAGE' elif w_platform.value=='LANDSAT LC08': collectionid = 'LANDSAT/LC08/C01/T1_RT_TOA' bands = ['B2','B3','B4','B5','B6','B7'] rgb = ['B5','B6','B7'] elif w_platform.value=='LANDSAT LE07': collectionid = 'LANDSAT/LE07/C01/T1_RT_TOA' bands = ['B1','B2','B3','B4','B5','B7'] else: collectionid = 'LANDSAT/LT05/C01/T1_TOA' bands = ['B1','B2','B3','B4','B5','B7'] collection1 = ee.ImageCollection(collectionid) \ .filterBounds(ee.Geometry.Point(coords.get(0))) \ .filterBounds(ee.Geometry.Point(coords.get(1))) \ .filterBounds(ee.Geometry.Point(coords.get(2))) \ .filterBounds(ee.Geometry.Point(coords.get(3))) \ .filterDate(ee.Date(w_startdate1.value), ee.Date(w_enddate1.value)) \ .sort(cloudcover, True) count = collection1.size().getInfo() if count==0: raise ValueError('No images found for first time interval: '+collectionid) collection2 = ee.ImageCollection(collectionid) \ .filterBounds(ee.Geometry.Point(coords.get(0))) \ .filterBounds(ee.Geometry.Point(coords.get(1))) \ .filterBounds(ee.Geometry.Point(coords.get(2))) \ .filterBounds(ee.Geometry.Point(coords.get(3))) \ .filterDate(ee.Date(w_startdate2.value), ee.Date(w_enddate2.value)) \ .sort(cloudcover, True) count = collection2.size().getInfo() if count==0: raise ValueError('No images found for second time interval: '+collectionid) image1 = ee.Image(collection1.first()).select(bands) timestamp1 = ee.Date(image1.get('system:time_start')).getInfo() timestamp1 = time.gmtime(int(timestamp1['value'])/1000) timestamp1 = time.strftime('%c', timestamp1) systemid1 = 
image1.get('system:id').getInfo() cloudcover1 = image1.get(cloudcover).getInfo() image2 = ee.Image(collection2.first()).select(bands) timestamp2 = ee.Date(image2.get('system:time_start')).getInfo() timestamp2 = time.gmtime(int(timestamp2['value'])/1000) timestamp2 = time.strftime('%c', timestamp2) systemid2 = image2.get('system:id').getInfo() cloudcover2 = image2.get(cloudcover).getInfo() txt = 'Image1: %s \n'%systemid1 txt += 'Acquisition date: %s, Cloud cover: %f \n'%(timestamp1,cloudcover1) txt += 'Image2: %s \n'%systemid2 txt += 'Acquisition date: %s, Cloud cover: %f \n'%(timestamp2,cloudcover2) w_text.value = txt nbands = image1.bandNames().length() madnames = ['MAD'+str(i+1) for i in range(nbands.getInfo())] image2 = image2.register(image1,60) w_preview.disabled = False w_export.disabled = False if len(m.layers)>3: m.remove_layer(m.layers[3]) img = image1.clip(poly).select(rgb).rename('r','g','b') ps = img.reduceRegion(ee.Reducer.percentile([2,98]),maxPixels=1e10).getInfo() mn = [ps['r_p2'],ps['g_p2'],ps['b_p2']] mx = [ps['r_p98'],ps['g_p98'],ps['b_p98']] m.add_layer(TileLayer(url=GetTileLayerUrl(img.visualize(min=mn,max=mx)))) except Exception as e: w_text.value = 'Error: %s'%e w_collect.on_click(on_collect_button_clicked) def on_preview_button_clicked(b): global nbands try: w_text.value = 'iteration started, please wait ...\n' inputlist = ee.List.sequence(1,w_iterations.value) first = ee.Dictionary({'done':ee.Number(0), 'scale':ee.Number(w_scale.value), 'niter':ee.Number(0), 'image':image1.addBands(image2).clip(poly), 'allrhos': [ee.List.sequence(1,nbands)], 'chi2':ee.Image.constant(0), 'MAD':ee.Image.constant(0)}) result = ee.Dictionary(inputlist.iterate(imad,first)) MAD = ee.Image(result.get('MAD')).rename(madnames) niter = ee.Number(result.get('niter')).getInfo() nbands = MAD.bandNames().length() chi2 = ee.Image(result.get('chi2')).rename(['chi2']) pval = chi2cdf(chi2,nbands).subtract(1).multiply(-1) tst = pval.gt(ee.Image.constant(0.0001)) MAD = 
MAD.where(tst,ee.Image.constant(0)) allrhos = ee.Array(result.get('allrhos')).toList() txt = 'Canonical correlations: %s \nIterations: %i\n'%(str(allrhos.get(-1).getInfo()),niter) w_text.value += txt if len(m.layers)>3: m.remove_layer(m.layers[3]) MAD2 = MAD.select(1).rename('b') ps = MAD2.reduceRegion(ee.Reducer.percentile([1,99])).getInfo() mn = ps['b_p1'] mx = ps['b_p99'] m.add_layer(TileLayer(url=GetTileLayerUrl( MAD2.visualize(min=mn,max=mx)))) except Exception as e: w_text.value = 'Error: %s\n Retry collect/preview or export to assets'%e w_preview.on_click(on_preview_button_clicked) def on_export_button_clicked(b): global w_exportname, nbands try: inputlist = ee.List.sequence(1,w_iterations.value) first = ee.Dictionary({'done':ee.Number(0), 'scale':ee.Number(w_scale.value), 'niter':ee.Number(0), 'image':image1.addBands(image2).clip(poly), 'allrhos': [ee.List.sequence(1,nbands)], 'chi2':ee.Image.constant(0), 'MAD':ee.Image.constant(0)}) result = ee.Dictionary(inputlist.iterate(imad,first)) MAD = ee.Image(result.get('MAD')).rename(madnames) chi2 = ee.Image(result.get('chi2')).rename(['chi2']) pval = chi2cdf(chi2,nbands).subtract(1).multiply(-1) tst = pval.gt(ee.Image.constant(0.0001)) MAD = MAD.where(tst,ee.Image.constant(0)) allrhos = ee.Array(result.get('allrhos')).toList().slice(1,-1) ncmask = chi2cdf(chi2,nbands).lt(ee.Image.constant(0.05)).rename(['invarpix']) inputlist1 = ee.List.sequence(0,nbands.subtract(1)) first = ee.Dictionary({'image':image1.addBands(image2), 'ncmask':ncmask, 'nbands':nbands, 'scale':ee.Number(w_scale.value), 'rect':poly, 'coeffs': ee.List([]), 'normalized':ee.Image()}) result1 = ee.Dictionary(inputlist1.iterate(radcal,first)) coeffs = ee.List(result1.get('coeffs')) sel = ee.List.sequence(1,nbands) normalized = ee.Image(result1.get('normalized')).select(sel) MADs = ee.Image.cat(MAD,chi2,ncmask,image1.clip(poly),image2.clip(poly),normalized) assexport = ee.batch.Export.image.toAsset(MADs, description='assetExportTask', 
assetId=w_exportname.value,scale=scale,maxPixels=1e9) assexport.start() assexportid = str(assexport.id) w_text.value= 'Exporting change map, chisqr, original images and normalized image to %s\n task id: %s'%(w_exportname.value,assexportid) except Exception as e: w_text.value = 'Error: %s'%e ninvar = ee.String(ncmask.reduceRegion(ee.Reducer.sum().unweighted(), scale=scale,maxPixels= 1e9).toArray().project([0])) metadata = ee.List(['IR-MAD: '+time.asctime(), 'Platform: '+w_platform.value, 'Asset export name: '+w_exportname.value, 'Timestamps: %s %s'%(timestamp1,timestamp2)]) \ .cat(['Canonical Correlations:']) \ .cat(allrhos) \ .cat(['Radiometric Normalization, Invariant Pixels:']) \ .cat([ninvar]) \ .cat(['Slope, Intercept, R:']) \ .cat(coeffs) fileNamePrefix=w_exportname.value.replace('/','-') gdexport = ee.batch.Export.table.toDrive(ee.FeatureCollection(metadata.map(makefeature)).merge(ee.Feature(poly)), description='driveExportTask_meta', folder = 'gee', fileNamePrefix=fileNamePrefix ) gdexport.start() w_text.value += '\n Exporting metadata to Drive/EarthEngineImages/%s\n task id: %s'%(fileNamePrefix,str(gdexport.id)) w_export.on_click(on_export_button_clicked) def run(): global m,center center = [51.0,6.4] osm = basemap_to_tiles(basemaps.OpenStreetMap.Mapnik) ews = basemap_to_tiles(basemaps.Esri.WorldStreetMap) ewi = basemap_to_tiles(basemaps.Esri.WorldImagery) dc = DrawControl(polyline={},circlemarker={}) dc.rectangle = {"shapeOptions": {"fillColor": "#0000ff","color": "#0000ff","fillOpacity": 0.05}} dc.polygon = {"shapeOptions": {"fillColor": "#0000ff","color": "#0000ff","fillOpacity": 0.05}} dc.on_draw(handle_draw) lc = LayersControl(position='topright') fs = FullScreenControl(position='topleft') mc = MeasureControl(position='topright',primary_length_unit = 'kilometers') m = Map(center=center, zoom=11, layout={'height':'500px'},layers=(ewi,ews,osm),controls=(mc,dc,lc,fs)) display(m) return box
true
true
f71ccf566a61303c7989a45da65942c1b7aef635
10,678
py
Python
examples/scales/step1.py
KNPSystem/server
85aa991cf86b10330054bd8ea4a12543851cb9fc
[ "MIT" ]
null
null
null
examples/scales/step1.py
KNPSystem/server
85aa991cf86b10330054bd8ea4a12543851cb9fc
[ "MIT" ]
null
null
null
examples/scales/step1.py
KNPSystem/server
85aa991cf86b10330054bd8ea4a12543851cb9fc
[ "MIT" ]
null
null
null
""" Get the data from covidtracking.com. Store it in a knpsValue. Assign to a knpsVariable. Everytime we run this, the knpsVariable is not changed. But the knpsValue it points to should be updated. i.e. a new knpsValue is created. """ import requests import json from collections import defaultdict from datetime import datetime, timedelta from urllib.request import urlopen from lib import get_user_id, create_data_object, update_data_object USER_NAME = "Mike Anderson" USER_EMAIL = "mrander@umich.edu" sample_data_file = "data/Unemployment_data_2019.csv" sample_data_file2 = "data/all_court_records.csv" sample_data_file3 = "data/judicial_districts.csv" sample_data_file4 = "data/fips_counties.csv" if __name__ == "__main__": user_id = get_user_id(USER_EMAIL, USER_NAME) user_id2 = get_user_id("andrewpaley2022@u.northwestern.edu", "Andrew Paley") user_id3 = get_user_id("alicezou@umich.edu", "Jiayun Zou") user_id4 = get_user_id("michjc@csail.mit.edu", "Michael Cafarella") user_id5 = get_user_id("ctm310@yahoo.com", "Carol McLaughlin") with urlopen('https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json') as response: counties = json.load(response) json_obj_data = create_data_object( name = 'GeoJSON US County FIPS data', ownerid = user_id, description = 'Geo FIPS data for US Counties', data = counties, comment = 'Downloaded from Plotly', datatype = '/datatypes/json', mimetype = 'application/json' ) csv_obj_data = create_data_object( name = '2019 U.S. 
Unemployment and Income Data', ownerid = user_id, description = 'Unemployment and income data by county', datafile = sample_data_file, comment = 'Downloaded from USDA', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = '2016 Court Cases - All Districts', ownerid = user_id2, description = 'Court cases by district', datafile = sample_data_file2, comment = 'Downloaded from Scales', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = 'U.S. Judicial Districts by County', ownerid = user_id2, description = 'US counts annotated by Judicial District', datafile = sample_data_file3, comment = 'From the web', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = 'FIPS Codes for US Counties', ownerid = user_id4, description = 'FIPS Codes for US Counties', datafile = sample_data_file4, comment = 'Downloaded from bls.gov', datatype = '/datatypes/csv', mimetype = 'text/csv' ) map_func = """def cloropleth_county_map(dobj_id, columns=[]): from urllib.request import urlopen import json import plotly.graph_objects as go import pandas as pd from io import BytesIO, StringIO GEO_DATA_ID = 25 counties = get_dobj_contents(GEO_DATA_ID) input_data = get_dobj_contents(dobj_id) df = pd.read_csv(StringIO(input_data.decode('utf-8')), dtype={columns[0]: str}) fig = go.Figure(go.Choroplethmapbox(geojson=counties, locations=df[columns[0]], z=df[columns[1]], colorscale="Viridis", zmin=min(df[columns[1]]), zmax=max(df[columns[1]]), marker_opacity=0.5, marker_line_width=0)) fig.update_layout(mapbox_style="carto-positron", mapbox_zoom=5.6, mapbox_center = {"lat": 43.15, "lon": -76.14}) fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0}) #{"lat": 37.0902, "lon": -95.7129} output_buffer = BytesIO() fig.write_image(output_buffer, format='png') output = output_buffer.getvalue() return {'contents': output, 'datatype': '/datatypes/img', 'mimetype': 'image/png', 'predecessors': 
[GEO_DATA_ID]}""" code_obj_data = create_data_object( name = 'US County Chloropleth Map Function', ownerid = user_id, description = 'Function to create Chloropleth Maps from US County Data', code = map_func, comment = 'Inputs: (dobj_id, [fips_col_name, data_col_name])' ) fips_func = """def add_fips_codes_counties(dobj_id, params=[]): # params = (county column, state column) from io import StringIO import csv FIPS_DATA = 29 fips_csv = StringIO(get_dobj_contents(FIPS_DATA).decode()) fips = {} fips_header = {} reader = csv.reader(fips_csv, delimiter=',', quotechar='"') for row in reader: if len(fips_header) == 0: fips_header = {x: i for i, x in enumerate(row)} else: fips[row[fips_header['area_name']]] = row[fips_header['fips_txt']] input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') output = [] header = {} out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') for row in reader: if len(header) == 0: writer.writerow(row + ['fips_code']) header = {x: i for i, x in enumerate(row)} else: county = row[header[params[0]]] state = row[header[params[1]]] if state.lower() in ABBREV_US_STATE: state = ABBREV_US_STATE[state.lower()] fips_code = fips["{}, {}".format(county, state)] writer.writerow(row + [fips_code]) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': [FIPS_DATA]}""" code_obj_data = create_data_object( name = 'Add FIPS', ownerid = user_id3, description = 'Adds additional FIPS column to CSV containing US county column', code = fips_func, comment = 'Inputs: (dobj_id, [county_col_name, state_col_name])' ) filter_func = """def filter_csv_by_text(dobj_id, params=[]): # params = (column to filter, string to match) from io import StringIO import csv input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') output 
= [] header = {} out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') for row in reader: if len(header) == 0: writer.writerow(row) header = {x: i for i, x in enumerate(row)} else: if params[1] in row[header[params[0]]]: writer.writerow(row) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': []}""" code_obj_data = create_data_object( name = 'Filter CSV by text value', ownerid = user_id4, description = 'Function to filter CSV by text value in one column', code = filter_func, comment = 'Inputs: (dobj_id, [col_name, filter_text])' ) filter_func = """def aggregate_csv_mean(dobj_id, params=[]): # params = (group by column, aggegrate column) from io import StringIO import csv input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') header = {} vals = {} for row in reader: if len(header) == 0: header = {x: i for i, x in enumerate(row)} else: if row[header[params[0]]] not in vals: vals[row[header[params[0]]]] = [] try: vals[row[header[params[0]]]].append(float(row[header[params[1]]])) except: pass out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') writer.writerow([params[0], params[1]]) for k, v in vals.items(): writer.writerow([k, sum(v)/len(v)]) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': []}""" code_obj_data = create_data_object( name = 'Mean of CSV column, group by', ownerid = user_id, description = 'Function to find mean of CSV column, grouped by another column', code = filter_func, comment = 'Inputs: (dobj_id, [group by column, aggregate column])' ) filter_func = """def join_csvs(dobj_id, params=[]): # params = (join csv, join column1, join column2, filter_col, filter_val) from io import StringIO import csv import json input_data = get_dobj_contents(dobj_id) csv_file = 
StringIO(input_data.decode()) join_data = get_dobj_contents(params[0]) join_file = StringIO(join_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') header = {} table1 = {} output_header = [] for row in reader: if len(header) == 0: header = {x: i for i, x in enumerate(row)} output_header += row else: join_idx = header[params[1]] if row[join_idx] not in table1: table1[row[join_idx]] = [] table1[row[join_idx]].append(row) reader = csv.reader(join_file, delimiter=',', quotechar='"') out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') join_header = {} for row in reader: if len(join_header) == 0: join_header = {x: i for i, x in enumerate(row)} output_header += row[:join_header[params[2]]] output_header += row[join_header[params[2]]+1:] writer.writerow(output_header) else: if params[3] in join_header and row[join_header[params[3]]] != params[4]: continue join_idx = join_header[params[2]] if row[join_idx] in table1: for t1 in table1[row[join_idx]]: out_data = [] out_data += t1 out_data += row[:join_idx] out_data += row[join_idx+1:] writer.writerow(out_data) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': [params[0]]}""" code_obj_data = create_data_object( name = 'Join CSV', ownerid = user_id5, description = 'Function to join CSVs', code = filter_func, comment = 'Inputs: (dobj_id, [join_table, join_column1, join_column2])' )
34.445161
138
0.613879
import requests import json from collections import defaultdict from datetime import datetime, timedelta from urllib.request import urlopen from lib import get_user_id, create_data_object, update_data_object USER_NAME = "Mike Anderson" USER_EMAIL = "mrander@umich.edu" sample_data_file = "data/Unemployment_data_2019.csv" sample_data_file2 = "data/all_court_records.csv" sample_data_file3 = "data/judicial_districts.csv" sample_data_file4 = "data/fips_counties.csv" if __name__ == "__main__": user_id = get_user_id(USER_EMAIL, USER_NAME) user_id2 = get_user_id("andrewpaley2022@u.northwestern.edu", "Andrew Paley") user_id3 = get_user_id("alicezou@umich.edu", "Jiayun Zou") user_id4 = get_user_id("michjc@csail.mit.edu", "Michael Cafarella") user_id5 = get_user_id("ctm310@yahoo.com", "Carol McLaughlin") with urlopen('https://raw.githubusercontent.com/plotly/datasets/master/geojson-counties-fips.json') as response: counties = json.load(response) json_obj_data = create_data_object( name = 'GeoJSON US County FIPS data', ownerid = user_id, description = 'Geo FIPS data for US Counties', data = counties, comment = 'Downloaded from Plotly', datatype = '/datatypes/json', mimetype = 'application/json' ) csv_obj_data = create_data_object( name = '2019 U.S. Unemployment and Income Data', ownerid = user_id, description = 'Unemployment and income data by county', datafile = sample_data_file, comment = 'Downloaded from USDA', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = '2016 Court Cases - All Districts', ownerid = user_id2, description = 'Court cases by district', datafile = sample_data_file2, comment = 'Downloaded from Scales', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = 'U.S. 
Judicial Districts by County', ownerid = user_id2, description = 'US counts annotated by Judicial District', datafile = sample_data_file3, comment = 'From the web', datatype = '/datatypes/csv', mimetype = 'text/csv' ) csv_obj_data = create_data_object( name = 'FIPS Codes for US Counties', ownerid = user_id4, description = 'FIPS Codes for US Counties', datafile = sample_data_file4, comment = 'Downloaded from bls.gov', datatype = '/datatypes/csv', mimetype = 'text/csv' ) map_func = """def cloropleth_county_map(dobj_id, columns=[]): from urllib.request import urlopen import json import plotly.graph_objects as go import pandas as pd from io import BytesIO, StringIO GEO_DATA_ID = 25 counties = get_dobj_contents(GEO_DATA_ID) input_data = get_dobj_contents(dobj_id) df = pd.read_csv(StringIO(input_data.decode('utf-8')), dtype={columns[0]: str}) fig = go.Figure(go.Choroplethmapbox(geojson=counties, locations=df[columns[0]], z=df[columns[1]], colorscale="Viridis", zmin=min(df[columns[1]]), zmax=max(df[columns[1]]), marker_opacity=0.5, marker_line_width=0)) fig.update_layout(mapbox_style="carto-positron", mapbox_zoom=5.6, mapbox_center = {"lat": 43.15, "lon": -76.14}) fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0}) #{"lat": 37.0902, "lon": -95.7129} output_buffer = BytesIO() fig.write_image(output_buffer, format='png') output = output_buffer.getvalue() return {'contents': output, 'datatype': '/datatypes/img', 'mimetype': 'image/png', 'predecessors': [GEO_DATA_ID]}""" code_obj_data = create_data_object( name = 'US County Chloropleth Map Function', ownerid = user_id, description = 'Function to create Chloropleth Maps from US County Data', code = map_func, comment = 'Inputs: (dobj_id, [fips_col_name, data_col_name])' ) fips_func = """def add_fips_codes_counties(dobj_id, params=[]): # params = (county column, state column) from io import StringIO import csv FIPS_DATA = 29 fips_csv = StringIO(get_dobj_contents(FIPS_DATA).decode()) fips = {} fips_header = {} reader = 
csv.reader(fips_csv, delimiter=',', quotechar='"') for row in reader: if len(fips_header) == 0: fips_header = {x: i for i, x in enumerate(row)} else: fips[row[fips_header['area_name']]] = row[fips_header['fips_txt']] input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') output = [] header = {} out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') for row in reader: if len(header) == 0: writer.writerow(row + ['fips_code']) header = {x: i for i, x in enumerate(row)} else: county = row[header[params[0]]] state = row[header[params[1]]] if state.lower() in ABBREV_US_STATE: state = ABBREV_US_STATE[state.lower()] fips_code = fips["{}, {}".format(county, state)] writer.writerow(row + [fips_code]) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': [FIPS_DATA]}""" code_obj_data = create_data_object( name = 'Add FIPS', ownerid = user_id3, description = 'Adds additional FIPS column to CSV containing US county column', code = fips_func, comment = 'Inputs: (dobj_id, [county_col_name, state_col_name])' ) filter_func = """def filter_csv_by_text(dobj_id, params=[]): # params = (column to filter, string to match) from io import StringIO import csv input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') output = [] header = {} out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') for row in reader: if len(header) == 0: writer.writerow(row) header = {x: i for i, x in enumerate(row)} else: if params[1] in row[header[params[0]]]: writer.writerow(row) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': []}""" code_obj_data = create_data_object( name = 'Filter CSV by text value', ownerid = user_id4, description = 'Function to filter CSV 
by text value in one column', code = filter_func, comment = 'Inputs: (dobj_id, [col_name, filter_text])' ) filter_func = """def aggregate_csv_mean(dobj_id, params=[]): # params = (group by column, aggegrate column) from io import StringIO import csv input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') header = {} vals = {} for row in reader: if len(header) == 0: header = {x: i for i, x in enumerate(row)} else: if row[header[params[0]]] not in vals: vals[row[header[params[0]]]] = [] try: vals[row[header[params[0]]]].append(float(row[header[params[1]]])) except: pass out_str = StringIO() writer = csv.writer(out_str, delimiter=',', quotechar='"') writer.writerow([params[0], params[1]]) for k, v in vals.items(): writer.writerow([k, sum(v)/len(v)]) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': []}""" code_obj_data = create_data_object( name = 'Mean of CSV column, group by', ownerid = user_id, description = 'Function to find mean of CSV column, grouped by another column', code = filter_func, comment = 'Inputs: (dobj_id, [group by column, aggregate column])' ) filter_func = """def join_csvs(dobj_id, params=[]): # params = (join csv, join column1, join column2, filter_col, filter_val) from io import StringIO import csv import json input_data = get_dobj_contents(dobj_id) csv_file = StringIO(input_data.decode()) join_data = get_dobj_contents(params[0]) join_file = StringIO(join_data.decode()) reader = csv.reader(csv_file, delimiter=',', quotechar='"') header = {} table1 = {} output_header = [] for row in reader: if len(header) == 0: header = {x: i for i, x in enumerate(row)} output_header += row else: join_idx = header[params[1]] if row[join_idx] not in table1: table1[row[join_idx]] = [] table1[row[join_idx]].append(row) reader = csv.reader(join_file, delimiter=',', quotechar='"') out_str = StringIO() writer = 
csv.writer(out_str, delimiter=',', quotechar='"') join_header = {} for row in reader: if len(join_header) == 0: join_header = {x: i for i, x in enumerate(row)} output_header += row[:join_header[params[2]]] output_header += row[join_header[params[2]]+1:] writer.writerow(output_header) else: if params[3] in join_header and row[join_header[params[3]]] != params[4]: continue join_idx = join_header[params[2]] if row[join_idx] in table1: for t1 in table1[row[join_idx]]: out_data = [] out_data += t1 out_data += row[:join_idx] out_data += row[join_idx+1:] writer.writerow(out_data) return {'contents': out_str.getvalue().encode(), 'datatype': '/datatypes/csv', 'mimetype': 'text/csv', 'predecessors': [params[0]]}""" code_obj_data = create_data_object( name = 'Join CSV', ownerid = user_id5, description = 'Function to join CSVs', code = filter_func, comment = 'Inputs: (dobj_id, [join_table, join_column1, join_column2])' )
true
true
f71ccf5e40a5d9802ed0c6e2043dda4342fb9258
391
py
Python
serveup/wsgi.py
ASquirrelsTail/serve-up
9533ba82f5b4989434b3b20352d17a8131bb9619
[ "MIT" ]
null
null
null
serveup/wsgi.py
ASquirrelsTail/serve-up
9533ba82f5b4989434b3b20352d17a8131bb9619
[ "MIT" ]
10
2021-03-30T14:05:21.000Z
2022-03-12T00:41:15.000Z
serveup/wsgi.py
ASquirrelsTail/serve-up
9533ba82f5b4989434b3b20352d17a8131bb9619
[ "MIT" ]
null
null
null
""" WSGI config for serveup project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'serveup.settings') application = get_wsgi_application()
23
78
0.785166
import os from django.core.wsgi import get_wsgi_application os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'serveup.settings') application = get_wsgi_application()
true
true
f71ccfaa61b4b5f0ebf12cbfe2ca50b9ddc66c78
417
py
Python
pypy/rlib/test/test_rgc.py
camillobruni/pygirl
ddbd442d53061d6ff4af831c1eab153bcc771b5a
[ "MIT" ]
12
2016-01-06T07:10:28.000Z
2021-05-13T23:02:02.000Z
pypy/rlib/test/test_rgc.py
camillobruni/pygirl
ddbd442d53061d6ff4af831c1eab153bcc771b5a
[ "MIT" ]
null
null
null
pypy/rlib/test/test_rgc.py
camillobruni/pygirl
ddbd442d53061d6ff4af831c1eab153bcc771b5a
[ "MIT" ]
2
2016-07-29T07:09:50.000Z
2016-10-16T08:50:26.000Z
from pypy.rpython.test.test_llinterp import gengraph, interpret from pypy.rlib import rgc # Force registration of gc.collect import gc def test_collect(): def f(): return gc.collect() t, typer, graph = gengraph(f, []) ops = list(graph.iterblockops()) assert len(ops) == 1 op = ops[0][1] assert op.opname == 'gc__collect' res = interpret(f, []) assert res is None
20.85
63
0.630695
from pypy.rpython.test.test_llinterp import gengraph, interpret from pypy.rlib import rgc import gc def test_collect(): def f(): return gc.collect() t, typer, graph = gengraph(f, []) ops = list(graph.iterblockops()) assert len(ops) == 1 op = ops[0][1] assert op.opname == 'gc__collect' res = interpret(f, []) assert res is None
true
true
f71cd18cea2ada57b893164f00c100b5a386de43
3,792
py
Python
test/core_arguments.py
sdarwin/build
2c4217ebb6bdeb5001b33a5d0d6718420aef988c
[ "BSL-1.0" ]
106
2015-08-07T04:23:50.000Z
2020-12-27T18:25:15.000Z
test/core_arguments.py
sdarwin/build
2c4217ebb6bdeb5001b33a5d0d6718420aef988c
[ "BSL-1.0" ]
130
2016-06-22T22:11:25.000Z
2020-11-29T20:24:09.000Z
test/core_arguments.py
sdarwin/build
2c4217ebb6bdeb5001b33a5d0d6718420aef988c
[ "BSL-1.0" ]
41
2015-07-08T19:18:35.000Z
2021-01-14T16:39:56.000Z
#!/usr/bin/python # Copyright 2001 Dave Abrahams # Copyright 2011 Steven Watanabe # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE.txt or copy at # https://www.bfgroup.xyz/b2/LICENSE.txt) import BoostBuild def simple_args(start, finish): return " : ".join("%d" % x for x in range(start, finish + 1)) def test(t, type, input, output, status=0): code = ["include echo_args.jam ; echo_%s" % type] if input: code.append(input) code.append(";") t.write("file.jam", " ".join(code)) t.run_build_system(["-ffile.jam"], status=status) t.expect_output_lines(output) def test_args(t, *args, **kwargs): test(t, "args", *args, **kwargs) def test_varargs(t, *args, **kwargs): test(t, "varargs", *args, **kwargs) t = BoostBuild.Tester(pass_toolset=0) t.write("echo_args.jam", """\ NOCARE all ; rule echo_args ( a b ? c ? : d + : e * ) { ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ; } rule echo_varargs ( a b ? c ? : d + : e * : * ) { ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ": rest= "$(4[1]) $(4[2-]) ": "$(5[1]) $(5[2-]) ": "$(6[1]) $(6[2-]) ": "$(7[1]) $(7[2-]) ": "$(8[1]) $(8[2-]) ": "$(9[1]) $(9[2-]) ": "$(10[1]) $(10[2-]) ": "$(11[1]) $(11[2-]) ": "$(12[1]) $(12[2-]) ": "$(13[1]) $(13[2-]) ": "$(14[1]) $(14[2-]) ": "$(15[1]) $(15[2-]) ": "$(16[1]) $(16[2-]) ": "$(17[1]) $(17[2-]) ": "$(18[1]) $(18[2-]) ": "$(19[1]) $(19[2-]) ": "$(20[1]) $(20[2-]) ": "$(21[1]) $(21[2-]) ": "$(22[1]) $(22[2-]) ": "$(23[1]) $(23[2-]) ": "$(24[1]) $(24[2-]) ": "$(25[1]) $(25[2-]) ; } """) test_args(t, "", "* missing argument a", status=1) test_args(t, "1 2 : 3 : 4 : 5", "* extra argument 5", status=1) test_args(t, "a b c1 c2 : d", "* extra argument c2", status=1) # Check modifier '?' 
test_args(t, "1 2 3 : 4", "a= 1 b= 2 c= 3 : d= 4 : e=") test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=") test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=") test_args(t, "1 : 2", "a= 1 b= c= : d= 2 : e=") # Check modifier '+' test_args(t, "1", "* missing argument d", status=1) test_args(t, "1 : 2 3", "a= 1 b= c= : d= 2 3 : e=") test_args(t, "1 : 2 3 4", "a= 1 b= c= : d= 2 3 4 : e=") # Check modifier '*' test_args(t, "1 : 2 : 3", "a= 1 b= c= : d= 2 : e= 3") test_args(t, "1 : 2 : 3 4", "a= 1 b= c= : d= 2 : e= 3 4") test_args(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5") # Check varargs test_varargs(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5") test_varargs(t, "1 : 2 : 3 4 5 : 6", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6") test_varargs(t, "1 : 2 : 3 4 5 : 6 7", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : 9") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : " "16 : 17 : 18 : 19a 19b", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : " "9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : " "16 : 17 : 18 : 19a 19b 19c : 20", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= " "6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b 19c : " "20") # Check varargs upper limit expected = "a= 1 b= c= : d= 2 : e= 3 : rest= " + simple_args(4, 19) test_varargs(t, simple_args(1, 19), expected) test_varargs(t, simple_args(1, 19) + " 19b 19c 19d", expected + " 19b 19c 19d") test_varargs(t, simple_args(1, 19) + " 19b 19c 19d : 20", expected + " 19b " "19c 19d") test_varargs(t, simple_args(1, 20), expected) test_varargs(t, simple_args(1, 50), expected) t.cleanup()
36.461538
79
0.474947
import BoostBuild def simple_args(start, finish): return " : ".join("%d" % x for x in range(start, finish + 1)) def test(t, type, input, output, status=0): code = ["include echo_args.jam ; echo_%s" % type] if input: code.append(input) code.append(";") t.write("file.jam", " ".join(code)) t.run_build_system(["-ffile.jam"], status=status) t.expect_output_lines(output) def test_args(t, *args, **kwargs): test(t, "args", *args, **kwargs) def test_varargs(t, *args, **kwargs): test(t, "varargs", *args, **kwargs) t = BoostBuild.Tester(pass_toolset=0) t.write("echo_args.jam", """\ NOCARE all ; rule echo_args ( a b ? c ? : d + : e * ) { ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ; } rule echo_varargs ( a b ? c ? : d + : e * : * ) { ECHO a= $(a) b= $(b) c= $(c) ":" d= $(d) ":" e= $(e) ": rest= "$(4[1]) $(4[2-]) ": "$(5[1]) $(5[2-]) ": "$(6[1]) $(6[2-]) ": "$(7[1]) $(7[2-]) ": "$(8[1]) $(8[2-]) ": "$(9[1]) $(9[2-]) ": "$(10[1]) $(10[2-]) ": "$(11[1]) $(11[2-]) ": "$(12[1]) $(12[2-]) ": "$(13[1]) $(13[2-]) ": "$(14[1]) $(14[2-]) ": "$(15[1]) $(15[2-]) ": "$(16[1]) $(16[2-]) ": "$(17[1]) $(17[2-]) ": "$(18[1]) $(18[2-]) ": "$(19[1]) $(19[2-]) ": "$(20[1]) $(20[2-]) ": "$(21[1]) $(21[2-]) ": "$(22[1]) $(22[2-]) ": "$(23[1]) $(23[2-]) ": "$(24[1]) $(24[2-]) ": "$(25[1]) $(25[2-]) ; } """) test_args(t, "", "* missing argument a", status=1) test_args(t, "1 2 : 3 : 4 : 5", "* extra argument 5", status=1) test_args(t, "a b c1 c2 : d", "* extra argument c2", status=1) test_args(t, "1 2 3 : 4", "a= 1 b= 2 c= 3 : d= 4 : e=") test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=") test_args(t, "1 2 : 3", "a= 1 b= 2 c= : d= 3 : e=") test_args(t, "1 : 2", "a= 1 b= c= : d= 2 : e=") test_args(t, "1", "* missing argument d", status=1) test_args(t, "1 : 2 3", "a= 1 b= c= : d= 2 3 : e=") test_args(t, "1 : 2 3 4", "a= 1 b= c= : d= 2 3 4 : e=") test_args(t, "1 : 2 : 3", "a= 1 b= c= : d= 2 : e= 3") test_args(t, "1 : 2 : 3 4", "a= 1 b= c= : d= 2 : e= 3 4") test_args(t, "1 : 2 : 3 4 5", "a= 1 b= 
c= : d= 2 : e= 3 4 5") test_varargs(t, "1 : 2 : 3 4 5", "a= 1 b= c= : d= 2 : e= 3 4 5") test_varargs(t, "1 : 2 : 3 4 5 : 6", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6") test_varargs(t, "1 : 2 : 3 4 5 : 6 7", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : 9") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : " "16 : 17 : 18 : 19a 19b", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= 6 7 : 8 : " "9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b") test_varargs(t, "1 : 2 : 3 4 5 : 6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : " "16 : 17 : 18 : 19a 19b 19c : 20", "a= 1 b= c= : d= 2 : e= 3 4 5 : rest= " "6 7 : 8 : 9 : 10 : 11 : 12 : 13 : 14 : 15 : 16 : 17 : 18 : 19a 19b 19c : " "20") expected = "a= 1 b= c= : d= 2 : e= 3 : rest= " + simple_args(4, 19) test_varargs(t, simple_args(1, 19), expected) test_varargs(t, simple_args(1, 19) + " 19b 19c 19d", expected + " 19b 19c 19d") test_varargs(t, simple_args(1, 19) + " 19b 19c 19d : 20", expected + " 19b " "19c 19d") test_varargs(t, simple_args(1, 20), expected) test_varargs(t, simple_args(1, 50), expected) t.cleanup()
true
true
f71cd2549c8bf4f50eefa40b961f15351236ec5b
350
py
Python
openctrl/display.py
openctrl-python/openctrl
5adda5d79262950eceab91b8412ead3d3a13e712
[ "MIT" ]
null
null
null
openctrl/display.py
openctrl-python/openctrl
5adda5d79262950eceab91b8412ead3d3a13e712
[ "MIT" ]
1
2021-06-02T18:59:19.000Z
2021-06-02T18:59:19.000Z
openctrl/display.py
pyopencontrol/openctrl
24f08970052301cb0c4a13fc855b80353a3cb975
[ "MIT" ]
null
null
null
import threading try: from .grab import Image except:pass def grab_bytes(): return Image().asbytes def send(s,a): s.post(b's'+grab_bytes(),a) def show_bytes(r): if not r.startswith('s'):return Image(r[1:]).show() def conf(s,a): def _conf(): while True: send(s,a) threading.Thread(target=_conf).start()
20.588235
42
0.617143
import threading try: from .grab import Image except:pass def grab_bytes(): return Image().asbytes def send(s,a): s.post(b's'+grab_bytes(),a) def show_bytes(r): if not r.startswith('s'):return Image(r[1:]).show() def conf(s,a): def _conf(): while True: send(s,a) threading.Thread(target=_conf).start()
true
true
f71cd2c0ab36287199c78e7dfc110494800caf19
8,955
py
Python
test_pretrain.py
anonymous-cv/cvpr-sub
6307520c73716de73ef63f5239bdac8dda20da41
[ "BSD-3-Clause" ]
null
null
null
test_pretrain.py
anonymous-cv/cvpr-sub
6307520c73716de73ef63f5239bdac8dda20da41
[ "BSD-3-Clause" ]
null
null
null
test_pretrain.py
anonymous-cv/cvpr-sub
6307520c73716de73ef63f5239bdac8dda20da41
[ "BSD-3-Clause" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import os import time import argparse import sys import numpy as np import torch import torch.optim as optim from tqdm import tqdm from network.BEV_Unet import BEV_Unet from network.ptBEV import ptBEVnet from dataloader.dataset import collate_fn_BEV,collate_fn_BEV_test,SemKITTI,SemKITTI_label_name,spherical_dataset,voxel_dataset #ignore weird np warning import warnings warnings.filterwarnings("ignore") def fast_hist(pred, label, n): k = (label >= 0) & (label < n) bin_count=np.bincount( n * label[k].astype(int) + pred[k], minlength=n ** 2) return bin_count[:n ** 2].reshape(n, n) def per_class_iu(hist): return np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist)) def fast_hist_crop(output, target, unique_label): hist = fast_hist(output.flatten(), target.flatten(), np.max(unique_label)+1) hist=hist[unique_label,:] hist=hist[:,unique_label] return hist def SemKITTI2train(label): if isinstance(label, list): return [SemKITTI2train_single(a) for a in label] else: return SemKITTI2train_single(label) def SemKITTI2train_single(label): remove_ind = label == 0 label -= 1 label[remove_ind] = 255 return label def train2SemKITTI(input_label): # delete 0 label new_labels=np.copy(input_label) new_labels[input_label==255]=0 for label_num in range(0,19): new_labels[input_label==label_num]=label_num+1 return new_labels def main(args): data_path = args.data_dir test_batch_size = args.test_batch_size model_save_path = args.model_save_path output_path = args.test_output_path compression_model = args.grid_size[2] grid_size = args.grid_size pytorch_device = torch.device('cuda:0') model = args.model if model == 'polar': fea_dim = 9 circular_padding = True elif model == 'traditional': fea_dim = 7 circular_padding = False # prepare miou fun unique_label=np.asarray(sorted(list(SemKITTI_label_name.keys())))[1:] - 1 unique_label_str=[SemKITTI_label_name[x] for x in unique_label+1] # prepare model 
my_BEV_model=BEV_Unet(n_class=len(unique_label), n_height = compression_model, input_batch_norm = True, dropout = 0.5, circular_padding = circular_padding) my_model = ptBEVnet(my_BEV_model, pt_model = 'pointnet', grid_size = grid_size, fea_dim = fea_dim, max_pt_per_encode = 256, out_pt_fea_dim = 512, kernal_size = 1, pt_selection = 'random', fea_compre = compression_model) if os.path.exists(model_save_path): my_model.load_state_dict(torch.load(model_save_path)) my_model.to(pytorch_device) # prepare dataset test_pt_dataset = SemKITTI(data_path + '/sequences/', imageset = 'test', return_ref = True) val_pt_dataset = SemKITTI(data_path + '/sequences/', imageset = 'val', return_ref = True) if model == 'polar': test_dataset=spherical_dataset(test_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True, return_test= True) val_dataset=spherical_dataset(val_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True) elif model == 'traditional': test_dataset=voxel_dataset(test_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True, return_test= True) val_dataset=voxel_dataset(val_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True) test_dataset_loader = torch.utils.data.DataLoader(dataset = test_dataset, batch_size = test_batch_size, collate_fn = collate_fn_BEV_test, shuffle = False, num_workers = 4) val_dataset_loader = torch.utils.data.DataLoader(dataset = val_dataset, batch_size = test_batch_size, collate_fn = collate_fn_BEV, shuffle = False, num_workers = 4) # validation print('*'*80) print('Test network performance on validation split') print('*'*80) pbar = tqdm(total=len(val_dataset_loader)) my_model.eval() hist_list = [] time_list = [] with torch.no_grad(): for i_iter_val,(_,val_vox_label,val_grid,val_pt_labs,val_pt_fea) in enumerate(val_dataset_loader): val_vox_label = SemKITTI2train(val_vox_label) val_pt_labs = SemKITTI2train(val_pt_labs) val_pt_fea_ten = 
[torch.from_numpy(i).type(torch.FloatTensor).to(pytorch_device) for i in val_pt_fea] val_grid_ten = [torch.from_numpy(i[:,:2]).to(pytorch_device) for i in val_grid] val_label_tensor=val_vox_label.type(torch.LongTensor).to(pytorch_device) torch.cuda.synchronize() start_time = time.time() predict_labels = my_model(val_pt_fea_ten, val_grid_ten) torch.cuda.synchronize() time_list.append(time.time()-start_time) predict_labels = torch.argmax(predict_labels,dim=1) predict_labels = predict_labels.cpu().detach().numpy() for count,i_val_grid in enumerate(val_grid): hist_list.append(fast_hist_crop(predict_labels[count,val_grid[count][:,0],val_grid[count][:,1],val_grid[count][:,2]],val_pt_labs[count],unique_label)) pbar.update(1) iou = per_class_iu(sum(hist_list)) print('Validation per class iou: ') for class_name, class_iou in zip(unique_label_str,iou): print('%s : %.2f%%' % (class_name, class_iou*100)) val_miou = np.nanmean(iou) * 100 del val_vox_label,val_grid,val_pt_fea,val_grid_ten pbar.close() print('Current val miou is %.3f ' % val_miou) print('Inference time per %d is %.4f seconds\n' % (test_batch_size,np.mean(time_list))) # test print('*'*80) print('Generate predictions for test split') print('*'*80) pbar = tqdm(total=len(test_dataset_loader)) for i_iter_test,(_,_,test_grid,_,test_pt_fea,test_index) in enumerate(test_dataset_loader): # predict test_pt_fea_ten = [torch.from_numpy(i).type(torch.FloatTensor).to(pytorch_device) for i in test_pt_fea] test_grid_ten = [torch.from_numpy(i[:,:2]).to(pytorch_device) for i in test_grid] predict_labels = my_model(test_pt_fea_ten,test_grid_ten) predict_labels = torch.argmax(predict_labels,1) predict_labels = predict_labels.cpu().detach().numpy() # write to label file for count,i_test_grid in enumerate(test_grid): test_pred_label = predict_labels[count,test_grid[count][:,0],test_grid[count][:,1],test_grid[count][:,2]] test_pred_label = train2SemKITTI(test_pred_label) test_pred_label = np.expand_dims(test_pred_label,axis=1) save_dir 
= test_pt_dataset.im_idx[test_index[count]] _,dir2 = save_dir.split('/sequences/',1) new_save_dir = output_path + '/sequences/' +dir2.replace('velodyne','predictions')[:-3]+'label' if not os.path.exists(os.path.dirname(new_save_dir)): try: os.makedirs(os.path.dirname(new_save_dir)) except OSError as exc: if exc.errno != errno.EEXIST: raise test_pred_label = test_pred_label.astype(np.uint32) test_pred_label.tofile(new_save_dir) pbar.update(1) del test_grid,test_pt_fea,test_index pbar.close() print('Predicted test labels are saved in %s. Need to be shifted to original label format before submitting to the Competition website.' % output_path) print('Remap script can be found in semantic-kitti-api.') if __name__ == '__main__': # Testing settings parser = argparse.ArgumentParser(description='') parser.add_argument('-d', '--data_dir', default='data') parser.add_argument('-p', '--model_save_path', default='pretained_weight/SemKITTI_PolarSeg.pt') parser.add_argument('-o', '--test_output_path', default='out/SemKITTI_test') parser.add_argument('-m', '--model', choices=['polar','traditional'], default='polar', help='training model: polar or traditional (default: polar)') parser.add_argument('-s', '--grid_size', nargs='+', type=int, default = [480,360,32], help='grid size of BEV representation (default: [480,360,32])') parser.add_argument('--test_batch_size', type=int, default=1, help='batch size for training (default: 1)') args = parser.parse_args() if not len(args.grid_size) == 3: raise Exception('Invalid grid size! Grid size should have 3 dimensions.') print(' '.join(sys.argv)) print(args) main(args)
46.884817
166
0.656505
import os import time import argparse import sys import numpy as np import torch import torch.optim as optim from tqdm import tqdm from network.BEV_Unet import BEV_Unet from network.ptBEV import ptBEVnet from dataloader.dataset import collate_fn_BEV,collate_fn_BEV_test,SemKITTI,SemKITTI_label_name,spherical_dataset,voxel_dataset import warnings warnings.filterwarnings("ignore") def fast_hist(pred, label, n): k = (label >= 0) & (label < n) bin_count=np.bincount( n * label[k].astype(int) + pred[k], minlength=n ** 2) return bin_count[:n ** 2].reshape(n, n) def per_class_iu(hist): return np.diag(hist) / (hist.sum(1) + hist.sum(0) - np.diag(hist)) def fast_hist_crop(output, target, unique_label): hist = fast_hist(output.flatten(), target.flatten(), np.max(unique_label)+1) hist=hist[unique_label,:] hist=hist[:,unique_label] return hist def SemKITTI2train(label): if isinstance(label, list): return [SemKITTI2train_single(a) for a in label] else: return SemKITTI2train_single(label) def SemKITTI2train_single(label): remove_ind = label == 0 label -= 1 label[remove_ind] = 255 return label def train2SemKITTI(input_label): new_labels=np.copy(input_label) new_labels[input_label==255]=0 for label_num in range(0,19): new_labels[input_label==label_num]=label_num+1 return new_labels def main(args): data_path = args.data_dir test_batch_size = args.test_batch_size model_save_path = args.model_save_path output_path = args.test_output_path compression_model = args.grid_size[2] grid_size = args.grid_size pytorch_device = torch.device('cuda:0') model = args.model if model == 'polar': fea_dim = 9 circular_padding = True elif model == 'traditional': fea_dim = 7 circular_padding = False unique_label=np.asarray(sorted(list(SemKITTI_label_name.keys())))[1:] - 1 unique_label_str=[SemKITTI_label_name[x] for x in unique_label+1] my_BEV_model=BEV_Unet(n_class=len(unique_label), n_height = compression_model, input_batch_norm = True, dropout = 0.5, circular_padding = circular_padding) my_model = 
ptBEVnet(my_BEV_model, pt_model = 'pointnet', grid_size = grid_size, fea_dim = fea_dim, max_pt_per_encode = 256, out_pt_fea_dim = 512, kernal_size = 1, pt_selection = 'random', fea_compre = compression_model) if os.path.exists(model_save_path): my_model.load_state_dict(torch.load(model_save_path)) my_model.to(pytorch_device) test_pt_dataset = SemKITTI(data_path + '/sequences/', imageset = 'test', return_ref = True) val_pt_dataset = SemKITTI(data_path + '/sequences/', imageset = 'val', return_ref = True) if model == 'polar': test_dataset=spherical_dataset(test_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True, return_test= True) val_dataset=spherical_dataset(val_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True) elif model == 'traditional': test_dataset=voxel_dataset(test_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True, return_test= True) val_dataset=voxel_dataset(val_pt_dataset, grid_size = grid_size, ignore_label = 0, fixed_volume_space = True) test_dataset_loader = torch.utils.data.DataLoader(dataset = test_dataset, batch_size = test_batch_size, collate_fn = collate_fn_BEV_test, shuffle = False, num_workers = 4) val_dataset_loader = torch.utils.data.DataLoader(dataset = val_dataset, batch_size = test_batch_size, collate_fn = collate_fn_BEV, shuffle = False, num_workers = 4) print('*'*80) print('Test network performance on validation split') print('*'*80) pbar = tqdm(total=len(val_dataset_loader)) my_model.eval() hist_list = [] time_list = [] with torch.no_grad(): for i_iter_val,(_,val_vox_label,val_grid,val_pt_labs,val_pt_fea) in enumerate(val_dataset_loader): val_vox_label = SemKITTI2train(val_vox_label) val_pt_labs = SemKITTI2train(val_pt_labs) val_pt_fea_ten = [torch.from_numpy(i).type(torch.FloatTensor).to(pytorch_device) for i in val_pt_fea] val_grid_ten = [torch.from_numpy(i[:,:2]).to(pytorch_device) for i in val_grid] 
val_label_tensor=val_vox_label.type(torch.LongTensor).to(pytorch_device) torch.cuda.synchronize() start_time = time.time() predict_labels = my_model(val_pt_fea_ten, val_grid_ten) torch.cuda.synchronize() time_list.append(time.time()-start_time) predict_labels = torch.argmax(predict_labels,dim=1) predict_labels = predict_labels.cpu().detach().numpy() for count,i_val_grid in enumerate(val_grid): hist_list.append(fast_hist_crop(predict_labels[count,val_grid[count][:,0],val_grid[count][:,1],val_grid[count][:,2]],val_pt_labs[count],unique_label)) pbar.update(1) iou = per_class_iu(sum(hist_list)) print('Validation per class iou: ') for class_name, class_iou in zip(unique_label_str,iou): print('%s : %.2f%%' % (class_name, class_iou*100)) val_miou = np.nanmean(iou) * 100 del val_vox_label,val_grid,val_pt_fea,val_grid_ten pbar.close() print('Current val miou is %.3f ' % val_miou) print('Inference time per %d is %.4f seconds\n' % (test_batch_size,np.mean(time_list))) print('*'*80) print('Generate predictions for test split') print('*'*80) pbar = tqdm(total=len(test_dataset_loader)) for i_iter_test,(_,_,test_grid,_,test_pt_fea,test_index) in enumerate(test_dataset_loader): test_pt_fea_ten = [torch.from_numpy(i).type(torch.FloatTensor).to(pytorch_device) for i in test_pt_fea] test_grid_ten = [torch.from_numpy(i[:,:2]).to(pytorch_device) for i in test_grid] predict_labels = my_model(test_pt_fea_ten,test_grid_ten) predict_labels = torch.argmax(predict_labels,1) predict_labels = predict_labels.cpu().detach().numpy() for count,i_test_grid in enumerate(test_grid): test_pred_label = predict_labels[count,test_grid[count][:,0],test_grid[count][:,1],test_grid[count][:,2]] test_pred_label = train2SemKITTI(test_pred_label) test_pred_label = np.expand_dims(test_pred_label,axis=1) save_dir = test_pt_dataset.im_idx[test_index[count]] _,dir2 = save_dir.split('/sequences/',1) new_save_dir = output_path + '/sequences/' +dir2.replace('velodyne','predictions')[:-3]+'label' if not 
os.path.exists(os.path.dirname(new_save_dir)): try: os.makedirs(os.path.dirname(new_save_dir)) except OSError as exc: if exc.errno != errno.EEXIST: raise test_pred_label = test_pred_label.astype(np.uint32) test_pred_label.tofile(new_save_dir) pbar.update(1) del test_grid,test_pt_fea,test_index pbar.close() print('Predicted test labels are saved in %s. Need to be shifted to original label format before submitting to the Competition website.' % output_path) print('Remap script can be found in semantic-kitti-api.') if __name__ == '__main__': parser = argparse.ArgumentParser(description='') parser.add_argument('-d', '--data_dir', default='data') parser.add_argument('-p', '--model_save_path', default='pretained_weight/SemKITTI_PolarSeg.pt') parser.add_argument('-o', '--test_output_path', default='out/SemKITTI_test') parser.add_argument('-m', '--model', choices=['polar','traditional'], default='polar', help='training model: polar or traditional (default: polar)') parser.add_argument('-s', '--grid_size', nargs='+', type=int, default = [480,360,32], help='grid size of BEV representation (default: [480,360,32])') parser.add_argument('--test_batch_size', type=int, default=1, help='batch size for training (default: 1)') args = parser.parse_args() if not len(args.grid_size) == 3: raise Exception('Invalid grid size! Grid size should have 3 dimensions.') print(' '.join(sys.argv)) print(args) main(args)
true
true
f71cd34ea15347fc0a22c67ddefbd517bbd2ed66
101
py
Python
src/ytdl/playlists/models/__init__.py
Asday/ytdl
96a51ba3589e855b27f75095b0cd4a6f00f8eefa
[ "MIT" ]
null
null
null
src/ytdl/playlists/models/__init__.py
Asday/ytdl
96a51ba3589e855b27f75095b0cd4a6f00f8eefa
[ "MIT" ]
1
2019-04-15T02:09:37.000Z
2019-04-15T02:09:37.000Z
src/ytdl/playlists/models/__init__.py
Asday/ytdl
96a51ba3589e855b27f75095b0cd4a6f00f8eefa
[ "MIT" ]
null
null
null
from .playlist import Playlist from .video import Video __all__ = [ 'Playlist', 'Video', ]
11.222222
30
0.653465
from .playlist import Playlist from .video import Video __all__ = [ 'Playlist', 'Video', ]
true
true
f71cd35b8a5ccd91aeb1a0d8c570e794aa9da303
403
py
Python
LeetCode/0069 _ Sqrt(x).py
Achyut-sudo/PythonAlgorithms
21fb6522510fde7a0877b19a8cedd4665938a4df
[ "MIT" ]
144
2020-09-13T22:54:57.000Z
2022-02-24T21:54:25.000Z
LeetCode/0069 _ Sqrt(x).py
Achyut-sudo/PythonAlgorithms
21fb6522510fde7a0877b19a8cedd4665938a4df
[ "MIT" ]
587
2020-05-06T18:55:07.000Z
2021-09-20T13:14:53.000Z
LeetCode/0069 _ Sqrt(x).py
Achyut-sudo/PythonAlgorithms
21fb6522510fde7a0877b19a8cedd4665938a4df
[ "MIT" ]
523
2020-09-09T12:07:13.000Z
2022-02-24T21:54:31.000Z
class Solution(object): def mySqrt(self, x): if x<2: return x low = 0 high = x result=0 while(low<=high): mid = (low+high)//2 if(mid*mid==x): return mid elif(mid*mid<x): low = mid+1 result = mid else: high = mid-1 return result
23.705882
31
0.377171
class Solution(object): def mySqrt(self, x): if x<2: return x low = 0 high = x result=0 while(low<=high): mid = (low+high)//2 if(mid*mid==x): return mid elif(mid*mid<x): low = mid+1 result = mid else: high = mid-1 return result
true
true
f71cd3983554ca85caddf54de589eb916e0b3596
2,924
py
Python
nova/virt/baremetal/vif_driver.py
SnabbCo/nova
d156d7fdf241569da2c27ae02ec88e6ef448f7e2
[ "Apache-2.0" ]
2
2016-04-19T08:20:39.000Z
2021-10-03T16:00:37.000Z
nova/virt/baremetal/vif_driver.py
SnabbCo/nova
d156d7fdf241569da2c27ae02ec88e6ef448f7e2
[ "Apache-2.0" ]
9
2015-05-20T11:20:17.000Z
2017-07-27T08:21:33.000Z
nova/virt/baremetal/vif_driver.py
SnabbCo/nova
d156d7fdf241569da2c27ae02ec88e6ef448f7e2
[ "Apache-2.0" ]
13
2015-05-05T09:34:04.000Z
2017-11-08T02:03:46.000Z
# Copyright (c) 2012 NTT DOCOMO, INC. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg from nova import context from nova import exception from nova.i18n import _ from nova.openstack.common import log as logging from nova.virt.baremetal import db as bmdb CONF = cfg.CONF LOG = logging.getLogger(__name__) class BareMetalVIFDriver(object): def _after_plug(self, instance, vif, pif): pass def _after_unplug(self, instance, vif, pif): pass def plug(self, instance, vif): LOG.debug("plug: instance_uuid=%(uuid)s vif=%(vif)s", {'uuid': instance['uuid'], 'vif': vif}) vif_uuid = vif['id'] ctx = context.get_admin_context() node = bmdb.bm_node_get_by_instance_uuid(ctx, instance['uuid']) # TODO(deva): optimize this database query # this is just searching for a free physical interface pifs = bmdb.bm_interface_get_all_by_bm_node_id(ctx, node['id']) for pif in pifs: if not pif['vif_uuid']: bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], vif_uuid) LOG.debug("pif:%(id)s is plugged (vif_uuid=%(vif_uuid)s)", {'id': pif['id'], 'vif_uuid': vif_uuid}) self._after_plug(instance, vif, pif) return # NOTE(deva): should this really be raising an exception # when there are no physical interfaces left? 
raise exception.NovaException(_( "Baremetal node: %(id)s has no available physical interface" " for virtual interface %(vif_uuid)s") % {'id': node['id'], 'vif_uuid': vif_uuid}) def unplug(self, instance, vif): LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(vif)s", {'uuid': instance['uuid'], 'vif': vif}) vif_uuid = vif['id'] ctx = context.get_admin_context() try: pif = bmdb.bm_interface_get_by_vif_uuid(ctx, vif_uuid) bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], None) LOG.debug("pif:%(id)s is unplugged (vif_uuid=%(vif_uuid)s)", {'id': pif['id'], 'vif_uuid': vif_uuid}) self._after_unplug(instance, vif, pif) except exception.NovaException: LOG.warn(_("no pif for vif_uuid=%s") % vif_uuid)
38.986667
78
0.619699
from oslo.config import cfg from nova import context from nova import exception from nova.i18n import _ from nova.openstack.common import log as logging from nova.virt.baremetal import db as bmdb CONF = cfg.CONF LOG = logging.getLogger(__name__) class BareMetalVIFDriver(object): def _after_plug(self, instance, vif, pif): pass def _after_unplug(self, instance, vif, pif): pass def plug(self, instance, vif): LOG.debug("plug: instance_uuid=%(uuid)s vif=%(vif)s", {'uuid': instance['uuid'], 'vif': vif}) vif_uuid = vif['id'] ctx = context.get_admin_context() node = bmdb.bm_node_get_by_instance_uuid(ctx, instance['uuid']) pifs = bmdb.bm_interface_get_all_by_bm_node_id(ctx, node['id']) for pif in pifs: if not pif['vif_uuid']: bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], vif_uuid) LOG.debug("pif:%(id)s is plugged (vif_uuid=%(vif_uuid)s)", {'id': pif['id'], 'vif_uuid': vif_uuid}) self._after_plug(instance, vif, pif) return raise exception.NovaException(_( "Baremetal node: %(id)s has no available physical interface" " for virtual interface %(vif_uuid)s") % {'id': node['id'], 'vif_uuid': vif_uuid}) def unplug(self, instance, vif): LOG.debug("unplug: instance_uuid=%(uuid)s vif=%(vif)s", {'uuid': instance['uuid'], 'vif': vif}) vif_uuid = vif['id'] ctx = context.get_admin_context() try: pif = bmdb.bm_interface_get_by_vif_uuid(ctx, vif_uuid) bmdb.bm_interface_set_vif_uuid(ctx, pif['id'], None) LOG.debug("pif:%(id)s is unplugged (vif_uuid=%(vif_uuid)s)", {'id': pif['id'], 'vif_uuid': vif_uuid}) self._after_unplug(instance, vif, pif) except exception.NovaException: LOG.warn(_("no pif for vif_uuid=%s") % vif_uuid)
true
true
f71cd40457b398174b6dfb10bb74cacb3e7ed2d5
736
py
Python
Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py
lev1khachatryan/ASDS_CV
c9f0c0412002e929bcb7cc2fc6e5392977a9fa76
[ "MIT" ]
5
2019-12-13T16:26:10.000Z
2020-01-10T07:44:05.000Z
Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py
lev1khachatryan/ASDS_CV
c9f0c0412002e929bcb7cc2fc6e5392977a9fa76
[ "MIT" ]
1
2020-01-07T16:48:21.000Z
2020-03-18T18:43:37.000Z
Tutorials/Intro_To_NN/NNDL-solutions/code/chap6p2/exec_shifted_2sig.py
lev1khachatryan/ASDS_CV
c9f0c0412002e929bcb7cc2fc6e5392977a9fa76
[ "MIT" ]
null
null
null
import network3 from network3 import Network from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer training_data, validation_data, test_data = network3.load_data_shared() mini_batch_size = 10 net = Network([ ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), filter_shape=(20, 1, 5, 5), poolsize=(2, 2), activation_fn=network3.shifted_2sig), FullyConnectedLayer(n_in=20*12*12, n_out=100, activation_fn=network3.shifted_2sig), SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size) net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
46
71
0.633152
import network3 from network3 import Network from network3 import ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer training_data, validation_data, test_data = network3.load_data_shared() mini_batch_size = 10 net = Network([ ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28), filter_shape=(20, 1, 5, 5), poolsize=(2, 2), activation_fn=network3.shifted_2sig), FullyConnectedLayer(n_in=20*12*12, n_out=100, activation_fn=network3.shifted_2sig), SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size) net.SGD(training_data, 60, mini_batch_size, 0.1, validation_data, test_data)
true
true
f71cd4211d3578ba41ce735db4be9c8ac9129dc8
83
py
Python
app/gis/__init__.py
athkishore/vgr
48cc04eb9136e7f1d0753173188c9ddbc730f1cb
[ "MIT" ]
null
null
null
app/gis/__init__.py
athkishore/vgr
48cc04eb9136e7f1d0753173188c9ddbc730f1cb
[ "MIT" ]
3
2016-02-15T02:36:05.000Z
2016-02-18T06:07:15.000Z
app/gis/__init__.py
athkishore/vgr
48cc04eb9136e7f1d0753173188c9ddbc730f1cb
[ "MIT" ]
null
null
null
from flask import Blueprint gis = Blueprint('gis', __name__) from . import views
13.833333
32
0.746988
from flask import Blueprint gis = Blueprint('gis', __name__) from . import views
true
true
f71cd48b6dfbc3793e7303d63dfae53f7d22f018
19,977
py
Python
pymongo/encryption.py
anryko/mongo-python-driver
eda4fbb1591bd88d58d5bd3452f82ed656e95b1c
[ "Apache-2.0" ]
4
2020-04-25T16:53:58.000Z
2020-04-30T20:43:06.000Z
pymongo/encryption.py
anryko/mongo-python-driver
eda4fbb1591bd88d58d5bd3452f82ed656e95b1c
[ "Apache-2.0" ]
30
2020-04-15T19:37:40.000Z
2020-04-22T21:19:35.000Z
pymongo/encryption.py
anryko/mongo-python-driver
eda4fbb1591bd88d58d5bd3452f82ed656e95b1c
[ "Apache-2.0" ]
2
2020-03-12T23:20:22.000Z
2021-02-15T21:54:02.000Z
# Copyright 2019-present MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Support for explicit client-side field level encryption.""" import contextlib import os import subprocess import uuid import weakref try: from pymongocrypt.auto_encrypter import AutoEncrypter from pymongocrypt.errors import MongoCryptError from pymongocrypt.explicit_encrypter import ExplicitEncrypter from pymongocrypt.mongocrypt import MongoCryptOptions from pymongocrypt.state_machine import MongoCryptCallback _HAVE_PYMONGOCRYPT = True except ImportError: _HAVE_PYMONGOCRYPT = False MongoCryptCallback = object from bson import _dict_to_bson, decode, encode from bson.codec_options import CodecOptions from bson.binary import (Binary, STANDARD, UUID_SUBTYPE) from bson.errors import BSONError from bson.raw_bson import (DEFAULT_RAW_BSON_OPTIONS, RawBSONDocument, _inflate_bson) from bson.son import SON from pymongo.errors import (ConfigurationError, EncryptionError, InvalidOperation, ServerSelectionTimeoutError) from pymongo.mongo_client import MongoClient from pymongo.pool import _configured_socket, PoolOptions from pymongo.read_concern import ReadConcern from pymongo.ssl_support import get_ssl_context from pymongo.uri_parser import parse_host from pymongo.write_concern import WriteConcern from pymongo.daemon import _spawn_daemon _HTTPS_PORT = 443 _KMS_CONNECT_TIMEOUT = 10 # TODO: CDRIVER-3262 will define this value. 
_MONGOCRYPTD_TIMEOUT_MS = 1000 _DATA_KEY_OPTS = CodecOptions(document_class=SON, uuid_representation=STANDARD) # Use RawBSONDocument codec options to avoid needlessly decoding # documents from the key vault. _KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument, uuid_representation=STANDARD) @contextlib.contextmanager def _wrap_encryption_errors(): """Context manager to wrap encryption related errors.""" try: yield except BSONError: # BSON encoding/decoding errors are unrelated to encryption so # we should propagate them unchanged. raise except Exception as exc: raise EncryptionError(exc) class _EncryptionIO(MongoCryptCallback): def __init__(self, client, key_vault_coll, mongocryptd_client, opts): """Internal class to perform I/O on behalf of pymongocrypt.""" # Use a weak ref to break reference cycle. if client is not None: self.client_ref = weakref.ref(client) else: self.client_ref = None self.key_vault_coll = key_vault_coll.with_options( codec_options=_KEY_VAULT_OPTS, read_concern=ReadConcern(level='majority'), write_concern=WriteConcern(w='majority')) self.mongocryptd_client = mongocryptd_client self.opts = opts self._spawned = False def kms_request(self, kms_context): """Complete a KMS request. :Parameters: - `kms_context`: A :class:`MongoCryptKmsContext`. :Returns: None """ endpoint = kms_context.endpoint message = kms_context.message host, port = parse_host(endpoint, _HTTPS_PORT) ctx = get_ssl_context(None, None, None, None, None, None, True) opts = PoolOptions(connect_timeout=_KMS_CONNECT_TIMEOUT, socket_timeout=_KMS_CONNECT_TIMEOUT, ssl_context=ctx) conn = _configured_socket((host, port), opts) try: conn.sendall(message) while kms_context.bytes_needed > 0: data = conn.recv(kms_context.bytes_needed) kms_context.feed(data) finally: conn.close() def collection_info(self, database, filter): """Get the collection info for a namespace. The returned collection info is passed to libmongocrypt which reads the JSON schema. 
:Parameters: - `database`: The database on which to run listCollections. - `filter`: The filter to pass to listCollections. :Returns: The first document from the listCollections command response as BSON. """ with self.client_ref()[database].list_collections( filter=RawBSONDocument(filter)) as cursor: for doc in cursor: return _dict_to_bson(doc, False, _DATA_KEY_OPTS) def spawn(self): """Spawn mongocryptd. Note this method is thread safe; at most one mongocryptd will start successfully. """ self._spawned = True args = [self.opts._mongocryptd_spawn_path or 'mongocryptd'] args.extend(self.opts._mongocryptd_spawn_args) _spawn_daemon(args) def mark_command(self, database, cmd): """Mark a command for encryption. :Parameters: - `database`: The database on which to run this command. - `cmd`: The BSON command to run. :Returns: The marked command response from mongocryptd. """ if not self._spawned and not self.opts._mongocryptd_bypass_spawn: self.spawn() # Database.command only supports mutable mappings so we need to decode # the raw BSON command first. inflated_cmd = _inflate_bson(cmd, DEFAULT_RAW_BSON_OPTIONS) try: res = self.mongocryptd_client[database].command( inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS) except ServerSelectionTimeoutError: if self.opts._mongocryptd_bypass_spawn: raise self.spawn() res = self.mongocryptd_client[database].command( inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS) return res.raw def fetch_keys(self, filter): """Yields one or more keys from the key vault. :Parameters: - `filter`: The filter to pass to find. :Returns: A generator which yields the requested keys from the key vault. """ with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor: for key in cursor: yield key.raw def insert_data_key(self, data_key): """Insert a data key into the key vault. :Parameters: - `data_key`: The data key document to insert. :Returns: The _id of the inserted data key document. 
""" raw_doc = RawBSONDocument(data_key) data_key_id = raw_doc.get('_id') if not isinstance(data_key_id, uuid.UUID): raise TypeError('data_key _id must be a UUID') self.key_vault_coll.insert_one(raw_doc) return Binary(data_key_id.bytes, subtype=UUID_SUBTYPE) def bson_encode(self, doc): """Encode a document to BSON. A document can be any mapping type (like :class:`dict`). :Parameters: - `doc`: mapping type representing a document :Returns: The encoded BSON bytes. """ return encode(doc) def close(self): """Release resources. Note it is not safe to call this method from __del__ or any GC hooks. """ self.client_ref = None self.key_vault_coll = None if self.mongocryptd_client: self.mongocryptd_client.close() self.mongocryptd_client = None class _Encrypter(object): def __init__(self, io_callbacks, opts): """Encrypts and decrypts MongoDB commands. This class is used to support automatic encryption and decryption of MongoDB commands. :Parameters: - `io_callbacks`: A :class:`MongoCryptCallback`. - `opts`: The encrypted client's :class:`AutoEncryptionOpts`. """ if opts._schema_map is None: schema_map = None else: schema_map = _dict_to_bson(opts._schema_map, False, _DATA_KEY_OPTS) self._auto_encrypter = AutoEncrypter(io_callbacks, MongoCryptOptions( opts._kms_providers, schema_map)) self._bypass_auto_encryption = opts._bypass_auto_encryption self._closed = False def encrypt(self, database, cmd, check_keys, codec_options): """Encrypt a MongoDB command. :Parameters: - `database`: The database for this command. - `cmd`: A command document. - `check_keys`: If True, check `cmd` for invalid keys. - `codec_options`: The CodecOptions to use while encoding `cmd`. :Returns: The encrypted command to execute. """ self._check_closed() # Workaround for $clusterTime which is incompatible with # check_keys. 
cluster_time = check_keys and cmd.pop('$clusterTime', None) encoded_cmd = _dict_to_bson(cmd, check_keys, codec_options) with _wrap_encryption_errors(): encrypted_cmd = self._auto_encrypter.encrypt(database, encoded_cmd) # TODO: PYTHON-1922 avoid decoding the encrypted_cmd. encrypt_cmd = _inflate_bson( encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS) if cluster_time: encrypt_cmd['$clusterTime'] = cluster_time return encrypt_cmd def decrypt(self, response): """Decrypt a MongoDB command response. :Parameters: - `response`: A MongoDB command response as BSON. :Returns: The decrypted command response. """ self._check_closed() with _wrap_encryption_errors(): return self._auto_encrypter.decrypt(response) def _check_closed(self): if self._closed: raise InvalidOperation("Cannot use MongoClient after close") def close(self): """Cleanup resources.""" self._closed = True self._auto_encrypter.close() @staticmethod def create(client, opts): """Create a _CommandEncyptor for a client. :Parameters: - `client`: The encrypted MongoClient. - `opts`: The encrypted client's :class:`AutoEncryptionOpts`. :Returns: A :class:`_CommandEncrypter` for this client. 
""" key_vault_client = opts._key_vault_client or client db, coll = opts._key_vault_namespace.split('.', 1) key_vault_coll = key_vault_client[db][coll] mongocryptd_client = MongoClient( opts._mongocryptd_uri, connect=False, serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS) io_callbacks = _EncryptionIO( client, key_vault_coll, mongocryptd_client, opts) return _Encrypter(io_callbacks, opts) class Algorithm(object): """An enum that defines the supported encryption algorithms.""" AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = ( "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") AEAD_AES_256_CBC_HMAC_SHA_512_Random = ( "AEAD_AES_256_CBC_HMAC_SHA_512-Random") class ClientEncryption(object): """Explicit client-side field level encryption.""" def __init__(self, kms_providers, key_vault_namespace, key_vault_client, codec_options): """Explicit client-side field level encryption. The ClientEncryption class encapsulates explicit operations on a key vault collection that cannot be done directly on a MongoClient. Similar to configuring auto encryption on a MongoClient, it is constructed with a MongoClient (to a MongoDB cluster containing the key vault collection), KMS provider configuration, and keyVaultNamespace. It provides an API for explicitly encrypting and decrypting values, and creating data keys. It does not provide an API to query keys from the key vault collection, as this can be done directly on the MongoClient. See :ref:`explicit-client-side-encryption` for an example. :Parameters: - `kms_providers`: Map of KMS provider options. Two KMS providers are supported: "aws" and "local". The kmsProviders map values differ by provider: - `aws`: Map with "accessKeyId" and "secretAccessKey" as strings. These are the AWS access key ID and AWS secret access key used to generate KMS messages. - `local`: Map with "key" as a 96-byte array or string. "key" is the master key used to encrypt/decrypt data keys. This key should be generated and stored as securely as possible. 
- `key_vault_namespace`: The namespace for the key vault collection. The key vault collection contains all data keys used for encryption and decryption. Data keys are stored as documents in this MongoDB collection. Data keys are protected with encryption by a KMS provider. - `key_vault_client`: A MongoClient connected to a MongoDB cluster containing the `key_vault_namespace` collection. - `codec_options`: An instance of :class:`~bson.codec_options.CodecOptions` to use when encoding a value for encryption and decoding the decrypted BSON value. This should be the same CodecOptions instance configured on the MongoClient, Database, or Collection used to access application data. .. versionadded:: 3.9 """ if not _HAVE_PYMONGOCRYPT: raise ConfigurationError( "client-side field level encryption requires the pymongocrypt " "library: install a compatible version with: " "python -m pip install 'pymongo[encryption]'") if not isinstance(codec_options, CodecOptions): raise TypeError("codec_options must be an instance of " "bson.codec_options.CodecOptions") self._kms_providers = kms_providers self._key_vault_namespace = key_vault_namespace self._key_vault_client = key_vault_client self._codec_options = codec_options db, coll = key_vault_namespace.split('.', 1) key_vault_coll = key_vault_client[db][coll] self._io_callbacks = _EncryptionIO(None, key_vault_coll, None, None) self._encryption = ExplicitEncrypter( self._io_callbacks, MongoCryptOptions(kms_providers, None)) def create_data_key(self, kms_provider, master_key=None, key_alt_names=None): """Create and insert a new data key into the key vault collection. :Parameters: - `kms_provider`: The KMS provider to use. Supported values are "aws" and "local". - `master_key`: Identifies a KMS-specific key used to encrypt the new data key. If the kmsProvider is "local" the `master_key` is not applicable and may be omitted. If the `kms_provider` is "aws" it is required and has the following fields:: - `region` (string): Required. 
The AWS region, e.g. "us-east-1". - `key` (string): Required. The Amazon Resource Name (ARN) to the AWS customer. - `endpoint` (string): Optional. An alternate host to send KMS requests to. May include port number, e.g. "kms.us-east-1.amazonaws.com:443". - `key_alt_names` (optional): An optional list of string alternate names used to reference a key. If a key is created with alternate names, then encryption may refer to the key by the unique alternate name instead of by ``key_id``. The following example shows creating and referring to a data key by alternate name:: client_encryption.create_data_key("local", keyAltNames=["name1"]) # reference the key with the alternate name client_encryption.encrypt("457-55-5462", keyAltName="name1", algorithm=Algorithm.Random) :Returns: The ``_id`` of the created data key document as a :class:`~bson.binary.Binary` with subtype :data:`~bson.binary.UUID_SUBTYPE`. """ self._check_closed() with _wrap_encryption_errors(): return self._encryption.create_data_key( kms_provider, master_key=master_key, key_alt_names=key_alt_names) def encrypt(self, value, algorithm, key_id=None, key_alt_name=None): """Encrypt a BSON value with a given key and algorithm. Note that exactly one of ``key_id`` or ``key_alt_name`` must be provided. :Parameters: - `value`: The BSON value to encrypt. - `algorithm` (string): The encryption algorithm to use. See :class:`Algorithm` for some valid options. - `key_id`: Identifies a data key by ``_id`` which must be a :class:`~bson.binary.Binary` with subtype 4 ( :attr:`~bson.binary.UUID_SUBTYPE`). - `key_alt_name`: Identifies a key vault document by 'keyAltName'. :Returns: The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. 
""" self._check_closed() if (key_id is not None and not ( isinstance(key_id, Binary) and key_id.subtype == UUID_SUBTYPE)): raise TypeError( 'key_id must be a bson.binary.Binary with subtype 4') doc = encode({'v': value}, codec_options=self._codec_options) with _wrap_encryption_errors(): encrypted_doc = self._encryption.encrypt( doc, algorithm, key_id=key_id, key_alt_name=key_alt_name) return decode(encrypted_doc)['v'] def decrypt(self, value): """Decrypt an encrypted value. :Parameters: - `value` (Binary): The encrypted value, a :class:`~bson.binary.Binary` with subtype 6. :Returns: The decrypted BSON value. """ self._check_closed() if not (isinstance(value, Binary) and value.subtype == 6): raise TypeError( 'value to decrypt must be a bson.binary.Binary with subtype 6') with _wrap_encryption_errors(): doc = encode({'v': value}) decrypted_doc = self._encryption.decrypt(doc) return decode(decrypted_doc, codec_options=self._codec_options)['v'] def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def _check_closed(self): if self._encryption is None: raise InvalidOperation("Cannot use closed ClientEncryption") def close(self): """Release resources. Note that using this class in a with-statement will automatically call :meth:`close`:: with ClientEncryption(...) as client_encryption: encrypted = client_encryption.encrypt(value, ...) decrypted = client_encryption.decrypt(encrypted) """ if self._io_callbacks: self._io_callbacks.close() self._encryption.close() self._io_callbacks = None self._encryption = None
38.270115
79
0.638634
import contextlib import os import subprocess import uuid import weakref try: from pymongocrypt.auto_encrypter import AutoEncrypter from pymongocrypt.errors import MongoCryptError from pymongocrypt.explicit_encrypter import ExplicitEncrypter from pymongocrypt.mongocrypt import MongoCryptOptions from pymongocrypt.state_machine import MongoCryptCallback _HAVE_PYMONGOCRYPT = True except ImportError: _HAVE_PYMONGOCRYPT = False MongoCryptCallback = object from bson import _dict_to_bson, decode, encode from bson.codec_options import CodecOptions from bson.binary import (Binary, STANDARD, UUID_SUBTYPE) from bson.errors import BSONError from bson.raw_bson import (DEFAULT_RAW_BSON_OPTIONS, RawBSONDocument, _inflate_bson) from bson.son import SON from pymongo.errors import (ConfigurationError, EncryptionError, InvalidOperation, ServerSelectionTimeoutError) from pymongo.mongo_client import MongoClient from pymongo.pool import _configured_socket, PoolOptions from pymongo.read_concern import ReadConcern from pymongo.ssl_support import get_ssl_context from pymongo.uri_parser import parse_host from pymongo.write_concern import WriteConcern from pymongo.daemon import _spawn_daemon _HTTPS_PORT = 443 _KMS_CONNECT_TIMEOUT = 10 _MONGOCRYPTD_TIMEOUT_MS = 1000 _DATA_KEY_OPTS = CodecOptions(document_class=SON, uuid_representation=STANDARD) _KEY_VAULT_OPTS = CodecOptions(document_class=RawBSONDocument, uuid_representation=STANDARD) @contextlib.contextmanager def _wrap_encryption_errors(): try: yield except BSONError: raise except Exception as exc: raise EncryptionError(exc) class _EncryptionIO(MongoCryptCallback): def __init__(self, client, key_vault_coll, mongocryptd_client, opts): if client is not None: self.client_ref = weakref.ref(client) else: self.client_ref = None self.key_vault_coll = key_vault_coll.with_options( codec_options=_KEY_VAULT_OPTS, read_concern=ReadConcern(level='majority'), write_concern=WriteConcern(w='majority')) self.mongocryptd_client = mongocryptd_client 
self.opts = opts self._spawned = False def kms_request(self, kms_context): endpoint = kms_context.endpoint message = kms_context.message host, port = parse_host(endpoint, _HTTPS_PORT) ctx = get_ssl_context(None, None, None, None, None, None, True) opts = PoolOptions(connect_timeout=_KMS_CONNECT_TIMEOUT, socket_timeout=_KMS_CONNECT_TIMEOUT, ssl_context=ctx) conn = _configured_socket((host, port), opts) try: conn.sendall(message) while kms_context.bytes_needed > 0: data = conn.recv(kms_context.bytes_needed) kms_context.feed(data) finally: conn.close() def collection_info(self, database, filter): with self.client_ref()[database].list_collections( filter=RawBSONDocument(filter)) as cursor: for doc in cursor: return _dict_to_bson(doc, False, _DATA_KEY_OPTS) def spawn(self): self._spawned = True args = [self.opts._mongocryptd_spawn_path or 'mongocryptd'] args.extend(self.opts._mongocryptd_spawn_args) _spawn_daemon(args) def mark_command(self, database, cmd): if not self._spawned and not self.opts._mongocryptd_bypass_spawn: self.spawn() inflated_cmd = _inflate_bson(cmd, DEFAULT_RAW_BSON_OPTIONS) try: res = self.mongocryptd_client[database].command( inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS) except ServerSelectionTimeoutError: if self.opts._mongocryptd_bypass_spawn: raise self.spawn() res = self.mongocryptd_client[database].command( inflated_cmd, codec_options=DEFAULT_RAW_BSON_OPTIONS) return res.raw def fetch_keys(self, filter): with self.key_vault_coll.find(RawBSONDocument(filter)) as cursor: for key in cursor: yield key.raw def insert_data_key(self, data_key): raw_doc = RawBSONDocument(data_key) data_key_id = raw_doc.get('_id') if not isinstance(data_key_id, uuid.UUID): raise TypeError('data_key _id must be a UUID') self.key_vault_coll.insert_one(raw_doc) return Binary(data_key_id.bytes, subtype=UUID_SUBTYPE) def bson_encode(self, doc): return encode(doc) def close(self): self.client_ref = None self.key_vault_coll = None if self.mongocryptd_client: 
self.mongocryptd_client.close() self.mongocryptd_client = None class _Encrypter(object): def __init__(self, io_callbacks, opts): if opts._schema_map is None: schema_map = None else: schema_map = _dict_to_bson(opts._schema_map, False, _DATA_KEY_OPTS) self._auto_encrypter = AutoEncrypter(io_callbacks, MongoCryptOptions( opts._kms_providers, schema_map)) self._bypass_auto_encryption = opts._bypass_auto_encryption self._closed = False def encrypt(self, database, cmd, check_keys, codec_options): self._check_closed() cluster_time = check_keys and cmd.pop('$clusterTime', None) encoded_cmd = _dict_to_bson(cmd, check_keys, codec_options) with _wrap_encryption_errors(): encrypted_cmd = self._auto_encrypter.encrypt(database, encoded_cmd) encrypt_cmd = _inflate_bson( encrypted_cmd, DEFAULT_RAW_BSON_OPTIONS) if cluster_time: encrypt_cmd['$clusterTime'] = cluster_time return encrypt_cmd def decrypt(self, response): self._check_closed() with _wrap_encryption_errors(): return self._auto_encrypter.decrypt(response) def _check_closed(self): if self._closed: raise InvalidOperation("Cannot use MongoClient after close") def close(self): self._closed = True self._auto_encrypter.close() @staticmethod def create(client, opts): key_vault_client = opts._key_vault_client or client db, coll = opts._key_vault_namespace.split('.', 1) key_vault_coll = key_vault_client[db][coll] mongocryptd_client = MongoClient( opts._mongocryptd_uri, connect=False, serverSelectionTimeoutMS=_MONGOCRYPTD_TIMEOUT_MS) io_callbacks = _EncryptionIO( client, key_vault_coll, mongocryptd_client, opts) return _Encrypter(io_callbacks, opts) class Algorithm(object): AEAD_AES_256_CBC_HMAC_SHA_512_Deterministic = ( "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic") AEAD_AES_256_CBC_HMAC_SHA_512_Random = ( "AEAD_AES_256_CBC_HMAC_SHA_512-Random") class ClientEncryption(object): def __init__(self, kms_providers, key_vault_namespace, key_vault_client, codec_options): if not _HAVE_PYMONGOCRYPT: raise ConfigurationError( "client-side 
field level encryption requires the pymongocrypt " "library: install a compatible version with: " "python -m pip install 'pymongo[encryption]'") if not isinstance(codec_options, CodecOptions): raise TypeError("codec_options must be an instance of " "bson.codec_options.CodecOptions") self._kms_providers = kms_providers self._key_vault_namespace = key_vault_namespace self._key_vault_client = key_vault_client self._codec_options = codec_options db, coll = key_vault_namespace.split('.', 1) key_vault_coll = key_vault_client[db][coll] self._io_callbacks = _EncryptionIO(None, key_vault_coll, None, None) self._encryption = ExplicitEncrypter( self._io_callbacks, MongoCryptOptions(kms_providers, None)) def create_data_key(self, kms_provider, master_key=None, key_alt_names=None): self._check_closed() with _wrap_encryption_errors(): return self._encryption.create_data_key( kms_provider, master_key=master_key, key_alt_names=key_alt_names) def encrypt(self, value, algorithm, key_id=None, key_alt_name=None): self._check_closed() if (key_id is not None and not ( isinstance(key_id, Binary) and key_id.subtype == UUID_SUBTYPE)): raise TypeError( 'key_id must be a bson.binary.Binary with subtype 4') doc = encode({'v': value}, codec_options=self._codec_options) with _wrap_encryption_errors(): encrypted_doc = self._encryption.encrypt( doc, algorithm, key_id=key_id, key_alt_name=key_alt_name) return decode(encrypted_doc)['v'] def decrypt(self, value): self._check_closed() if not (isinstance(value, Binary) and value.subtype == 6): raise TypeError( 'value to decrypt must be a bson.binary.Binary with subtype 6') with _wrap_encryption_errors(): doc = encode({'v': value}) decrypted_doc = self._encryption.decrypt(doc) return decode(decrypted_doc, codec_options=self._codec_options)['v'] def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self.close() def _check_closed(self): if self._encryption is None: raise InvalidOperation("Cannot use closed ClientEncryption") def 
close(self): if self._io_callbacks: self._io_callbacks.close() self._encryption.close() self._io_callbacks = None self._encryption = None
true
true
f71cd544582f43f1165b14c78a32c371d80fe797
14,036
py
Python
stellargraph/mapper/mini_batch_node_generators.py
zblumen/stellargraph
10e62006907dd5968286f33648d1054e9c961c1b
[ "Apache-2.0" ]
null
null
null
stellargraph/mapper/mini_batch_node_generators.py
zblumen/stellargraph
10e62006907dd5968286f33648d1054e9c961c1b
[ "Apache-2.0" ]
null
null
null
stellargraph/mapper/mini_batch_node_generators.py
zblumen/stellargraph
10e62006907dd5968286f33648d1054e9c961c1b
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # # Copyright 2018-2020 Data61, CSIRO # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Mappers to provide input data for the graph models in layers. """ __all__ = ["ClusterNodeGenerator", "ClusterNodeSequence"] import random import copy import numpy as np import networkx as nx from tensorflow.keras.utils import Sequence from scipy import sparse from ..core.graph import StellarGraph from ..core.utils import is_real_iterable class ClusterNodeGenerator: """ A data generator for use with ClusterGCN models on homogeneous graphs, [1]. The supplied graph G should be a StellarGraph object that is ready for machine learning. Currently the model requires node features to be available for all nodes in the graph. Use the :meth:`flow` method supplying the nodes and (optionally) targets to get an object that can be used as a Keras data generator. This generator will supply the features array and the adjacency matrix to a mini-batch Keras graph ML model. [1] `W. Chiang, X. Liu, S. Si, Y. Li, S. Bengio, C. Hsieh, 2019 <https://arxiv.org/abs/1905.07953>`_. For more information, please see the ClusterGCN demo: `<https://github.com/stellargraph/stellargraph/blob/master/demos/>`_ Args: G (StellarGraph): a machine-learning StellarGraph-type graph clusters (int or list): If int then it indicates the number of clusters (default is 1 that is the given graph). If clusters is greater than 1, then nodes are uniformly at random assigned to a cluster. 
If list, then it should be a list of lists of node IDs such that each list corresponds to a cluster of nodes in G. The clusters should be non-overlapping. q (float): The number of clusters to combine for each mini-batch. The default is 1. lam (float): The mixture coefficient for adjacency matrix normalisation. name (str): an optional name of the generator """ def __init__(self, G, clusters=1, q=1, lam=0.1, name=None): if not isinstance(G, StellarGraph): raise TypeError("Graph must be a StellarGraph or StellarDiGraph object.") self.graph = G self.name = name self.q = q # The number of clusters to sample per mini-batch self.lam = lam self.clusters = clusters if isinstance(clusters, list): self.k = len(clusters) elif isinstance(clusters, int): if clusters <= 0: raise ValueError( "{}: clusters must be greater than 0.".format(type(self).__name__) ) self.k = clusters else: raise TypeError( "{}: clusters must be either int or list type.".format( type(self).__name__ ) ) # Some error checking on the given parameter values if not isinstance(lam, float): raise TypeError("{}: lam must be a float type.".format(type(self).__name__)) if lam < 0 or lam > 1: raise ValueError( "{}: lam must be in the range [0, 1].".format(type(self).__name__) ) if not isinstance(q, int): raise TypeError("{}: q must be integer type.".format(type(self).__name__)) if q <= 0: raise ValueError( "{}: q must be greater than 0.".format(type(self).__name__) ) if self.k % q != 0: raise ValueError( "{}: the number of clusters must be exactly divisible by q.".format( type(self).__name__ ) ) # Check if the graph has features G.check_graph_for_ml() self.node_list = list(G.nodes()) # Check that there is only a single node type if len(G.node_types) > 1: raise ValueError( "{}: node generator requires graph with single node type; " "a graph with multiple node types is passed. Stopping.".format( type(self).__name__ ) ) if isinstance(clusters, int): # We are not given graph clusters. 
# We are going to split the graph into self.k random clusters all_nodes = list(G.nodes()) random.shuffle(all_nodes) cluster_size = len(all_nodes) // self.k self.clusters = [ all_nodes[i : i + cluster_size] for i in range(0, len(all_nodes), cluster_size) ] if len(self.clusters) > self.k: # for the case that the number of nodes is not exactly divisible by k, we combine # the last cluster with the second last one self.clusters[-2].extend(self.clusters[-1]) del self.clusters[-1] print(f"Number of clusters {self.k}") for i, c in enumerate(self.clusters): print(f"{i} cluster has size {len(c)}") # Get the features for the nodes self.features = G.node_features(self.node_list) def flow(self, node_ids, targets=None, name=None): """ Creates a generator/sequence object for training, evaluation, or prediction with the supplied node ids and numeric targets. Args: node_ids (iterable): an iterable of node ids for the nodes of interest (e.g., training, validation, or test set nodes) targets (2d array, optional): a 2D array of numeric node targets with shape `(len(node_ids), target_size)` name (str, optional): An optional name for the returned generator object. Returns: A ClusterNodeSequence object to use with ClusterGCN in Keras methods :meth:`fit_generator`, :meth:`evaluate_generator`, and :meth:`predict_generator` """ if targets is not None: # Check targets is an iterable if not is_real_iterable(targets): raise TypeError( "{}: Targets must be an iterable or None".format( type(self).__name__ ) ) # Check targets correct shape if len(targets) != len(node_ids): raise ValueError( "{}: Targets must be the same length as node_ids".format( type(self).__name__ ) ) return ClusterNodeSequence( self.graph, self.clusters, targets=targets, node_ids=node_ids, q=self.q, lam=self.lam, name=name, ) class ClusterNodeSequence(Sequence): """ A Keras-compatible data generator for node inference using ClusterGCN model. 
Use this class with the Keras methods :meth:`keras.Model.fit_generator`, :meth:`keras.Model.evaluate_generator`, and :meth:`keras.Model.predict_generator`, This class should be created using the `.flow(...)` method of :class:`ClusterNodeGenerator`. Args: graph (StellarGraph): The graph clusters (list): A list of lists such that each sub-list indicates the nodes in a cluster. The length of this list, len(clusters) indicates the number of batches in one epoch. targets (np.ndarray, optional): An optional array of node targets of size (N x C), where C is the target size (e.g., number of classes for one-hot class targets) node_ids (iterable, optional): The node IDs for the target nodes. Required if targets is not None. normalize_adj (bool, optional): Specifies whether the adjacency matrix for each mini-batch should be normalized or not. The default is True. q (int, optional): The number of subgraphs to combine for each batch. The default value is 1 such that the generator treats each subgraph as a batch. lam (float, optional): The mixture coefficient for adjacency matrix normalisation (the 'diagonal enhancement' method). Valid values are in the interval [0, 1] and the default value is 0.1. name (str, optional): An optional name for this generator object. """ def __init__( self, graph, clusters, targets=None, node_ids=None, normalize_adj=True, q=1, lam=0.1, name=None, ): self.name = name self.clusters = list() self.clusters_original = copy.deepcopy(clusters) self.graph = graph self.node_list = list(graph.nodes()) self.normalize_adj = normalize_adj self.q = q self.lam = lam self.node_order = list() self._node_order_in_progress = list() self.__node_buffer = dict() self.target_ids = list() if len(clusters) % self.q != 0: raise ValueError( "The number of clusters should be exactly divisible by q. 
However, {} number of clusters is not exactly divisible by {}.".format( len(clusters), q ) ) if node_ids is not None: self.target_ids = list(node_ids) if targets is not None: if node_ids is None: raise ValueError( "Since targets is not None, node_ids must be given and cannot be None." ) if len(node_ids) != len(targets): raise ValueError( "When passed together targets and indices should be the same length." ) self.targets = np.asanyarray(targets) self.target_node_lookup = dict( zip(self.target_ids, range(len(self.target_ids))) ) else: self.targets = None self.on_epoch_end() def __len__(self): num_batches = len(self.clusters_original) // self.q return num_batches def __getitem__(self, index): # The next batch should be the adjacency matrix for the cluster and the corresponding feature vectors # and targets if available. cluster = self.clusters[index] adj_cluster = self.graph.to_adjacency_matrix(cluster) # The operations to normalize the adjacency matrix are too slow. # Either optimize this or implement as a layer(?) 
if self.normalize_adj: # add self loops adj_cluster.setdiag(1) # add self loops degree_matrix_diag = 1.0 / (adj_cluster.sum(axis=1) + 1) degree_matrix_diag = np.squeeze(np.asarray(degree_matrix_diag)) degree_matrix = sparse.lil_matrix(adj_cluster.shape) degree_matrix.setdiag(degree_matrix_diag) adj_cluster = degree_matrix.tocsr() @ adj_cluster adj_cluster.setdiag((1.0 + self.lam) * adj_cluster.diagonal()) adj_cluster = adj_cluster.toarray() g_node_list = list(cluster) # Determine the target nodes that exist in this cluster target_nodes_in_cluster = np.asanyarray( list(set(g_node_list).intersection(self.target_ids)) ) self.__node_buffer[index] = target_nodes_in_cluster # Dictionary to store node indices for quicker node index lookups node_lookup = dict(zip(g_node_list, range(len(g_node_list)))) # The list of indices of the target nodes in self.node_list target_node_indices = np.array( [node_lookup[n] for n in target_nodes_in_cluster] ) if index == (len(self.clusters_original) // self.q) - 1: # last batch self.__node_buffer_dict_to_list() cluster_targets = None # if self.targets is not None: # Dictionary to store node indices for quicker node index lookups # The list of indices of the target nodes in self.node_list cluster_target_indices = np.array( [self.target_node_lookup[n] for n in target_nodes_in_cluster] ) cluster_targets = self.targets[cluster_target_indices] cluster_targets = cluster_targets.reshape((1,) + cluster_targets.shape) features = self.graph.node_features(g_node_list) features = np.reshape(features, (1,) + features.shape) adj_cluster = adj_cluster.reshape((1,) + adj_cluster.shape) target_node_indices = target_node_indices[np.newaxis, np.newaxis, :] return [features, target_node_indices, adj_cluster], cluster_targets def __node_buffer_dict_to_list(self): self.node_order = [] for k, v in self.__node_buffer.items(): self.node_order.extend(v) def on_epoch_end(self): """ Shuffle all nodes at the end of each epoch """ if self.q > 1: # combine clusters 
cluster_indices = list(range(len(self.clusters_original))) random.shuffle(cluster_indices) self.clusters = [] for i in range(0, len(cluster_indices) - 1, self.q): cc = cluster_indices[i : i + self.q] tmp = [] for l in cc: tmp.extend(list(self.clusters_original[l])) self.clusters.append(tmp) else: self.clusters = copy.deepcopy(self.clusters_original) self.__node_buffer = dict() random.shuffle(self.clusters)
38.245232
145
0.604731
__all__ = ["ClusterNodeGenerator", "ClusterNodeSequence"] import random import copy import numpy as np import networkx as nx from tensorflow.keras.utils import Sequence from scipy import sparse from ..core.graph import StellarGraph from ..core.utils import is_real_iterable class ClusterNodeGenerator: def __init__(self, G, clusters=1, q=1, lam=0.1, name=None): if not isinstance(G, StellarGraph): raise TypeError("Graph must be a StellarGraph or StellarDiGraph object.") self.graph = G self.name = name self.q = q self.lam = lam self.clusters = clusters if isinstance(clusters, list): self.k = len(clusters) elif isinstance(clusters, int): if clusters <= 0: raise ValueError( "{}: clusters must be greater than 0.".format(type(self).__name__) ) self.k = clusters else: raise TypeError( "{}: clusters must be either int or list type.".format( type(self).__name__ ) ) if not isinstance(lam, float): raise TypeError("{}: lam must be a float type.".format(type(self).__name__)) if lam < 0 or lam > 1: raise ValueError( "{}: lam must be in the range [0, 1].".format(type(self).__name__) ) if not isinstance(q, int): raise TypeError("{}: q must be integer type.".format(type(self).__name__)) if q <= 0: raise ValueError( "{}: q must be greater than 0.".format(type(self).__name__) ) if self.k % q != 0: raise ValueError( "{}: the number of clusters must be exactly divisible by q.".format( type(self).__name__ ) ) G.check_graph_for_ml() self.node_list = list(G.nodes()) if len(G.node_types) > 1: raise ValueError( "{}: node generator requires graph with single node type; " "a graph with multiple node types is passed. 
Stopping.".format( type(self).__name__ ) ) if isinstance(clusters, int): all_nodes = list(G.nodes()) random.shuffle(all_nodes) cluster_size = len(all_nodes) // self.k self.clusters = [ all_nodes[i : i + cluster_size] for i in range(0, len(all_nodes), cluster_size) ] if len(self.clusters) > self.k: self.clusters[-2].extend(self.clusters[-1]) del self.clusters[-1] print(f"Number of clusters {self.k}") for i, c in enumerate(self.clusters): print(f"{i} cluster has size {len(c)}") self.features = G.node_features(self.node_list) def flow(self, node_ids, targets=None, name=None): if targets is not None: if not is_real_iterable(targets): raise TypeError( "{}: Targets must be an iterable or None".format( type(self).__name__ ) ) if len(targets) != len(node_ids): raise ValueError( "{}: Targets must be the same length as node_ids".format( type(self).__name__ ) ) return ClusterNodeSequence( self.graph, self.clusters, targets=targets, node_ids=node_ids, q=self.q, lam=self.lam, name=name, ) class ClusterNodeSequence(Sequence): def __init__( self, graph, clusters, targets=None, node_ids=None, normalize_adj=True, q=1, lam=0.1, name=None, ): self.name = name self.clusters = list() self.clusters_original = copy.deepcopy(clusters) self.graph = graph self.node_list = list(graph.nodes()) self.normalize_adj = normalize_adj self.q = q self.lam = lam self.node_order = list() self._node_order_in_progress = list() self.__node_buffer = dict() self.target_ids = list() if len(clusters) % self.q != 0: raise ValueError( "The number of clusters should be exactly divisible by q. However, {} number of clusters is not exactly divisible by {}.".format( len(clusters), q ) ) if node_ids is not None: self.target_ids = list(node_ids) if targets is not None: if node_ids is None: raise ValueError( "Since targets is not None, node_ids must be given and cannot be None." ) if len(node_ids) != len(targets): raise ValueError( "When passed together targets and indices should be the same length." 
) self.targets = np.asanyarray(targets) self.target_node_lookup = dict( zip(self.target_ids, range(len(self.target_ids))) ) else: self.targets = None self.on_epoch_end() def __len__(self): num_batches = len(self.clusters_original) // self.q return num_batches def __getitem__(self, index): cluster = self.clusters[index] adj_cluster = self.graph.to_adjacency_matrix(cluster) if self.normalize_adj: adj_cluster.setdiag(1) degree_matrix_diag = 1.0 / (adj_cluster.sum(axis=1) + 1) degree_matrix_diag = np.squeeze(np.asarray(degree_matrix_diag)) degree_matrix = sparse.lil_matrix(adj_cluster.shape) degree_matrix.setdiag(degree_matrix_diag) adj_cluster = degree_matrix.tocsr() @ adj_cluster adj_cluster.setdiag((1.0 + self.lam) * adj_cluster.diagonal()) adj_cluster = adj_cluster.toarray() g_node_list = list(cluster) target_nodes_in_cluster = np.asanyarray( list(set(g_node_list).intersection(self.target_ids)) ) self.__node_buffer[index] = target_nodes_in_cluster node_lookup = dict(zip(g_node_list, range(len(g_node_list)))) target_node_indices = np.array( [node_lookup[n] for n in target_nodes_in_cluster] ) if index == (len(self.clusters_original) // self.q) - 1: self.__node_buffer_dict_to_list() cluster_targets = None if self.targets is not None: cluster_target_indices = np.array( [self.target_node_lookup[n] for n in target_nodes_in_cluster] ) cluster_targets = self.targets[cluster_target_indices] cluster_targets = cluster_targets.reshape((1,) + cluster_targets.shape) features = self.graph.node_features(g_node_list) features = np.reshape(features, (1,) + features.shape) adj_cluster = adj_cluster.reshape((1,) + adj_cluster.shape) target_node_indices = target_node_indices[np.newaxis, np.newaxis, :] return [features, target_node_indices, adj_cluster], cluster_targets def __node_buffer_dict_to_list(self): self.node_order = [] for k, v in self.__node_buffer.items(): self.node_order.extend(v) def on_epoch_end(self): if self.q > 1: cluster_indices = 
list(range(len(self.clusters_original))) random.shuffle(cluster_indices) self.clusters = [] for i in range(0, len(cluster_indices) - 1, self.q): cc = cluster_indices[i : i + self.q] tmp = [] for l in cc: tmp.extend(list(self.clusters_original[l])) self.clusters.append(tmp) else: self.clusters = copy.deepcopy(self.clusters_original) self.__node_buffer = dict() random.shuffle(self.clusters)
true
true
f71cd723c890ed8cd37ed7a27760fb739e4bfcaf
2,832
py
Python
app/scraping.py
Jvism/web-scraper
3d3625f1831ca51d8c77e47e799c822e3e19d97d
[ "MIT" ]
null
null
null
app/scraping.py
Jvism/web-scraper
3d3625f1831ca51d8c77e47e799c822e3e19d97d
[ "MIT" ]
null
null
null
app/scraping.py
Jvism/web-scraper
3d3625f1831ca51d8c77e47e799c822e3e19d97d
[ "MIT" ]
null
null
null
import requests as req from bs4 import BeautifulSoup as bs import os def clearConsole(): command = 'clear' if os.name in ('nt', 'dos'): # If Machine is running on Windows, use cls command = 'cls' os.system(command) def soup_recover(url): request = req.get(url) return bs(request.text,features="html.parser") def urls_extract(soup): urls = [] for article in soup.find_all('article'): url = article.find_all('a',limit=1)[0] urls.append('https://books.toscrape.com/catalogue/' + url['href']) return urls def extract_data(soup): data_book = [] for page in soup.select('.page'): book_information = page.find_all('td') title = page.find_all('h1')[0].text price = page.find_all('p')[0].text stock = book_information[5].text category = page.find_all('li')[2].text.split('\n')[1] cover = 'https://books.toscrape.com/' + page.find_all('img')[0]['src'].split('../')[2] upc = book_information[0].text product_type = book_information[1].text price_excl_tax = book_information[2].text price_incl_tax = book_information[3].text tax = book_information[4].text number_reviews = book_information[6].text data_book.extend([title,price,stock,category,cover,upc,product_type,price_excl_tax,price_incl_tax,tax,stock,number_reviews]) return data_book def export_csv(data): contador = 0 file = open('books_data.csv','w',encoding="utf-8") file.write('title,price,stock,category,cover,upc,product type,price (excl. tax),price (incl. 
tac),tax,availability,number of reviews\n') for book_data in data: info = '' for index,information in enumerate(book_data): if index == 0: words = information.split(',') title = '' for word in words: title += word info += title + ',' elif index == len(book_data)-1: info += information else: info += information + ',' file.write(info + '\n') clearConsole() print(str(round(contador*0.1,1)) + '%') contador += 1 file.close() def launch_app(): url_web = 'https://books.toscrape.com/' urls_books = [] number_pages = 50 for page in range(number_pages): urls_books.extend(urls_extract(soup_recover(url_web + 'catalogue/page-' + str(page+1) + '.html'))) clearConsole() print(str(round((page)*2,1)) + '%') data_books = [] for index,url in enumerate(urls_books): data_books.append(extract_data(soup_recover(url))) clearConsole() print(str(round(index*0.1,1)) + '%') export_csv(data_books) return launch_app()
27.495146
140
0.592514
import requests as req from bs4 import BeautifulSoup as bs import os def clearConsole(): command = 'clear' if os.name in ('nt', 'dos'): command = 'cls' os.system(command) def soup_recover(url): request = req.get(url) return bs(request.text,features="html.parser") def urls_extract(soup): urls = [] for article in soup.find_all('article'): url = article.find_all('a',limit=1)[0] urls.append('https://books.toscrape.com/catalogue/' + url['href']) return urls def extract_data(soup): data_book = [] for page in soup.select('.page'): book_information = page.find_all('td') title = page.find_all('h1')[0].text price = page.find_all('p')[0].text stock = book_information[5].text category = page.find_all('li')[2].text.split('\n')[1] cover = 'https://books.toscrape.com/' + page.find_all('img')[0]['src'].split('../')[2] upc = book_information[0].text product_type = book_information[1].text price_excl_tax = book_information[2].text price_incl_tax = book_information[3].text tax = book_information[4].text number_reviews = book_information[6].text data_book.extend([title,price,stock,category,cover,upc,product_type,price_excl_tax,price_incl_tax,tax,stock,number_reviews]) return data_book def export_csv(data): contador = 0 file = open('books_data.csv','w',encoding="utf-8") file.write('title,price,stock,category,cover,upc,product type,price (excl. tax),price (incl. 
tac),tax,availability,number of reviews\n') for book_data in data: info = '' for index,information in enumerate(book_data): if index == 0: words = information.split(',') title = '' for word in words: title += word info += title + ',' elif index == len(book_data)-1: info += information else: info += information + ',' file.write(info + '\n') clearConsole() print(str(round(contador*0.1,1)) + '%') contador += 1 file.close() def launch_app(): url_web = 'https://books.toscrape.com/' urls_books = [] number_pages = 50 for page in range(number_pages): urls_books.extend(urls_extract(soup_recover(url_web + 'catalogue/page-' + str(page+1) + '.html'))) clearConsole() print(str(round((page)*2,1)) + '%') data_books = [] for index,url in enumerate(urls_books): data_books.append(extract_data(soup_recover(url))) clearConsole() print(str(round(index*0.1,1)) + '%') export_csv(data_books) return launch_app()
true
true
f71cd7eb1d7001125af24fce5bc28d33488eb10f
1,313
py
Python
runtime/opt/taupage/init.d/06-update-sysctl.py
pc-alves/taupage
07025d45772d47b43e0a20d7ee21f10a6ff5162d
[ "Apache-2.0" ]
49
2015-04-14T13:55:10.000Z
2020-02-14T22:55:43.000Z
runtime/opt/taupage/init.d/06-update-sysctl.py
pc-alves/taupage
07025d45772d47b43e0a20d7ee21f10a6ff5162d
[ "Apache-2.0" ]
538
2015-04-01T10:53:09.000Z
2020-04-17T08:43:36.000Z
runtime/opt/taupage/init.d/06-update-sysctl.py
pc-alves/taupage
07025d45772d47b43e0a20d7ee21f10a6ff5162d
[ "Apache-2.0" ]
67
2015-05-05T19:48:30.000Z
2020-11-04T04:59:00.000Z
#!/usr/bin/env python3 import logging import sys import subprocess from taupage import configure_logging, get_config def main(): """Configure custom sysctl parameters If a sysctl section is present, add the valid parameters to sysctl and reloads. """ CUSTOM_SYSCTL_CONF = '/etc/sysctl.d/99-custom.conf' configure_logging() config = get_config() sysctl = config.get('sysctl') if sysctl is None: sys.exit(0) try: sysctl_entries = ['{} = {}'.format(key, value) for key, value in sysctl.items()] with open(CUSTOM_SYSCTL_CONF, 'w') as file: file.write('\n'.join(sysctl_entries)+'\n') logging.info('Successfully written sysctl parameters') except Exception as e: logging.error('Failed to write sysctl parameters') logging.exception(e) sys.exit(1) try: exitcode = subprocess.call(['/sbin/sysctl', '-p', CUSTOM_SYSCTL_CONF]) if exitcode != 0: logging.error('Reloading sysctl failed with exitcode {}'.format(exitcode)) sys.exit(1) logging.info('Successfully reloaded sysctl parameters') except Exception as e: logging.error('Failed to reload sysctl') logging.exception(e) sys.exit(1) if __name__ == '__main__': main()
26.795918
88
0.638995
import logging import sys import subprocess from taupage import configure_logging, get_config def main(): CUSTOM_SYSCTL_CONF = '/etc/sysctl.d/99-custom.conf' configure_logging() config = get_config() sysctl = config.get('sysctl') if sysctl is None: sys.exit(0) try: sysctl_entries = ['{} = {}'.format(key, value) for key, value in sysctl.items()] with open(CUSTOM_SYSCTL_CONF, 'w') as file: file.write('\n'.join(sysctl_entries)+'\n') logging.info('Successfully written sysctl parameters') except Exception as e: logging.error('Failed to write sysctl parameters') logging.exception(e) sys.exit(1) try: exitcode = subprocess.call(['/sbin/sysctl', '-p', CUSTOM_SYSCTL_CONF]) if exitcode != 0: logging.error('Reloading sysctl failed with exitcode {}'.format(exitcode)) sys.exit(1) logging.info('Successfully reloaded sysctl parameters') except Exception as e: logging.error('Failed to reload sysctl') logging.exception(e) sys.exit(1) if __name__ == '__main__': main()
true
true
f71cd8d3024c1d478964feaae2c4352cb8ad6a81
852
py
Python
lib/utils.py
Titorat/SSrehab
6691ee1ed442073bfa00a51f0d9ab74b9252d302
[ "MIT" ]
null
null
null
lib/utils.py
Titorat/SSrehab
6691ee1ed442073bfa00a51f0d9ab74b9252d302
[ "MIT" ]
null
null
null
lib/utils.py
Titorat/SSrehab
6691ee1ed442073bfa00a51f0d9ab74b9252d302
[ "MIT" ]
null
null
null
# standard library from subprocess import run, PIPE from typing import List RUN_CMD_ONFAIL_EXITCODE = 22 def run_cmd(cmd: List[str]): """A wrapper around subprocess.run that nicely fails on a non-zero exit code""" if len(cmd) == 0: raise ValueError('cmd has to be a non-empty list') res = run(cmd, stdout=PIPE, stderr=PIPE) if res.returncode != 0: print(f"command \"{cmd[0]}\" finished with exit code: {res.returncode}") stderr = res.stderr.decode('utf-8') if stderr: print("and produced the following error message:") print(stderr) exit(RUN_CMD_ONFAIL_EXITCODE) return res.stdout.decode('utf-8').rstrip() def run_bash(bash_code: str): """Safely runs a bash command, and nicely fails on a non-zero exit code""" return run_cmd(['bash', '-c', bash_code])
30.428571
83
0.651408
from subprocess import run, PIPE from typing import List RUN_CMD_ONFAIL_EXITCODE = 22 def run_cmd(cmd: List[str]): if len(cmd) == 0: raise ValueError('cmd has to be a non-empty list') res = run(cmd, stdout=PIPE, stderr=PIPE) if res.returncode != 0: print(f"command \"{cmd[0]}\" finished with exit code: {res.returncode}") stderr = res.stderr.decode('utf-8') if stderr: print("and produced the following error message:") print(stderr) exit(RUN_CMD_ONFAIL_EXITCODE) return res.stdout.decode('utf-8').rstrip() def run_bash(bash_code: str): return run_cmd(['bash', '-c', bash_code])
true
true
f71cd9187dca0fd26b3cdbda0a4c8921d179e358
14,773
py
Python
python/helpers/pydev/pydevd_plugins/jinja2_debug.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
null
null
null
python/helpers/pydev/pydevd_plugins/jinja2_debug.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
11
2017-02-27T22:35:32.000Z
2021-12-24T08:07:40.000Z
python/helpers/pydev/pydevd_plugins/jinja2_debug.py
teddywest32/intellij-community
e0268d7a1da1d318b441001448cdd3e8929b2f29
[ "Apache-2.0" ]
null
null
null
import traceback from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name from _pydevd_bundle.pydevd_constants import get_thread_id, STATE_SUSPEND, dict_contains, dict_iter_items, dict_keys, JINJA2_SUSPEND from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK from _pydevd_bundle import pydevd_vars from pydevd_file_utils import get_abs_path_real_path_and_base_from_file from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode class Jinja2LineBreakpoint(LineBreakpoint): def __init__(self, file, line, condition, func_name, expression): self.file = file LineBreakpoint.__init__(self, line, condition, func_name, expression) def is_triggered(self, template_frame_file, template_frame_line): return self.file == template_frame_file and self.line == template_frame_line def __str__(self): return "Jinja2LineBreakpoint: %s-%d" %(self.file, self.line) def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name): result = None if type == 'jinja2-line': breakpoint = Jinja2LineBreakpoint(file, line, condition, func_name, expression) if not hasattr(pydb, 'jinja2_breakpoints'): _init_plugin_breaks(pydb) result = breakpoint, pydb.jinja2_breakpoints return result return result def add_exception_breakpoint(plugin, pydb, type, exception): if type == 'jinja2': if not hasattr(pydb, 'jinja2_exception_break'): _init_plugin_breaks(pydb) pydb.jinja2_exception_break[exception] = True pydb.set_tracing_for_untraced_contexts() return True return False def _init_plugin_breaks(pydb): pydb.jinja2_exception_break = {} pydb.jinja2_breakpoints = {} def remove_exception_breakpoint(plugin, pydb, type, exception): if type == 'jinja2': try: del pydb.jinja2_exception_break[exception] return True except: pass return False def get_breakpoints(plugin, pydb, type): if type == 'jinja2-line': return pydb.jinja2_breakpoints return None def _is_jinja2_render_call(frame): try: name = frame.f_code.co_name if 
dict_contains(frame.f_globals, "__jinja_template__") and name in ("root", "loop", "macro") or name.startswith("block_"): return True return False except: traceback.print_exc() return False def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None): frame = Jinja2TemplateFrame(frame) if frame.f_lineno is None: return None pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame}) pydb.set_suspend(thread, cmd) thread.additional_info.suspend_type = JINJA2_SUSPEND if cmd == CMD_ADD_EXCEPTION_BREAK: # send exception name as message if message: message = str(message) thread.additional_info.pydev_message = message return frame def _is_jinja2_suspended(thread): return thread.additional_info.suspend_type == JINJA2_SUSPEND def _is_jinja2_context_call(frame): return dict_contains(frame.f_locals, "_Context__obj") def _is_jinja2_internal_function(frame): return dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ in \ ('LoopContext', 'TemplateReference', 'Macro', 'BlockReference') def _find_jinja2_render_frame(frame): while frame is not None and not _is_jinja2_render_call(frame): frame = frame.f_back return frame #======================================================================================================================= # Jinja2 Frame #======================================================================================================================= class Jinja2TemplateFrame: def __init__(self, frame): file_name = _get_jinja2_template_filename(frame) self.back_context = None if 'context' in frame.f_locals: #sometimes we don't have 'context', e.g. 
in macros self.back_context = frame.f_locals['context'] self.f_code = FCode('template', file_name) self.f_lineno = _get_jinja2_template_line(frame) self.f_back = frame self.f_globals = {} self.f_locals = self.collect_context(frame) self.f_trace = None def collect_context(self, frame): res = {} for k, v in frame.f_locals.items(): if not k.startswith('l_'): res[k] = v elif v and not _is_missing(v): res[k[2:]] = v if self.back_context is not None: for k, v in self.back_context.items(): res[k] = v return res def _change_variable(self, frame, name, value): in_vars_or_parents = False if 'context' in frame.f_locals: if name in frame.f_locals['context'].parent: self.back_context.parent[name] = value in_vars_or_parents = True if name in frame.f_locals['context'].vars: self.back_context.vars[name] = value in_vars_or_parents = True l_name = 'l_' + name if l_name in frame.f_locals: if in_vars_or_parents: frame.f_locals[l_name] = self.back_context.resolve(name) else: frame.f_locals[l_name] = value def change_variable(plugin, frame, attr, expression): if isinstance(frame, Jinja2TemplateFrame): result = eval(expression, frame.f_globals, frame.f_locals) frame._change_variable(frame.f_back, attr, result) return result return False def _is_missing(item): if item.__class__.__name__ == 'MissingType': return True return False def _find_render_function_frame(frame): #in order to hide internal rendering functions old_frame = frame try: while not (dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ == 'Template' and \ frame.f_code.co_name == 'render'): frame = frame.f_back if frame is None: return old_frame return frame except: return old_frame def _get_jinja2_template_line(frame): debug_info = None if dict_contains(frame.f_globals,'__jinja_template__'): _debug_info = frame.f_globals['__jinja_template__']._debug_info if _debug_info != '': #sometimes template contains only plain text debug_info = frame.f_globals['__jinja_template__'].debug_info if debug_info 
is None: return None lineno = frame.f_lineno for pair in debug_info: if pair[1] == lineno: return pair[0] return None def _get_jinja2_template_filename(frame): if dict_contains(frame.f_globals, '__jinja_template__'): fname = frame.f_globals['__jinja_template__'].filename abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname) return abs_path_real_path_and_base[1] return None #======================================================================================================================= # Jinja2 Step Commands #======================================================================================================================= def has_exception_breaks(plugin): if len(plugin.main_debugger.jinja2_exception_break) > 0: return True return False def has_line_breaks(plugin): for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints): if len(breakpoints) > 0: return True return False def can_not_skip(plugin, pydb, pydb_frame, frame): if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): filename = _get_jinja2_template_filename(frame) jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) if jinja2_breakpoints_for_file: return True return False def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop): pydb, filename, info, thread = args plugin_stop = False stop_info['jinja2_stop'] = False if _is_jinja2_suspended(thread): stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame) plugin_stop = stop_info['jinja2_stop'] stop = False if info.pydev_call_from_jinja2 is not None: if _is_jinja2_internal_function(frame): #if internal Jinja2 function was called, we sould continue debugging inside template info.pydev_call_from_jinja2 = None else: #we go into python code from Jinja2 rendering frame stop = True if event == 'call' and _is_jinja2_context_call(frame.f_back): #we called function from context, the next step will be in function info.pydev_call_from_jinja2 = 1 if 
event == 'return' and _is_jinja2_context_call(frame.f_back): #we return from python code to Jinja2 rendering frame info.pydev_step_stop = info.pydev_call_from_jinja2 info.pydev_call_from_jinja2 = None thread.additional_info.suspend_type = JINJA2_SUSPEND stop = False #print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \ # "thread.additional_info.suspend_type", thread.additional_info.suspend_type #print "event", event, "farme.locals", frame.f_locals return stop, plugin_stop def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop): pydb, filename, info, thread = args plugin_stop = False stop_info['jinja2_stop'] = False if _is_jinja2_suspended(thread): stop = False if info.pydev_call_inside_jinja2 is None: if _is_jinja2_render_call(frame): if event == 'call': info.pydev_call_inside_jinja2 = frame.f_back if event in ('line', 'return'): info.pydev_call_inside_jinja2 = frame else: if event == 'line': if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame: stop_info['jinja2_stop'] = True plugin_stop = stop_info['jinja2_stop'] if event == 'return': if frame is info.pydev_call_inside_jinja2 and not dict_contains(frame.f_back.f_locals,'event'): info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back) return stop, plugin_stop else: if event == 'return' and _is_jinja2_context_call(frame.f_back): #we return from python code to Jinja2 rendering frame info.pydev_call_from_jinja2 = None info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame) thread.additional_info.suspend_type = JINJA2_SUSPEND stop = False return stop, plugin_stop #print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \ # "thread.additional_info.suspend_type", thread.additional_info.suspend_type #print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2 #print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop 
#print "is_context_call", _is_jinja2_context_call(frame) #print "render", _is_jinja2_render_call(frame) #print "-------------" return stop, plugin_stop def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd): pydb, filename, info, thread = args if dict_contains(stop_info, 'jinja2_stop') and stop_info['jinja2_stop']: frame = _suspend_jinja2(pydb, thread, frame, step_cmd) if frame: pydb.do_wait_suspend(thread, frame, event, arg) return True return False def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args): pydb, filename, info, thread = args new_frame = None jinja2_breakpoint = None flag = False type = 'jinja2' if event in ('line', 'call') and info.pydev_state != STATE_SUSPEND and \ pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): filename = _get_jinja2_template_filename(frame) jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) new_frame = Jinja2TemplateFrame(frame) if jinja2_breakpoints_for_file: lineno = frame.f_lineno template_lineno = _get_jinja2_template_line(frame) if template_lineno is not None and dict_contains(jinja2_breakpoints_for_file, template_lineno): jinja2_breakpoint = jinja2_breakpoints_for_file[template_lineno] flag = True new_frame = Jinja2TemplateFrame(frame) return flag, jinja2_breakpoint, new_frame, type def suspend(plugin, pydb, thread, frame, bp_type): if bp_type == 'jinja2': return _suspend_jinja2(pydb, thread, frame) return None def exception_break(plugin, pydb, pydb_frame, frame, args, arg): pydb, filename, info, thread = args exception, value, trace = arg if pydb.jinja2_exception_break: exception_type = dict_keys(pydb.jinja2_exception_break)[0] if get_exception_name(exception) in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'): #errors in rendering render_frame = _find_jinja2_render_frame(frame) if render_frame: suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type) if suspend_frame: add_exception_to_frame(suspend_frame, 
(exception, value, trace)) flag = True suspend_frame.f_back = frame frame = suspend_frame return flag, frame elif get_exception_name(exception) in ('TemplateSyntaxError', 'TemplateAssertionError'): #errors in compile time name = frame.f_code.co_name if name in ('template', 'top-level template code', '<module>') or name.startswith('block '): #Jinja2 translates exception info and creates fake frame on his own pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) add_exception_to_frame(frame, (exception, value, trace)) thread.additional_info.suspend_type = JINJA2_SUSPEND thread.additional_info.pydev_message = str(exception_type) flag = True return flag, frame return None
39.712366
131
0.646314
import traceback from _pydevd_bundle.pydevd_breakpoints import LineBreakpoint, get_exception_name from _pydevd_bundle.pydevd_constants import get_thread_id, STATE_SUSPEND, dict_contains, dict_iter_items, dict_keys, JINJA2_SUSPEND from _pydevd_bundle.pydevd_comm import CMD_SET_BREAK, CMD_ADD_EXCEPTION_BREAK from _pydevd_bundle import pydevd_vars from pydevd_file_utils import get_abs_path_real_path_and_base_from_file from _pydevd_bundle.pydevd_frame_utils import add_exception_to_frame, FCode class Jinja2LineBreakpoint(LineBreakpoint): def __init__(self, file, line, condition, func_name, expression): self.file = file LineBreakpoint.__init__(self, line, condition, func_name, expression) def is_triggered(self, template_frame_file, template_frame_line): return self.file == template_frame_file and self.line == template_frame_line def __str__(self): return "Jinja2LineBreakpoint: %s-%d" %(self.file, self.line) def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name): result = None if type == 'jinja2-line': breakpoint = Jinja2LineBreakpoint(file, line, condition, func_name, expression) if not hasattr(pydb, 'jinja2_breakpoints'): _init_plugin_breaks(pydb) result = breakpoint, pydb.jinja2_breakpoints return result return result def add_exception_breakpoint(plugin, pydb, type, exception): if type == 'jinja2': if not hasattr(pydb, 'jinja2_exception_break'): _init_plugin_breaks(pydb) pydb.jinja2_exception_break[exception] = True pydb.set_tracing_for_untraced_contexts() return True return False def _init_plugin_breaks(pydb): pydb.jinja2_exception_break = {} pydb.jinja2_breakpoints = {} def remove_exception_breakpoint(plugin, pydb, type, exception): if type == 'jinja2': try: del pydb.jinja2_exception_break[exception] return True except: pass return False def get_breakpoints(plugin, pydb, type): if type == 'jinja2-line': return pydb.jinja2_breakpoints return None def _is_jinja2_render_call(frame): try: name = frame.f_code.co_name if 
dict_contains(frame.f_globals, "__jinja_template__") and name in ("root", "loop", "macro") or name.startswith("block_"): return True return False except: traceback.print_exc() return False def _suspend_jinja2(pydb, thread, frame, cmd=CMD_SET_BREAK, message=None): frame = Jinja2TemplateFrame(frame) if frame.f_lineno is None: return None pydevd_vars.add_additional_frame_by_id(get_thread_id(thread), {id(frame): frame}) pydb.set_suspend(thread, cmd) thread.additional_info.suspend_type = JINJA2_SUSPEND if cmd == CMD_ADD_EXCEPTION_BREAK: if message: message = str(message) thread.additional_info.pydev_message = message return frame def _is_jinja2_suspended(thread): return thread.additional_info.suspend_type == JINJA2_SUSPEND def _is_jinja2_context_call(frame): return dict_contains(frame.f_locals, "_Context__obj") def _is_jinja2_internal_function(frame): return dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ in \ ('LoopContext', 'TemplateReference', 'Macro', 'BlockReference') def _find_jinja2_render_frame(frame): while frame is not None and not _is_jinja2_render_call(frame): frame = frame.f_back return frame class Jinja2TemplateFrame: def __init__(self, frame): file_name = _get_jinja2_template_filename(frame) self.back_context = None if 'context' in frame.f_locals: self.back_context = frame.f_locals['context'] self.f_code = FCode('template', file_name) self.f_lineno = _get_jinja2_template_line(frame) self.f_back = frame self.f_globals = {} self.f_locals = self.collect_context(frame) self.f_trace = None def collect_context(self, frame): res = {} for k, v in frame.f_locals.items(): if not k.startswith('l_'): res[k] = v elif v and not _is_missing(v): res[k[2:]] = v if self.back_context is not None: for k, v in self.back_context.items(): res[k] = v return res def _change_variable(self, frame, name, value): in_vars_or_parents = False if 'context' in frame.f_locals: if name in frame.f_locals['context'].parent: self.back_context.parent[name] = 
value in_vars_or_parents = True if name in frame.f_locals['context'].vars: self.back_context.vars[name] = value in_vars_or_parents = True l_name = 'l_' + name if l_name in frame.f_locals: if in_vars_or_parents: frame.f_locals[l_name] = self.back_context.resolve(name) else: frame.f_locals[l_name] = value def change_variable(plugin, frame, attr, expression): if isinstance(frame, Jinja2TemplateFrame): result = eval(expression, frame.f_globals, frame.f_locals) frame._change_variable(frame.f_back, attr, result) return result return False def _is_missing(item): if item.__class__.__name__ == 'MissingType': return True return False def _find_render_function_frame(frame): #in order to hide internal rendering functions old_frame = frame try: while not (dict_contains(frame.f_locals, 'self') and frame.f_locals['self'].__class__.__name__ == 'Template' and \ frame.f_code.co_name == 'render'): frame = frame.f_back if frame is None: return old_frame return frame except: return old_frame def _get_jinja2_template_line(frame): debug_info = None if dict_contains(frame.f_globals,'__jinja_template__'): _debug_info = frame.f_globals['__jinja_template__']._debug_info if _debug_info != '': #sometimes template contains only plain text debug_info = frame.f_globals['__jinja_template__'].debug_info if debug_info is None: return None lineno = frame.f_lineno for pair in debug_info: if pair[1] == lineno: return pair[0] return None def _get_jinja2_template_filename(frame): if dict_contains(frame.f_globals, '__jinja_template__'): fname = frame.f_globals['__jinja_template__'].filename abs_path_real_path_and_base = get_abs_path_real_path_and_base_from_file(fname) return abs_path_real_path_and_base[1] return None #======================================================================================================================= # Jinja2 Step Commands #======================================================================================================================= def 
has_exception_breaks(plugin): if len(plugin.main_debugger.jinja2_exception_break) > 0: return True return False def has_line_breaks(plugin): for file, breakpoints in dict_iter_items(plugin.main_debugger.jinja2_breakpoints): if len(breakpoints) > 0: return True return False def can_not_skip(plugin, pydb, pydb_frame, frame): if pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): filename = _get_jinja2_template_filename(frame) jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) if jinja2_breakpoints_for_file: return True return False def cmd_step_into(plugin, pydb, frame, event, args, stop_info, stop): pydb, filename, info, thread = args plugin_stop = False stop_info['jinja2_stop'] = False if _is_jinja2_suspended(thread): stop_info['jinja2_stop'] = event in ('call', 'line') and _is_jinja2_render_call(frame) plugin_stop = stop_info['jinja2_stop'] stop = False if info.pydev_call_from_jinja2 is not None: if _is_jinja2_internal_function(frame): #if internal Jinja2 function was called, we sould continue debugging inside template info.pydev_call_from_jinja2 = None else: #we go into python code from Jinja2 rendering frame stop = True if event == 'call' and _is_jinja2_context_call(frame.f_back): #we called function from context, the next step will be in function info.pydev_call_from_jinja2 = 1 if event == 'return' and _is_jinja2_context_call(frame.f_back): #we return from python code to Jinja2 rendering frame info.pydev_step_stop = info.pydev_call_from_jinja2 info.pydev_call_from_jinja2 = None thread.additional_info.suspend_type = JINJA2_SUSPEND stop = False #print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop_info", stop_info, \ # "thread.additional_info.suspend_type", thread.additional_info.suspend_type #print "event", event, "farme.locals", frame.f_locals return stop, plugin_stop def cmd_step_over(plugin, pydb, frame, event, args, stop_info, stop): pydb, filename, info, thread = args plugin_stop = False stop_info['jinja2_stop'] = 
False if _is_jinja2_suspended(thread): stop = False if info.pydev_call_inside_jinja2 is None: if _is_jinja2_render_call(frame): if event == 'call': info.pydev_call_inside_jinja2 = frame.f_back if event in ('line', 'return'): info.pydev_call_inside_jinja2 = frame else: if event == 'line': if _is_jinja2_render_call(frame) and info.pydev_call_inside_jinja2 is frame: stop_info['jinja2_stop'] = True plugin_stop = stop_info['jinja2_stop'] if event == 'return': if frame is info.pydev_call_inside_jinja2 and not dict_contains(frame.f_back.f_locals,'event'): info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame.f_back) return stop, plugin_stop else: if event == 'return' and _is_jinja2_context_call(frame.f_back): #we return from python code to Jinja2 rendering frame info.pydev_call_from_jinja2 = None info.pydev_call_inside_jinja2 = _find_jinja2_render_frame(frame) thread.additional_info.suspend_type = JINJA2_SUSPEND stop = False return stop, plugin_stop #print "info.pydev_call_from_jinja2", info.pydev_call_from_jinja2, "stop", stop, "jinja_stop", jinja2_stop, \ # "thread.additional_info.suspend_type", thread.additional_info.suspend_type #print "event", event, "info.pydev_call_inside_jinja2", info.pydev_call_inside_jinja2 #print "frame", frame, "frame.f_back", frame.f_back, "step_stop", info.pydev_step_stop #print "is_context_call", _is_jinja2_context_call(frame) #print "render", _is_jinja2_render_call(frame) #print "-------------" return stop, plugin_stop def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd): pydb, filename, info, thread = args if dict_contains(stop_info, 'jinja2_stop') and stop_info['jinja2_stop']: frame = _suspend_jinja2(pydb, thread, frame, step_cmd) if frame: pydb.do_wait_suspend(thread, frame, event, arg) return True return False def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args): pydb, filename, info, thread = args new_frame = None jinja2_breakpoint = None flag = False type = 'jinja2' if event in ('line', 'call') 
and info.pydev_state != STATE_SUSPEND and \ pydb.jinja2_breakpoints and _is_jinja2_render_call(frame): filename = _get_jinja2_template_filename(frame) jinja2_breakpoints_for_file = pydb.jinja2_breakpoints.get(filename) new_frame = Jinja2TemplateFrame(frame) if jinja2_breakpoints_for_file: lineno = frame.f_lineno template_lineno = _get_jinja2_template_line(frame) if template_lineno is not None and dict_contains(jinja2_breakpoints_for_file, template_lineno): jinja2_breakpoint = jinja2_breakpoints_for_file[template_lineno] flag = True new_frame = Jinja2TemplateFrame(frame) return flag, jinja2_breakpoint, new_frame, type def suspend(plugin, pydb, thread, frame, bp_type): if bp_type == 'jinja2': return _suspend_jinja2(pydb, thread, frame) return None def exception_break(plugin, pydb, pydb_frame, frame, args, arg): pydb, filename, info, thread = args exception, value, trace = arg if pydb.jinja2_exception_break: exception_type = dict_keys(pydb.jinja2_exception_break)[0] if get_exception_name(exception) in ('UndefinedError', 'TemplateNotFound', 'TemplatesNotFound'): #errors in rendering render_frame = _find_jinja2_render_frame(frame) if render_frame: suspend_frame = _suspend_jinja2(pydb, thread, render_frame, CMD_ADD_EXCEPTION_BREAK, message=exception_type) if suspend_frame: add_exception_to_frame(suspend_frame, (exception, value, trace)) flag = True suspend_frame.f_back = frame frame = suspend_frame return flag, frame elif get_exception_name(exception) in ('TemplateSyntaxError', 'TemplateAssertionError'): #errors in compile time name = frame.f_code.co_name if name in ('template', 'top-level template code', '<module>') or name.startswith('block '): #Jinja2 translates exception info and creates fake frame on his own pydb_frame.set_suspend(thread, CMD_ADD_EXCEPTION_BREAK) add_exception_to_frame(frame, (exception, value, trace)) thread.additional_info.suspend_type = JINJA2_SUSPEND thread.additional_info.pydev_message = str(exception_type) flag = True return flag, frame return 
None
true
true
f71cdcfa194966b2387e234194561a46582002fa
13,923
py
Python
log_casp_inh/model_141.py
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
54a5ef7e868be34289836bbbb024a2963c0c9c86
[ "MIT" ]
null
null
null
log_casp_inh/model_141.py
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
54a5ef7e868be34289836bbbb024a2963c0c9c86
[ "MIT" ]
null
null
null
log_casp_inh/model_141.py
LoLab-VU/Bayesian_Inference_of_Network_Dynamics
54a5ef7e868be34289836bbbb024a2963c0c9c86
[ "MIT" ]
null
null
null
# exported from PySB model 'model' from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD Model() Monomer('C6A', ['C8pro']) Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM']) Monomer('Ligand', ['Receptor']) Monomer('C6pro', ['C3A']) Monomer('ParpU', ['C3A']) Monomer('BidU', ['C8A']) Monomer('BidT') Monomer('C3A', ['Xiap', 'ParpU', 'C6pro']) Monomer('BidM', ['BaxM']) Monomer('BaxM', ['BidM', 'BaxA']) Monomer('C8A', ['BidU', 'C3pro']) Monomer('Xiap', ['SmacC', 'C3A']) Monomer('Receptor', ['Ligand', 'Fadd']) Monomer('C3ub') Monomer('Fadd', ['Receptor', 'C8pro']) Monomer('C3pro', ['C8A']) Monomer('SmacM', ['BaxA']) Monomer('SmacC', ['Xiap']) Monomer('C8pro', ['Fadd', 'C6A']) Monomer('ParpC') Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0) Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0) Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0) Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0) Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0) Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0) Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0) Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0) Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0) Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0) Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0) Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0) Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0) Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0) Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0) Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0) 
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0) Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0) Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0) Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0) Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0) Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0) Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0) Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0) Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0) Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0) Parameter('pore_formation_0_BaxA_pore_2kf', 1.0) Parameter('pore_formation_0_BaxA_pore_1kr', 1.0) Parameter('pore_formation_1_BaxA_pore_2kf', 1.0) Parameter('pore_formation_1_BaxA_pore_1kr', 1.0) Parameter('pore_formation_2_BaxA_pore_2kf', 1.0) Parameter('pore_formation_2_BaxA_pore_1kr', 1.0) Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0) Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0) Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0) Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0) Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0) Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0) Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0) Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0) Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0) Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0) Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0) Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0) Parameter('C6A_0', 0.0) Parameter('BaxA_0', 0.0) 
Parameter('Ligand_0', 1000.0) Parameter('C6pro_0', 100.0) Parameter('ParpU_0', 1000000.0) Parameter('BidU_0', 171000.0) Parameter('BidT_0', 0.0) Parameter('C3A_0', 0.0) Parameter('BidM_0', 0.0) Parameter('BaxM_0', 40000.0) Parameter('C8A_0', 0.0) Parameter('Xiap_0', 35250.0) Parameter('Receptor_0', 100.0) Parameter('C3ub_0', 0.0) Parameter('Fadd_0', 130000.0) Parameter('C3pro_0', 21000.0) Parameter('SmacM_0', 100000.0) Parameter('SmacC_0', 0.0) Parameter('C8pro_0', 130000.0) Parameter('ParpC_0', 0.0) Observable('C6A_obs', C6A()) Observable('BaxA_obs', BaxA()) Observable('Ligand_obs', Ligand()) Observable('C6pro_obs', C6pro()) Observable('ParpU_obs', ParpU()) Observable('BidU_obs', BidU()) Observable('BidT_obs', BidT()) Observable('C3A_obs', C3A()) Observable('BidM_obs', BidM()) Observable('BaxM_obs', BaxM()) Observable('C8A_obs', C8A()) Observable('Xiap_obs', Xiap()) Observable('Receptor_obs', Receptor()) Observable('C3ub_obs', C3ub()) Observable('Fadd_obs', Fadd()) Observable('C3pro_obs', C3pro()) Observable('SmacM_obs', SmacM()) Observable('SmacC_obs', SmacC()) Observable('C8pro_obs', C8pro()) Observable('ParpC_obs', ParpC()) Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr) Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr) Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr) 
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc) Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr) Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc) Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr) Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr) Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc) Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr) Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), 
catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc) Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr) Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr) Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc) Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr) Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc) Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr) Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, 
BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr) Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr) Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr) Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc) Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, 
catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr) Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc) Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr) Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc) Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr) Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc) Initial(C6A(C8pro=None), C6A_0) Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), BaxA_0) Initial(Ligand(Receptor=None), Ligand_0) Initial(C6pro(C3A=None), C6pro_0) Initial(ParpU(C3A=None), ParpU_0) Initial(BidU(C8A=None), BidU_0) Initial(BidT(), BidT_0) Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0) Initial(BidM(BaxM=None), BidM_0) Initial(BaxM(BidM=None, BaxA=None), BaxM_0) Initial(C8A(BidU=None, C3pro=None), C8A_0) Initial(Xiap(SmacC=None, C3A=None), Xiap_0) Initial(Receptor(Ligand=None, Fadd=None), Receptor_0) Initial(C3ub(), C3ub_0) Initial(Fadd(Receptor=None, C8pro=None), Fadd_0) Initial(C3pro(C8A=None), C3pro_0) Initial(SmacM(BaxA=None), SmacM_0) Initial(SmacC(Xiap=None), 
SmacC_0) Initial(C8pro(Fadd=None, C6A=None), C8pro_0) Initial(ParpC(), ParpC_0)
85.417178
598
0.808518
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD Model() Monomer('C6A', ['C8pro']) Monomer('BaxA', ['BaxM', 'BaxA_1', 'BaxA_2', 'SmacM']) Monomer('Ligand', ['Receptor']) Monomer('C6pro', ['C3A']) Monomer('ParpU', ['C3A']) Monomer('BidU', ['C8A']) Monomer('BidT') Monomer('C3A', ['Xiap', 'ParpU', 'C6pro']) Monomer('BidM', ['BaxM']) Monomer('BaxM', ['BidM', 'BaxA']) Monomer('C8A', ['BidU', 'C3pro']) Monomer('Xiap', ['SmacC', 'C3A']) Monomer('Receptor', ['Ligand', 'Fadd']) Monomer('C3ub') Monomer('Fadd', ['Receptor', 'C8pro']) Monomer('C3pro', ['C8A']) Monomer('SmacM', ['BaxA']) Monomer('SmacC', ['Xiap']) Monomer('C8pro', ['Fadd', 'C6A']) Monomer('ParpC') Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0) Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0) Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0) Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0) Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0) Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0) Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0) Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0) Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0) Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0) Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0) Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0) Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0) Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0) Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0) Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0) Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0) 
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0) Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0) Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0) Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0) Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0) Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0) Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0) Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0) Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0) Parameter('pore_formation_0_BaxA_pore_2kf', 1.0) Parameter('pore_formation_0_BaxA_pore_1kr', 1.0) Parameter('pore_formation_1_BaxA_pore_2kf', 1.0) Parameter('pore_formation_1_BaxA_pore_1kr', 1.0) Parameter('pore_formation_2_BaxA_pore_2kf', 1.0) Parameter('pore_formation_2_BaxA_pore_1kr', 1.0) Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0) Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0) Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0) Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0) Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0) Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0) Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0) Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0) Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0) Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0) Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0) Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0) Parameter('C6A_0', 0.0) Parameter('BaxA_0', 0.0) Parameter('Ligand_0', 1000.0) Parameter('C6pro_0', 100.0) Parameter('ParpU_0', 
1000000.0) Parameter('BidU_0', 171000.0) Parameter('BidT_0', 0.0) Parameter('C3A_0', 0.0) Parameter('BidM_0', 0.0) Parameter('BaxM_0', 40000.0) Parameter('C8A_0', 0.0) Parameter('Xiap_0', 35250.0) Parameter('Receptor_0', 100.0) Parameter('C3ub_0', 0.0) Parameter('Fadd_0', 130000.0) Parameter('C3pro_0', 21000.0) Parameter('SmacM_0', 100000.0) Parameter('SmacC_0', 0.0) Parameter('C8pro_0', 130000.0) Parameter('ParpC_0', 0.0) Observable('C6A_obs', C6A()) Observable('BaxA_obs', BaxA()) Observable('Ligand_obs', Ligand()) Observable('C6pro_obs', C6pro()) Observable('ParpU_obs', ParpU()) Observable('BidU_obs', BidU()) Observable('BidT_obs', BidT()) Observable('C3A_obs', C3A()) Observable('BidM_obs', BidM()) Observable('BaxM_obs', BaxM()) Observable('C8A_obs', C8A()) Observable('Xiap_obs', Xiap()) Observable('Receptor_obs', Receptor()) Observable('C3ub_obs', C3ub()) Observable('Fadd_obs', Fadd()) Observable('C3pro_obs', C3pro()) Observable('SmacM_obs', SmacM()) Observable('SmacC_obs', SmacC()) Observable('C8pro_obs', C8pro()) Observable('ParpC_obs', ParpC()) Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr) Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr) Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr) Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % 
C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc) Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr) Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc) Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr) Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr) Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc) Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr) Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc) Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None), 
equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr) Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr) Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc) Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr) Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, BaxA_1=None, BaxA_2=None, SmacM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc) Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr) Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=None, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=None, SmacM=None) | BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr) 
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None) + BaxA(BaxM=None, BaxA_1=3, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr) Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr) Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=5) % SmacM(BaxA=5) >> BaxA(BaxM=None, BaxA_1=4, BaxA_2=1, SmacM=None) % BaxA(BaxM=None, BaxA_1=1, BaxA_2=2, SmacM=None) % BaxA(BaxM=None, BaxA_1=2, BaxA_2=3, SmacM=None) % BaxA(BaxM=None, BaxA_1=3, BaxA_2=4, SmacM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc) Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr) Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, 
C3pro=1) % C3pro(C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc) Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr) Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc) Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr) Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc) Initial(C6A(C8pro=None), C6A_0) Initial(BaxA(BaxM=None, BaxA_1=None, BaxA_2=None, SmacM=None), BaxA_0) Initial(Ligand(Receptor=None), Ligand_0) Initial(C6pro(C3A=None), C6pro_0) Initial(ParpU(C3A=None), ParpU_0) Initial(BidU(C8A=None), BidU_0) Initial(BidT(), BidT_0) Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0) Initial(BidM(BaxM=None), BidM_0) Initial(BaxM(BidM=None, BaxA=None), BaxM_0) Initial(C8A(BidU=None, C3pro=None), C8A_0) Initial(Xiap(SmacC=None, C3A=None), Xiap_0) Initial(Receptor(Ligand=None, Fadd=None), Receptor_0) Initial(C3ub(), C3ub_0) Initial(Fadd(Receptor=None, C8pro=None), Fadd_0) Initial(C3pro(C8A=None), C3pro_0) Initial(SmacM(BaxA=None), SmacM_0) Initial(SmacC(Xiap=None), SmacC_0) Initial(C8pro(Fadd=None, C6A=None), C8pro_0) Initial(ParpC(), ParpC_0)
true
true
f71cdebb1cdf3286b45c66f1071264ecfee8c65a
873
py
Python
state/go_home.py
LHGames-2018/espace
ce9ed47fee285d60bf7183132d5686ad8198b70f
[ "MIT" ]
null
null
null
state/go_home.py
LHGames-2018/espace
ce9ed47fee285d60bf7183132d5686ad8198b70f
[ "MIT" ]
null
null
null
state/go_home.py
LHGames-2018/espace
ce9ed47fee285d60bf7183132d5686ad8198b70f
[ "MIT" ]
null
null
null
from state.machine import BaseState from helper import * import state class GoHomeState(BaseState): def action(self, game_state): my_pos = game_state_helper.get_my_position(game_state) poids, next_move = game_state_helper.get_home(game_state) if not next_move: vector = game_state['PlayerInfo'].HouseLocation - my_pos if abs(vector.x) > abs(vector.y): next_move = Point(-1 if vector.x < 0 else 1, 0) else: next_move = Point(0, -1 if vector.y < 0 else 1) if poids == 0: return state.GatherResourcesState(), None print(my_pos, next_move, file=__import__('sys').stderr) tile_content = game_state['parsedGameMap'][(my_pos + next_move).to_tuple()] action = create_move_action(tile_content, next_move) return None, action
30.103448
83
0.635739
from state.machine import BaseState from helper import * import state class GoHomeState(BaseState): def action(self, game_state): my_pos = game_state_helper.get_my_position(game_state) poids, next_move = game_state_helper.get_home(game_state) if not next_move: vector = game_state['PlayerInfo'].HouseLocation - my_pos if abs(vector.x) > abs(vector.y): next_move = Point(-1 if vector.x < 0 else 1, 0) else: next_move = Point(0, -1 if vector.y < 0 else 1) if poids == 0: return state.GatherResourcesState(), None print(my_pos, next_move, file=__import__('sys').stderr) tile_content = game_state['parsedGameMap'][(my_pos + next_move).to_tuple()] action = create_move_action(tile_content, next_move) return None, action
true
true
f71cdecd4a4c849ec91e2ee1edc93e4c72d44a98
1,158
py
Python
google/pubsub_v1/services/subscriber/transports/__init__.py
acocuzzo/python-pubsub
fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9
[ "Apache-2.0" ]
null
null
null
google/pubsub_v1/services/subscriber/transports/__init__.py
acocuzzo/python-pubsub
fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9
[ "Apache-2.0" ]
null
null
null
google/pubsub_v1/services/subscriber/transports/__init__.py
acocuzzo/python-pubsub
fcb67dd0d8fff5a583ebe0a3a08d0219601df8e9
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from collections import OrderedDict from typing import Dict, Type from .base import SubscriberTransport from .grpc import SubscriberGrpcTransport from .grpc_asyncio import SubscriberGrpcAsyncIOTransport # Compile a registry of transports. _transport_registry = OrderedDict() # type: Dict[str, Type[SubscriberTransport]] _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport __all__ = ( "SubscriberTransport", "SubscriberGrpcTransport", "SubscriberGrpcAsyncIOTransport", )
34.058824
81
0.780656
from collections import OrderedDict from typing import Dict, Type from .base import SubscriberTransport from .grpc import SubscriberGrpcTransport from .grpc_asyncio import SubscriberGrpcAsyncIOTransport _transport_registry = OrderedDict() _transport_registry["grpc"] = SubscriberGrpcTransport _transport_registry["grpc_asyncio"] = SubscriberGrpcAsyncIOTransport __all__ = ( "SubscriberTransport", "SubscriberGrpcTransport", "SubscriberGrpcAsyncIOTransport", )
true
true
f71cdf394d949f1c6fa70b3924bafe111b4036a5
692
py
Python
cumulusci/cli/logger.py
jayhatha/CumulusCI
b3864621d3aca72dec669339af08657526582344
[ "BSD-3-Clause" ]
null
null
null
cumulusci/cli/logger.py
jayhatha/CumulusCI
b3864621d3aca72dec669339af08657526582344
[ "BSD-3-Clause" ]
null
null
null
cumulusci/cli/logger.py
jayhatha/CumulusCI
b3864621d3aca72dec669339af08657526582344
[ "BSD-3-Clause" ]
null
null
null
""" CLI logger """ from __future__ import unicode_literals import logging import coloredlogs import requests def init_logger(log_requests=False): """ Initialize the logger """ logger = logging.getLogger(__name__.split(".")[0]) for handler in logger.handlers: # pragma: nocover logger.removeHandler(handler) formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s") handler = logging.StreamHandler() handler.setLevel(logging.DEBUG) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) logger.propagate = False if log_requests: requests.packages.urllib3.add_stderr_logger()
25.62963
76
0.722543
from __future__ import unicode_literals import logging import coloredlogs import requests def init_logger(log_requests=False): logger = logging.getLogger(__name__.split(".")[0]) for handler in logger.handlers: logger.removeHandler(handler) formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s") handler = logging.StreamHandler() handler.setLevel(logging.DEBUG) handler.setFormatter(formatter) logger.addHandler(handler) logger.setLevel(logging.DEBUG) logger.propagate = False if log_requests: requests.packages.urllib3.add_stderr_logger()
true
true
f71ce1aa0ef3d6fbbb1c1d05ae5adc8f5fcb088b
25,861
py
Python
pint/testsuite/test_issues.py
clarkgwillison/pint
03be0544b749ee55fbd1253d18e2a84151dce716
[ "BSD-3-Clause" ]
2
2020-07-08T20:09:35.000Z
2021-03-05T12:51:30.000Z
pint/testsuite/test_issues.py
clarkgwillison/pint
03be0544b749ee55fbd1253d18e2a84151dce716
[ "BSD-3-Clause" ]
null
null
null
pint/testsuite/test_issues.py
clarkgwillison/pint
03be0544b749ee55fbd1253d18e2a84151dce716
[ "BSD-3-Clause" ]
null
null
null
import copy import math import pprint import unittest import pytest from pint import Context, DimensionalityError, UnitRegistry from pint.compat import np from pint.testsuite import QuantityTestCase, helpers from pint.unit import UnitsContainer from pint.util import ParserHelper ureg = UnitRegistry() class TestIssues(QuantityTestCase): FORCE_NDARRAY = False def setup(self): self.ureg.autoconvert_offset_to_baseunit = False @unittest.expectedFailure def test_issue25(self): x = ParserHelper.from_string("10 %") self.assertEqual(x, ParserHelper(10, {"%": 1})) x = ParserHelper.from_string("10 ‰") self.assertEqual(x, ParserHelper(10, {"‰": 1})) ureg.define("percent = [fraction]; offset: 0 = %") ureg.define("permille = percent / 10 = ‰") x = ureg.parse_expression("10 %") self.assertEqual(x, ureg.Quantity(10, {"%": 1})) y = ureg.parse_expression("10 ‰") self.assertEqual(y, ureg.Quantity(10, {"‰": 1})) self.assertEqual(x.to("‰"), ureg.Quantity(1, {"‰": 1})) def test_issue29(self): t = 4 * ureg("mW") self.assertEqual(t.magnitude, 4) self.assertEqual(t._units, UnitsContainer(milliwatt=1)) self.assertEqual(t.to("joule / second"), 4e-3 * ureg("W")) @unittest.expectedFailure @helpers.requires_numpy() def test_issue37(self): x = np.ma.masked_array([1, 2, 3], mask=[True, True, False]) q = ureg.meter * x self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) q = x * ureg.meter self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) m = np.ma.masked_array(2 * np.ones(3, 3)) qq = q * m self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) qq = m * q self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) @unittest.expectedFailure @helpers.requires_numpy() def 
test_issue39(self): x = np.matrix([[1, 2, 3], [1, 2, 3], [1, 2, 3]]) q = ureg.meter * x self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) q = x * ureg.meter self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) m = np.matrix(2 * np.ones(3, 3)) qq = q * m self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) qq = m * q self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) @helpers.requires_numpy() def test_issue44(self): x = 4.0 * ureg.dimensionless np.sqrt(x) self.assertQuantityAlmostEqual( np.sqrt([4.0] * ureg.dimensionless), [2.0] * ureg.dimensionless ) self.assertQuantityAlmostEqual( np.sqrt(4.0 * ureg.dimensionless), 2.0 * ureg.dimensionless ) def test_issue45(self): import math self.assertAlmostEqual(math.sqrt(4 * ureg.m / ureg.cm), math.sqrt(4 * 100)) self.assertAlmostEqual(float(ureg.V / ureg.mV), 1000.0) @helpers.requires_numpy() def test_issue45b(self): self.assertAlmostEqual( np.sin([np.pi / 2] * ureg.m / ureg.m), np.sin([np.pi / 2] * ureg.dimensionless), ) self.assertAlmostEqual( np.sin([np.pi / 2] * ureg.cm / ureg.m), np.sin([np.pi / 2] * ureg.dimensionless * 0.01), ) def test_issue50(self): Q_ = ureg.Quantity self.assertEqual(Q_(100), 100 * ureg.dimensionless) self.assertEqual(Q_("100"), 100 * ureg.dimensionless) def test_issue52(self): u1 = UnitRegistry() u2 = UnitRegistry() q1 = 1 * u1.meter q2 = 1 * u2.meter import operator as op for fun in ( op.add, op.iadd, op.sub, op.isub, op.mul, op.imul, op.floordiv, op.ifloordiv, op.truediv, op.itruediv, ): self.assertRaises(ValueError, fun, q1, q2) def test_issue54(self): self.assertEqual((1 * ureg.km / ureg.m + 1).magnitude, 1001) def test_issue54_related(self): self.assertEqual(ureg.km / 
ureg.m, 1000) self.assertEqual(1000, ureg.km / ureg.m) self.assertLess(900, ureg.km / ureg.m) self.assertGreater(1100, ureg.km / ureg.m) def test_issue61(self): Q_ = ureg.Quantity for value in ({}, {"a": 3}, None): self.assertRaises(TypeError, Q_, value) self.assertRaises(TypeError, Q_, value, "meter") self.assertRaises(ValueError, Q_, "", "meter") self.assertRaises(ValueError, Q_, "") @helpers.requires_not_numpy() def test_issue61_notNP(self): Q_ = ureg.Quantity for value in ([1, 2, 3], (1, 2, 3)): self.assertRaises(TypeError, Q_, value) self.assertRaises(TypeError, Q_, value, "meter") def test_issue62(self): m = ureg("m**0.5") self.assertEqual(str(m.units), "meter ** 0.5") def test_issue66(self): self.assertEqual( ureg.get_dimensionality(UnitsContainer({"[temperature]": 1})), UnitsContainer({"[temperature]": 1}), ) self.assertEqual( ureg.get_dimensionality(ureg.kelvin), UnitsContainer({"[temperature]": 1}) ) self.assertEqual( ureg.get_dimensionality(ureg.degC), UnitsContainer({"[temperature]": 1}) ) def test_issue66b(self): self.assertEqual( ureg.get_base_units(ureg.kelvin), (1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))), ) self.assertEqual( ureg.get_base_units(ureg.degC), (1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))), ) def test_issue69(self): q = ureg("m").to(ureg("in")) self.assertEqual(q, ureg("m").to("in")) @helpers.requires_numpy() def test_issue74(self): v1 = np.asarray([1.0, 2.0, 3.0]) v2 = np.asarray([3.0, 2.0, 1.0]) q1 = v1 * ureg.ms q2 = v2 * ureg.ms np.testing.assert_array_equal(q1 < q2, v1 < v2) np.testing.assert_array_equal(q1 > q2, v1 > v2) np.testing.assert_array_equal(q1 <= q2, v1 <= v2) np.testing.assert_array_equal(q1 >= q2, v1 >= v2) q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s v2s = q2s.to("ms").magnitude np.testing.assert_array_equal(q1 < q2s, v1 < v2s) np.testing.assert_array_equal(q1 > q2s, v1 > v2s) np.testing.assert_array_equal(q1 <= q2s, v1 <= v2s) np.testing.assert_array_equal(q1 >= q2s, v1 >= v2s) @helpers.requires_numpy() def 
test_issue75(self): v1 = np.asarray([1.0, 2.0, 3.0]) v2 = np.asarray([3.0, 2.0, 1.0]) q1 = v1 * ureg.ms q2 = v2 * ureg.ms np.testing.assert_array_equal(q1 == q2, v1 == v2) np.testing.assert_array_equal(q1 != q2, v1 != v2) q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s v2s = q2s.to("ms").magnitude np.testing.assert_array_equal(q1 == q2s, v1 == v2s) np.testing.assert_array_equal(q1 != q2s, v1 != v2s) @helpers.requires_uncertainties() def test_issue77(self): acc = (5.0 * ureg("m/s/s")).plus_minus(0.25) tim = (37.0 * ureg("s")).plus_minus(0.16) dis = acc * tim ** 2 / 2 self.assertEqual(dis.value, acc.value * tim.value ** 2 / 2) def test_issue85(self): T = 4.0 * ureg.kelvin m = 1.0 * ureg.amu va = 2.0 * ureg.k * T / m va.to_base_units() boltmk = 1.380649e-23 * ureg.J / ureg.K vb = 2.0 * boltmk * T / m self.assertQuantityAlmostEqual(va.to_base_units(), vb.to_base_units()) def test_issue86(self): ureg = self.ureg ureg.autoconvert_offset_to_baseunit = True def parts(q): return q.magnitude, q.units q1 = 10.0 * ureg.degC q2 = 10.0 * ureg.kelvin k1 = q1.to_base_units() q3 = 3.0 * ureg.meter q1m, q1u = parts(q1) q2m, q2u = parts(q2) q3m, q3u = parts(q3) k1m, k1u = parts(k1) self.assertEqual(parts(q2 * q3), (q2m * q3m, q2u * q3u)) self.assertEqual(parts(q2 / q3), (q2m / q3m, q2u / q3u)) self.assertEqual(parts(q3 * q2), (q3m * q2m, q3u * q2u)) self.assertEqual(parts(q3 / q2), (q3m / q2m, q3u / q2u)) self.assertEqual(parts(q2 ** 1), (q2m ** 1, q2u ** 1)) self.assertEqual(parts(q2 ** -1), (q2m ** -1, q2u ** -1)) self.assertEqual(parts(q2 ** 2), (q2m ** 2, q2u ** 2)) self.assertEqual(parts(q2 ** -2), (q2m ** -2, q2u ** -2)) self.assertEqual(parts(q1 * q3), (k1m * q3m, k1u * q3u)) self.assertEqual(parts(q1 / q3), (k1m / q3m, k1u / q3u)) self.assertEqual(parts(q3 * q1), (q3m * k1m, q3u * k1u)) self.assertEqual(parts(q3 / q1), (q3m / k1m, q3u / k1u)) self.assertEqual(parts(q1 ** -1), (k1m ** -1, k1u ** -1)) self.assertEqual(parts(q1 ** 2), (k1m ** 2, k1u ** 2)) 
self.assertEqual(parts(q1 ** -2), (k1m ** -2, k1u ** -2)) def test_issues86b(self): ureg = self.ureg T1 = 200.0 * ureg.degC T2 = T1.to(ureg.kelvin) m = 132.9054519 * ureg.amu v1 = 2 * ureg.k * T1 / m v2 = 2 * ureg.k * T2 / m self.assertQuantityAlmostEqual(v1, v2) self.assertQuantityAlmostEqual(v1, v2.to_base_units()) self.assertQuantityAlmostEqual(v1.to_base_units(), v2) self.assertQuantityAlmostEqual(v1.to_base_units(), v2.to_base_units()) @unittest.expectedFailure def test_issue86c(self): ureg = self.ureg ureg.autoconvert_offset_to_baseunit = True T = ureg.degC T = 100.0 * T self.assertQuantityAlmostEqual(ureg.k * 2 * T, ureg.k * (2 * T)) def test_issue93(self): x = 5 * ureg.meter self.assertIsInstance(x.magnitude, int) y = 0.1 * ureg.meter self.assertIsInstance(y.magnitude, float) z = 5 * ureg.meter self.assertIsInstance(z.magnitude, int) z += y self.assertIsInstance(z.magnitude, float) self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter) self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter) def test_issue104(self): x = [ureg("1 meter"), ureg("1 meter"), ureg("1 meter")] y = [ureg("1 meter")] * 3 def summer(values): if not values: return 0 total = values[0] for v in values[1:]: total += v return total self.assertQuantityAlmostEqual(summer(x), ureg.Quantity(3, "meter")) self.assertQuantityAlmostEqual(x[0], ureg.Quantity(1, "meter")) self.assertQuantityAlmostEqual(summer(y), ureg.Quantity(3, "meter")) self.assertQuantityAlmostEqual(y[0], ureg.Quantity(1, "meter")) def test_issue105(self): func = ureg.parse_unit_name val = list(func("meter")) self.assertEqual(list(func("METER")), []) self.assertEqual(val, list(func("METER", False))) for func in (ureg.get_name, ureg.parse_expression): val = func("meter") with self.assertRaises(AttributeError): func("METER") self.assertEqual(val, func("METER", False)) @helpers.requires_numpy() def test_issue127(self): q = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter q[0] = np.nan self.assertNotEqual(q[0], 1.0) 
self.assertTrue(math.isnan(q[0].magnitude)) q[1] = float("NaN") self.assertNotEqual(q[1], 2.0) self.assertTrue(math.isnan(q[1].magnitude)) def test_issue170(self): Q_ = UnitRegistry().Quantity q = Q_("1 kHz") / Q_("100 Hz") iq = int(q) self.assertEqual(iq, 10) self.assertIsInstance(iq, int) def test_angstrom_creation(self): ureg.Quantity(2, "Å") def test_alternative_angstrom_definition(self): ureg.Quantity(2, "\u212B") def test_micro_creation(self): ureg.Quantity(2, "µm") @helpers.requires_numpy() def test_issue171_real_imag(self): qr = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter qi = [4.0, 3.0, 2.0, 1.0] * self.ureg.meter q = qr + 1j * qi self.assertQuantityEqual(q.real, qr) self.assertQuantityEqual(q.imag, qi) @helpers.requires_numpy() def test_issue171_T(self): a = np.asarray([[1.0, 2.0, 3.0, 4.0], [4.0, 3.0, 2.0, 1.0]]) q1 = a * self.ureg.meter q2 = a.T * self.ureg.meter self.assertQuantityEqual(q1.T, q2) @helpers.requires_numpy() def test_issue250(self): a = self.ureg.V b = self.ureg.mV self.assertEqual(np.float16(a / b), 1000.0) self.assertEqual(np.float32(a / b), 1000.0) self.assertEqual(np.float64(a / b), 1000.0) if "float128" in dir(np): self.assertEqual(np.float128(a / b), 1000.0) def test_issue252(self): ur = UnitRegistry() q = ur("3 F") t = copy.deepcopy(q) u = t.to(ur.mF) self.assertQuantityEqual(q.to(ur.mF), u) def test_issue323(self): from fractions import Fraction as F self.assertEqual((self.Q_(F(2, 3), "s")).to("ms"), self.Q_(F(2000, 3), "ms")) self.assertEqual((self.Q_(F(2, 3), "m")).to("km"), self.Q_(F(1, 1500), "km")) def test_issue339(self): q1 = self.ureg("") self.assertEqual(q1.magnitude, 1) self.assertEqual(q1.units, self.ureg.dimensionless) q2 = self.ureg("1 dimensionless") self.assertEqual(q1, q2) def test_issue354_356_370(self): self.assertEqual( "{:~}".format(1 * self.ureg.second / self.ureg.millisecond), "1.0 s / ms" ) self.assertEqual("{:~}".format(1 * self.ureg.count), "1 count") self.assertEqual("{:~}".format(1 * self.ureg("MiB")), "1 
MiB") def test_issue468(self): @ureg.wraps(("kg"), "meter") def f(x): return x x = ureg.Quantity(1.0, "meter") y = f(x) z = x * y self.assertEqual(z, ureg.Quantity(1.0, "meter * kilogram")) @helpers.requires_numpy() def test_issue482(self): q = self.ureg.Quantity(1, self.ureg.dimensionless) qe = np.exp(q) self.assertIsInstance(qe, self.ureg.Quantity) @helpers.requires_numpy() def test_issue483(self): ureg = self.ureg a = np.asarray([1, 2, 3]) q = [1, 2, 3] * ureg.dimensionless p = (q ** q).m np.testing.assert_array_equal(p, a ** a) def test_issue507(self): # leading underscore in unit works with numbers ureg.define("_100km = 100 * kilometer") battery_ec = 16 * ureg.kWh / ureg._100km # noqa: F841 # ... but not with text ureg.define("_home = 4700 * kWh / year") with self.assertRaises(AttributeError): home_elec_power = 1 * ureg._home # noqa: F841 # ... or with *only* underscores ureg.define("_ = 45 * km") with self.assertRaises(AttributeError): one_blank = 1 * ureg._ # noqa: F841 def test_issue523(self): src, dst = UnitsContainer({"meter": 1}), UnitsContainer({"degF": 1}) value = 10.0 convert = self.ureg.convert self.assertRaises(DimensionalityError, convert, value, src, dst) self.assertRaises(DimensionalityError, convert, value, dst, src) def test_issue532(self): ureg = self.ureg @ureg.check(ureg("")) def f(x): return 2 * x self.assertEqual(f(ureg.Quantity(1, "")), 2) self.assertRaises(DimensionalityError, f, ureg.Quantity(1, "m")) def test_issue625a(self): Q_ = ureg.Quantity from math import sqrt @ureg.wraps(ureg.second, (ureg.meters, ureg.meters / ureg.second ** 2)) def calculate_time_to_fall(height, gravity=Q_(9.8, "m/s^2")): """Calculate time to fall from a height h with a default gravity. By default, the gravity is assumed to be earth gravity, but it can be modified. 
d = .5 * g * t**2 t = sqrt(2 * d / g) Parameters ---------- height : gravity : (Default value = Q_(9.8) "m/s^2") : Returns ------- """ return sqrt(2 * height / gravity) lunar_module_height = Q_(10, "m") t1 = calculate_time_to_fall(lunar_module_height) print(t1) self.assertAlmostEqual(t1, Q_(1.4285714285714286, "s")) moon_gravity = Q_(1.625, "m/s^2") t2 = calculate_time_to_fall(lunar_module_height, moon_gravity) self.assertAlmostEqual(t2, Q_(3.508232077228117, "s")) def test_issue625b(self): Q_ = ureg.Quantity @ureg.wraps("=A*B", ("=A", "=B")) def get_displacement(time, rate=Q_(1, "m/s")): """Calculates displacement from a duration and default rate. Parameters ---------- time : rate : (Default value = Q_(1) "m/s") : Returns ------- """ return time * rate d1 = get_displacement(Q_(2, "s")) self.assertAlmostEqual(d1, Q_(2, "m")) d2 = get_displacement(Q_(2, "s"), Q_(1, "deg/s")) self.assertAlmostEqual(d2, Q_(2, " deg")) def test_issue625c(self): u = UnitRegistry() @u.wraps("=A*B*C", ("=A", "=B", "=C")) def get_product(a=2 * u.m, b=3 * u.m, c=5 * u.m): return a * b * c self.assertEqual(get_product(a=3 * u.m), 45 * u.m ** 3) self.assertEqual(get_product(b=2 * u.m), 20 * u.m ** 3) self.assertEqual(get_product(c=1 * u.dimensionless), 6 * u.m ** 2) def test_issue655a(self): distance = 1 * ureg.m time = 1 * ureg.s velocity = distance / time self.assertEqual(distance.check("[length]"), True) self.assertEqual(distance.check("[time]"), False) self.assertEqual(velocity.check("[length] / [time]"), True) self.assertEqual(velocity.check("1 / [time] * [length]"), True) def test_issue655b(self): Q_ = ureg.Quantity @ureg.check("[length]", "[length]/[time]^2") def pendulum_period(length, G=Q_(1, "standard_gravity")): print(length) return (2 * math.pi * (length / G) ** 0.5).to("s") length = Q_(1, ureg.m) # Assume earth gravity t = pendulum_period(length) self.assertAlmostEqual(t, Q_("2.0064092925890407 second")) # Use moon gravity moon_gravity = Q_(1.625, "m/s^2") t = 
pendulum_period(length, moon_gravity) self.assertAlmostEqual(t, Q_("4.928936075204336 second")) def test_issue783(self): assert not ureg("g") == [] def test_issue856(self): ph1 = ParserHelper(scale=123) ph2 = copy.deepcopy(ph1) assert ph2.scale == ph1.scale ureg1 = UnitRegistry() ureg2 = copy.deepcopy(ureg1) # Very basic functionality test assert ureg2("1 t").to("kg").magnitude == 1000 def test_issue856b(self): # Test that, after a deepcopy(), the two UnitRegistries are # independent from each other ureg1 = UnitRegistry() ureg2 = copy.deepcopy(ureg1) ureg1.define("test123 = 123 kg") ureg2.define("test123 = 456 kg") assert ureg1("1 test123").to("kg").magnitude == 123 assert ureg2("1 test123").to("kg").magnitude == 456 def test_issue876(self): # Same hash must not imply equality. # As an implementation detail of CPython, hash(-1) == hash(-2). # This test is useless in potential alternative Python implementations where # hash(-1) != hash(-2); one would need to find hash collisions specific for each # implementation a = UnitsContainer({"[mass]": -1}) b = UnitsContainer({"[mass]": -2}) c = UnitsContainer({"[mass]": -3}) # Guarantee working on alternative Python implementations assert (hash(-1) == hash(-2)) == (hash(a) == hash(b)) assert (hash(-1) == hash(-3)) == (hash(a) == hash(c)) assert a != b assert a != c def test_issue902(self): ureg = UnitRegistry(auto_reduce_dimensions=True) velocity = 1 * ureg.m / ureg.s cross_section = 1 * ureg.um ** 2 result = cross_section / velocity assert result == 1e-12 * ureg.m * ureg.s def test_issue912(self): """pprint.pformat() invokes sorted() on large sets and frozensets and graciously handles TypeError, but not generic Exceptions. This test will fail if pint.DimensionalityError stops being a subclass of TypeError. 
Parameters ---------- Returns ------- """ meter_units = ureg.get_compatible_units(ureg.meter) hertz_units = ureg.get_compatible_units(ureg.hertz) pprint.pformat(meter_units | hertz_units) def test_issue932(self): q = ureg.Quantity("1 kg") with self.assertRaises(DimensionalityError): q.to("joule") ureg.enable_contexts("energy", *(Context() for _ in range(20))) q.to("joule") ureg.disable_contexts() with self.assertRaises(DimensionalityError): q.to("joule") def test_issue960(self): q = (1 * ureg.nanometer).to_compact("micrometer") assert q.units == ureg.nanometer assert q.magnitude == 1 def test_issue1032(self): class MultiplicativeDictionary(dict): def __rmul__(self, other): return self.__class__( {key: value * other for key, value in self.items()} ) q = 3 * ureg.s d = MultiplicativeDictionary({4: 5, 6: 7}) assert q * d == MultiplicativeDictionary({4: 15 * ureg.s, 6: 21 * ureg.s}) with self.assertRaises(TypeError): d * q @helpers.requires_numpy() def test_issue973(self): """Verify that an empty array Quantity can be created through multiplication.""" q0 = np.array([]) * ureg.m # by Unit q1 = np.array([]) * ureg("m") # by Quantity assert isinstance(q0, ureg.Quantity) assert isinstance(q1, ureg.Quantity) assert len(q0) == len(q1) == 0 def test_issue1062_issue1097(self): # Must not be used by any other tests assert "nanometer" not in ureg._units for i in range(5): ctx = Context.from_lines(["@context _", "cal = 4 J"]) with ureg.context("sp", ctx): q = ureg.Quantity(1, "nm") q.to("J") def test_issue1086(self): # units with prefixes should correctly test as 'in' the registry assert "bits" in ureg assert "gigabits" in ureg assert "meters" in ureg assert "kilometers" in ureg # unknown or incorrect units should test as 'not in' the registry assert "magicbits" not in ureg assert "unknownmeters" not in ureg assert "gigatrees" not in ureg def test_issue1112(self): ureg = UnitRegistry( """ m = [length] g = [mass] s = [time] ft = 0.305 m lb = 454 g @context c1 [time]->[length] : 
value * 10 m/s @end @context c2 ft = 0.3 m @end @context c3 lb = 500 g @end """.splitlines() ) ureg.enable_contexts("c1") ureg.enable_contexts("c2") ureg.enable_contexts("c3") if np is not None: @pytest.mark.parametrize( "callable", [ lambda x: np.sin(x / x.units), # Issue 399 lambda x: np.cos(x / x.units), # Issue 399 np.isfinite, # Issue 481 np.shape, # Issue 509 np.size, # Issue 509 np.sqrt, # Issue 622 lambda x: x.mean(), # Issue 678 lambda x: x.copy(), # Issue 678 np.array, lambda x: x.conjugate, ], ) @pytest.mark.parametrize( "q", [ pytest.param(ureg.Quantity(1, "m"), id="python scalar int"), pytest.param(ureg.Quantity([1, 2, 3, 4], "m"), id="array int"), pytest.param(ureg.Quantity([1], "m")[0], id="numpy scalar int"), pytest.param(ureg.Quantity(1.0, "m"), id="python scalar float"), pytest.param(ureg.Quantity([1.0, 2.0, 3.0, 4.0], "m"), id="array float"), pytest.param(ureg.Quantity([1.0], "m")[0], id="numpy scalar float"), ], ) def test_issue925(callable, q): # Test for immutability of type type_before = type(q._magnitude) callable(q) assert isinstance(q._magnitude, type_before)
33.369032
88
0.560806
import copy import math import pprint import unittest import pytest from pint import Context, DimensionalityError, UnitRegistry from pint.compat import np from pint.testsuite import QuantityTestCase, helpers from pint.unit import UnitsContainer from pint.util import ParserHelper ureg = UnitRegistry() class TestIssues(QuantityTestCase): FORCE_NDARRAY = False def setup(self): self.ureg.autoconvert_offset_to_baseunit = False @unittest.expectedFailure def test_issue25(self): x = ParserHelper.from_string("10 %") self.assertEqual(x, ParserHelper(10, {"%": 1})) x = ParserHelper.from_string("10 ‰") self.assertEqual(x, ParserHelper(10, {"‰": 1})) ureg.define("percent = [fraction]; offset: 0 = %") ureg.define("permille = percent / 10 = ‰") x = ureg.parse_expression("10 %") self.assertEqual(x, ureg.Quantity(10, {"%": 1})) y = ureg.parse_expression("10 ‰") self.assertEqual(y, ureg.Quantity(10, {"‰": 1})) self.assertEqual(x.to("‰"), ureg.Quantity(1, {"‰": 1})) def test_issue29(self): t = 4 * ureg("mW") self.assertEqual(t.magnitude, 4) self.assertEqual(t._units, UnitsContainer(milliwatt=1)) self.assertEqual(t.to("joule / second"), 4e-3 * ureg("W")) @unittest.expectedFailure @helpers.requires_numpy() def test_issue37(self): x = np.ma.masked_array([1, 2, 3], mask=[True, True, False]) q = ureg.meter * x self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) q = x * ureg.meter self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) m = np.ma.masked_array(2 * np.ones(3, 3)) qq = q * m self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) qq = m * q self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) @unittest.expectedFailure @helpers.requires_numpy() def 
test_issue39(self): x = np.matrix([[1, 2, 3], [1, 2, 3], [1, 2, 3]]) q = ureg.meter * x self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) q = x * ureg.meter self.assertIsInstance(q, ureg.Quantity) np.testing.assert_array_equal(q.magnitude, x) self.assertEqual(q.units, ureg.meter.units) m = np.matrix(2 * np.ones(3, 3)) qq = q * m self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) qq = m * q self.assertIsInstance(qq, ureg.Quantity) np.testing.assert_array_equal(qq.magnitude, x * m) self.assertEqual(qq.units, ureg.meter.units) @helpers.requires_numpy() def test_issue44(self): x = 4.0 * ureg.dimensionless np.sqrt(x) self.assertQuantityAlmostEqual( np.sqrt([4.0] * ureg.dimensionless), [2.0] * ureg.dimensionless ) self.assertQuantityAlmostEqual( np.sqrt(4.0 * ureg.dimensionless), 2.0 * ureg.dimensionless ) def test_issue45(self): import math self.assertAlmostEqual(math.sqrt(4 * ureg.m / ureg.cm), math.sqrt(4 * 100)) self.assertAlmostEqual(float(ureg.V / ureg.mV), 1000.0) @helpers.requires_numpy() def test_issue45b(self): self.assertAlmostEqual( np.sin([np.pi / 2] * ureg.m / ureg.m), np.sin([np.pi / 2] * ureg.dimensionless), ) self.assertAlmostEqual( np.sin([np.pi / 2] * ureg.cm / ureg.m), np.sin([np.pi / 2] * ureg.dimensionless * 0.01), ) def test_issue50(self): Q_ = ureg.Quantity self.assertEqual(Q_(100), 100 * ureg.dimensionless) self.assertEqual(Q_("100"), 100 * ureg.dimensionless) def test_issue52(self): u1 = UnitRegistry() u2 = UnitRegistry() q1 = 1 * u1.meter q2 = 1 * u2.meter import operator as op for fun in ( op.add, op.iadd, op.sub, op.isub, op.mul, op.imul, op.floordiv, op.ifloordiv, op.truediv, op.itruediv, ): self.assertRaises(ValueError, fun, q1, q2) def test_issue54(self): self.assertEqual((1 * ureg.km / ureg.m + 1).magnitude, 1001) def test_issue54_related(self): self.assertEqual(ureg.km / 
ureg.m, 1000) self.assertEqual(1000, ureg.km / ureg.m) self.assertLess(900, ureg.km / ureg.m) self.assertGreater(1100, ureg.km / ureg.m) def test_issue61(self): Q_ = ureg.Quantity for value in ({}, {"a": 3}, None): self.assertRaises(TypeError, Q_, value) self.assertRaises(TypeError, Q_, value, "meter") self.assertRaises(ValueError, Q_, "", "meter") self.assertRaises(ValueError, Q_, "") @helpers.requires_not_numpy() def test_issue61_notNP(self): Q_ = ureg.Quantity for value in ([1, 2, 3], (1, 2, 3)): self.assertRaises(TypeError, Q_, value) self.assertRaises(TypeError, Q_, value, "meter") def test_issue62(self): m = ureg("m**0.5") self.assertEqual(str(m.units), "meter ** 0.5") def test_issue66(self): self.assertEqual( ureg.get_dimensionality(UnitsContainer({"[temperature]": 1})), UnitsContainer({"[temperature]": 1}), ) self.assertEqual( ureg.get_dimensionality(ureg.kelvin), UnitsContainer({"[temperature]": 1}) ) self.assertEqual( ureg.get_dimensionality(ureg.degC), UnitsContainer({"[temperature]": 1}) ) def test_issue66b(self): self.assertEqual( ureg.get_base_units(ureg.kelvin), (1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))), ) self.assertEqual( ureg.get_base_units(ureg.degC), (1.0, ureg.Unit(UnitsContainer({"kelvin": 1}))), ) def test_issue69(self): q = ureg("m").to(ureg("in")) self.assertEqual(q, ureg("m").to("in")) @helpers.requires_numpy() def test_issue74(self): v1 = np.asarray([1.0, 2.0, 3.0]) v2 = np.asarray([3.0, 2.0, 1.0]) q1 = v1 * ureg.ms q2 = v2 * ureg.ms np.testing.assert_array_equal(q1 < q2, v1 < v2) np.testing.assert_array_equal(q1 > q2, v1 > v2) np.testing.assert_array_equal(q1 <= q2, v1 <= v2) np.testing.assert_array_equal(q1 >= q2, v1 >= v2) q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s v2s = q2s.to("ms").magnitude np.testing.assert_array_equal(q1 < q2s, v1 < v2s) np.testing.assert_array_equal(q1 > q2s, v1 > v2s) np.testing.assert_array_equal(q1 <= q2s, v1 <= v2s) np.testing.assert_array_equal(q1 >= q2s, v1 >= v2s) @helpers.requires_numpy() def 
test_issue75(self): v1 = np.asarray([1.0, 2.0, 3.0]) v2 = np.asarray([3.0, 2.0, 1.0]) q1 = v1 * ureg.ms q2 = v2 * ureg.ms np.testing.assert_array_equal(q1 == q2, v1 == v2) np.testing.assert_array_equal(q1 != q2, v1 != v2) q2s = np.asarray([0.003, 0.002, 0.001]) * ureg.s v2s = q2s.to("ms").magnitude np.testing.assert_array_equal(q1 == q2s, v1 == v2s) np.testing.assert_array_equal(q1 != q2s, v1 != v2s) @helpers.requires_uncertainties() def test_issue77(self): acc = (5.0 * ureg("m/s/s")).plus_minus(0.25) tim = (37.0 * ureg("s")).plus_minus(0.16) dis = acc * tim ** 2 / 2 self.assertEqual(dis.value, acc.value * tim.value ** 2 / 2) def test_issue85(self): T = 4.0 * ureg.kelvin m = 1.0 * ureg.amu va = 2.0 * ureg.k * T / m va.to_base_units() boltmk = 1.380649e-23 * ureg.J / ureg.K vb = 2.0 * boltmk * T / m self.assertQuantityAlmostEqual(va.to_base_units(), vb.to_base_units()) def test_issue86(self): ureg = self.ureg ureg.autoconvert_offset_to_baseunit = True def parts(q): return q.magnitude, q.units q1 = 10.0 * ureg.degC q2 = 10.0 * ureg.kelvin k1 = q1.to_base_units() q3 = 3.0 * ureg.meter q1m, q1u = parts(q1) q2m, q2u = parts(q2) q3m, q3u = parts(q3) k1m, k1u = parts(k1) self.assertEqual(parts(q2 * q3), (q2m * q3m, q2u * q3u)) self.assertEqual(parts(q2 / q3), (q2m / q3m, q2u / q3u)) self.assertEqual(parts(q3 * q2), (q3m * q2m, q3u * q2u)) self.assertEqual(parts(q3 / q2), (q3m / q2m, q3u / q2u)) self.assertEqual(parts(q2 ** 1), (q2m ** 1, q2u ** 1)) self.assertEqual(parts(q2 ** -1), (q2m ** -1, q2u ** -1)) self.assertEqual(parts(q2 ** 2), (q2m ** 2, q2u ** 2)) self.assertEqual(parts(q2 ** -2), (q2m ** -2, q2u ** -2)) self.assertEqual(parts(q1 * q3), (k1m * q3m, k1u * q3u)) self.assertEqual(parts(q1 / q3), (k1m / q3m, k1u / q3u)) self.assertEqual(parts(q3 * q1), (q3m * k1m, q3u * k1u)) self.assertEqual(parts(q3 / q1), (q3m / k1m, q3u / k1u)) self.assertEqual(parts(q1 ** -1), (k1m ** -1, k1u ** -1)) self.assertEqual(parts(q1 ** 2), (k1m ** 2, k1u ** 2)) 
self.assertEqual(parts(q1 ** -2), (k1m ** -2, k1u ** -2)) def test_issues86b(self): ureg = self.ureg T1 = 200.0 * ureg.degC T2 = T1.to(ureg.kelvin) m = 132.9054519 * ureg.amu v1 = 2 * ureg.k * T1 / m v2 = 2 * ureg.k * T2 / m self.assertQuantityAlmostEqual(v1, v2) self.assertQuantityAlmostEqual(v1, v2.to_base_units()) self.assertQuantityAlmostEqual(v1.to_base_units(), v2) self.assertQuantityAlmostEqual(v1.to_base_units(), v2.to_base_units()) @unittest.expectedFailure def test_issue86c(self): ureg = self.ureg ureg.autoconvert_offset_to_baseunit = True T = ureg.degC T = 100.0 * T self.assertQuantityAlmostEqual(ureg.k * 2 * T, ureg.k * (2 * T)) def test_issue93(self): x = 5 * ureg.meter self.assertIsInstance(x.magnitude, int) y = 0.1 * ureg.meter self.assertIsInstance(y.magnitude, float) z = 5 * ureg.meter self.assertIsInstance(z.magnitude, int) z += y self.assertIsInstance(z.magnitude, float) self.assertQuantityAlmostEqual(x + y, 5.1 * ureg.meter) self.assertQuantityAlmostEqual(z, 5.1 * ureg.meter) def test_issue104(self): x = [ureg("1 meter"), ureg("1 meter"), ureg("1 meter")] y = [ureg("1 meter")] * 3 def summer(values): if not values: return 0 total = values[0] for v in values[1:]: total += v return total self.assertQuantityAlmostEqual(summer(x), ureg.Quantity(3, "meter")) self.assertQuantityAlmostEqual(x[0], ureg.Quantity(1, "meter")) self.assertQuantityAlmostEqual(summer(y), ureg.Quantity(3, "meter")) self.assertQuantityAlmostEqual(y[0], ureg.Quantity(1, "meter")) def test_issue105(self): func = ureg.parse_unit_name val = list(func("meter")) self.assertEqual(list(func("METER")), []) self.assertEqual(val, list(func("METER", False))) for func in (ureg.get_name, ureg.parse_expression): val = func("meter") with self.assertRaises(AttributeError): func("METER") self.assertEqual(val, func("METER", False)) @helpers.requires_numpy() def test_issue127(self): q = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter q[0] = np.nan self.assertNotEqual(q[0], 1.0) 
self.assertTrue(math.isnan(q[0].magnitude)) q[1] = float("NaN") self.assertNotEqual(q[1], 2.0) self.assertTrue(math.isnan(q[1].magnitude)) def test_issue170(self): Q_ = UnitRegistry().Quantity q = Q_("1 kHz") / Q_("100 Hz") iq = int(q) self.assertEqual(iq, 10) self.assertIsInstance(iq, int) def test_angstrom_creation(self): ureg.Quantity(2, "Å") def test_alternative_angstrom_definition(self): ureg.Quantity(2, "\u212B") def test_micro_creation(self): ureg.Quantity(2, "µm") @helpers.requires_numpy() def test_issue171_real_imag(self): qr = [1.0, 2.0, 3.0, 4.0] * self.ureg.meter qi = [4.0, 3.0, 2.0, 1.0] * self.ureg.meter q = qr + 1j * qi self.assertQuantityEqual(q.real, qr) self.assertQuantityEqual(q.imag, qi) @helpers.requires_numpy() def test_issue171_T(self): a = np.asarray([[1.0, 2.0, 3.0, 4.0], [4.0, 3.0, 2.0, 1.0]]) q1 = a * self.ureg.meter q2 = a.T * self.ureg.meter self.assertQuantityEqual(q1.T, q2) @helpers.requires_numpy() def test_issue250(self): a = self.ureg.V b = self.ureg.mV self.assertEqual(np.float16(a / b), 1000.0) self.assertEqual(np.float32(a / b), 1000.0) self.assertEqual(np.float64(a / b), 1000.0) if "float128" in dir(np): self.assertEqual(np.float128(a / b), 1000.0) def test_issue252(self): ur = UnitRegistry() q = ur("3 F") t = copy.deepcopy(q) u = t.to(ur.mF) self.assertQuantityEqual(q.to(ur.mF), u) def test_issue323(self): from fractions import Fraction as F self.assertEqual((self.Q_(F(2, 3), "s")).to("ms"), self.Q_(F(2000, 3), "ms")) self.assertEqual((self.Q_(F(2, 3), "m")).to("km"), self.Q_(F(1, 1500), "km")) def test_issue339(self): q1 = self.ureg("") self.assertEqual(q1.magnitude, 1) self.assertEqual(q1.units, self.ureg.dimensionless) q2 = self.ureg("1 dimensionless") self.assertEqual(q1, q2) def test_issue354_356_370(self): self.assertEqual( "{:~}".format(1 * self.ureg.second / self.ureg.millisecond), "1.0 s / ms" ) self.assertEqual("{:~}".format(1 * self.ureg.count), "1 count") self.assertEqual("{:~}".format(1 * self.ureg("MiB")), "1 
MiB") def test_issue468(self): @ureg.wraps(("kg"), "meter") def f(x): return x x = ureg.Quantity(1.0, "meter") y = f(x) z = x * y self.assertEqual(z, ureg.Quantity(1.0, "meter * kilogram")) @helpers.requires_numpy() def test_issue482(self): q = self.ureg.Quantity(1, self.ureg.dimensionless) qe = np.exp(q) self.assertIsInstance(qe, self.ureg.Quantity) @helpers.requires_numpy() def test_issue483(self): ureg = self.ureg a = np.asarray([1, 2, 3]) q = [1, 2, 3] * ureg.dimensionless p = (q ** q).m np.testing.assert_array_equal(p, a ** a) def test_issue507(self): ureg.define("_100km = 100 * kilometer") battery_ec = 16 * ureg.kWh / ureg._100km ureg.define("_home = 4700 * kWh / year") with self.assertRaises(AttributeError): home_elec_power = 1 * ureg._home ureg.define("_ = 45 * km") with self.assertRaises(AttributeError): one_blank = 1 * ureg._ def test_issue523(self): src, dst = UnitsContainer({"meter": 1}), UnitsContainer({"degF": 1}) value = 10.0 convert = self.ureg.convert self.assertRaises(DimensionalityError, convert, value, src, dst) self.assertRaises(DimensionalityError, convert, value, dst, src) def test_issue532(self): ureg = self.ureg @ureg.check(ureg("")) def f(x): return 2 * x self.assertEqual(f(ureg.Quantity(1, "")), 2) self.assertRaises(DimensionalityError, f, ureg.Quantity(1, "m")) def test_issue625a(self): Q_ = ureg.Quantity from math import sqrt @ureg.wraps(ureg.second, (ureg.meters, ureg.meters / ureg.second ** 2)) def calculate_time_to_fall(height, gravity=Q_(9.8, "m/s^2")): return sqrt(2 * height / gravity) lunar_module_height = Q_(10, "m") t1 = calculate_time_to_fall(lunar_module_height) print(t1) self.assertAlmostEqual(t1, Q_(1.4285714285714286, "s")) moon_gravity = Q_(1.625, "m/s^2") t2 = calculate_time_to_fall(lunar_module_height, moon_gravity) self.assertAlmostEqual(t2, Q_(3.508232077228117, "s")) def test_issue625b(self): Q_ = ureg.Quantity @ureg.wraps("=A*B", ("=A", "=B")) def get_displacement(time, rate=Q_(1, "m/s")): return time * rate d1 = 
get_displacement(Q_(2, "s")) self.assertAlmostEqual(d1, Q_(2, "m")) d2 = get_displacement(Q_(2, "s"), Q_(1, "deg/s")) self.assertAlmostEqual(d2, Q_(2, " deg")) def test_issue625c(self): u = UnitRegistry() @u.wraps("=A*B*C", ("=A", "=B", "=C")) def get_product(a=2 * u.m, b=3 * u.m, c=5 * u.m): return a * b * c self.assertEqual(get_product(a=3 * u.m), 45 * u.m ** 3) self.assertEqual(get_product(b=2 * u.m), 20 * u.m ** 3) self.assertEqual(get_product(c=1 * u.dimensionless), 6 * u.m ** 2) def test_issue655a(self): distance = 1 * ureg.m time = 1 * ureg.s velocity = distance / time self.assertEqual(distance.check("[length]"), True) self.assertEqual(distance.check("[time]"), False) self.assertEqual(velocity.check("[length] / [time]"), True) self.assertEqual(velocity.check("1 / [time] * [length]"), True) def test_issue655b(self): Q_ = ureg.Quantity @ureg.check("[length]", "[length]/[time]^2") def pendulum_period(length, G=Q_(1, "standard_gravity")): print(length) return (2 * math.pi * (length / G) ** 0.5).to("s") length = Q_(1, ureg.m) t = pendulum_period(length) self.assertAlmostEqual(t, Q_("2.0064092925890407 second")) moon_gravity = Q_(1.625, "m/s^2") t = pendulum_period(length, moon_gravity) self.assertAlmostEqual(t, Q_("4.928936075204336 second")) def test_issue783(self): assert not ureg("g") == [] def test_issue856(self): ph1 = ParserHelper(scale=123) ph2 = copy.deepcopy(ph1) assert ph2.scale == ph1.scale ureg1 = UnitRegistry() ureg2 = copy.deepcopy(ureg1) assert ureg2("1 t").to("kg").magnitude == 1000 def test_issue856b(self): ureg1 = UnitRegistry() ureg2 = copy.deepcopy(ureg1) ureg1.define("test123 = 123 kg") ureg2.define("test123 = 456 kg") assert ureg1("1 test123").to("kg").magnitude == 123 assert ureg2("1 test123").to("kg").magnitude == 456 def test_issue876(self): a = UnitsContainer({"[mass]": -1}) b = UnitsContainer({"[mass]": -2}) c = UnitsContainer({"[mass]": -3}) assert (hash(-1) == hash(-2)) == (hash(a) == hash(b)) assert (hash(-1) == hash(-3)) == (hash(a) 
== hash(c)) assert a != b assert a != c def test_issue902(self): ureg = UnitRegistry(auto_reduce_dimensions=True) velocity = 1 * ureg.m / ureg.s cross_section = 1 * ureg.um ** 2 result = cross_section / velocity assert result == 1e-12 * ureg.m * ureg.s def test_issue912(self): meter_units = ureg.get_compatible_units(ureg.meter) hertz_units = ureg.get_compatible_units(ureg.hertz) pprint.pformat(meter_units | hertz_units) def test_issue932(self): q = ureg.Quantity("1 kg") with self.assertRaises(DimensionalityError): q.to("joule") ureg.enable_contexts("energy", *(Context() for _ in range(20))) q.to("joule") ureg.disable_contexts() with self.assertRaises(DimensionalityError): q.to("joule") def test_issue960(self): q = (1 * ureg.nanometer).to_compact("micrometer") assert q.units == ureg.nanometer assert q.magnitude == 1 def test_issue1032(self): class MultiplicativeDictionary(dict): def __rmul__(self, other): return self.__class__( {key: value * other for key, value in self.items()} ) q = 3 * ureg.s d = MultiplicativeDictionary({4: 5, 6: 7}) assert q * d == MultiplicativeDictionary({4: 15 * ureg.s, 6: 21 * ureg.s}) with self.assertRaises(TypeError): d * q @helpers.requires_numpy() def test_issue973(self): q0 = np.array([]) * ureg.m q1 = np.array([]) * ureg("m") assert isinstance(q0, ureg.Quantity) assert isinstance(q1, ureg.Quantity) assert len(q0) == len(q1) == 0 def test_issue1062_issue1097(self): assert "nanometer" not in ureg._units for i in range(5): ctx = Context.from_lines(["@context _", "cal = 4 J"]) with ureg.context("sp", ctx): q = ureg.Quantity(1, "nm") q.to("J") def test_issue1086(self): assert "bits" in ureg assert "gigabits" in ureg assert "meters" in ureg assert "kilometers" in ureg assert "magicbits" not in ureg assert "unknownmeters" not in ureg assert "gigatrees" not in ureg def test_issue1112(self): ureg = UnitRegistry( """ m = [length] g = [mass] s = [time] ft = 0.305 m lb = 454 g @context c1 [time]->[length] : value * 10 m/s @end @context c2 ft = 
0.3 m @end @context c3 lb = 500 g @end """.splitlines() ) ureg.enable_contexts("c1") ureg.enable_contexts("c2") ureg.enable_contexts("c3") if np is not None: @pytest.mark.parametrize( "callable", [ lambda x: np.sin(x / x.units), lambda x: np.cos(x / x.units), np.isfinite, np.shape, np.size, np.sqrt, lambda x: x.mean(), lambda x: x.copy(), np.array, lambda x: x.conjugate, ], ) @pytest.mark.parametrize( "q", [ pytest.param(ureg.Quantity(1, "m"), id="python scalar int"), pytest.param(ureg.Quantity([1, 2, 3, 4], "m"), id="array int"), pytest.param(ureg.Quantity([1], "m")[0], id="numpy scalar int"), pytest.param(ureg.Quantity(1.0, "m"), id="python scalar float"), pytest.param(ureg.Quantity([1.0, 2.0, 3.0, 4.0], "m"), id="array float"), pytest.param(ureg.Quantity([1.0], "m")[0], id="numpy scalar float"), ], ) def test_issue925(callable, q): type_before = type(q._magnitude) callable(q) assert isinstance(q._magnitude, type_before)
true
true
f71ce24d91b286d5f0da039e4c82f07b1fcc56b0
4,427
py
Python
libs/trans/handler_0_1.py
StrayCamel247/Daily_utils
9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944
[ "Apache-2.0" ]
1
2020-12-11T13:57:46.000Z
2020-12-11T13:57:46.000Z
libs/trans/handler_0_1.py
StrayCamel247/Daily_utils
9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944
[ "Apache-2.0" ]
null
null
null
libs/trans/handler_0_1.py
StrayCamel247/Daily_utils
9feeb09ebcf5ad5d3b91ab3c59e7c16855a51944
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- # __author__ : stray_camel # __description__ : trans_0_1 # __REFERENCES__ : https://blog.csdn.net/qq_42544196/article/details/106468658;https://docs.python.org/3/library/logging.html # __date__: 2020/12/11 15 import datetime import logging from pathlib import Path import random import re import sys import threading import time from functools import wraps from hashlib import md5 from typing import Any import requests def logger_set(): """ 自定义日志格式,保存至对应文件 官方文档:https://docs.python.org/3/library/logging.html """ # 日志文件存储格式 logger = logging.getLogger() logsf_loder = Path('./logs') # 如果目录不存在则创建 logsf_loder.mkdir(parents=True, exist_ok=True) # 储存的文件名,带时间戳后缀 logs_file = logsf_loder / \ "{}.log".format(datetime.datetime.now().strftime('%y-%m-%d')) # 转为绝对路径 fh = logging.FileHandler(logs_file.resolve(), encoding="utf-8", mode="a") logger.setLevel(logging.INFO) fh.setFormatter(logging.Formatter("%(message)s\n")) logger.addHandler(fh) # 打印格式 console = logging.StreamHandler() console.setLevel(logging.INFO) formatter = logging.Formatter('%(message)s') console.setFormatter(formatter) logging.getLogger('').addHandler(console) logger_set() class YouDaoFanYi(object): """ 引用于作者:https://blog.csdn.net/qq_42544196 """ def __init__(self): self.url = 'http://fanyi.youdao.com/translate_o?smartresult=dict&smartresult=rule' self.headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/81.0.4044.138 Safari/537.36', 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8', 'Referer': 'http://fanyi.youdao.com/', 'Cookie': 'OUTFOX_SEARCH_USER_ID="-1571440969@10.108.160.19"' } @staticmethod def create_data(e): n = "5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" t = md5(n.encode()).hexdigest() r = int(time.time() * 1000) i = int(str(r) + str(random.randint(0, 10))) sign = md5(("fanyideskweb" + e + str(i) + 
"Nw(nmmbP%A-r6U3EUn]Aj").encode()).hexdigest() return {'ts': r, 'bv': t, 'salt': i, 'sign': sign} def fanyi_word(self, word): sys_data = self.create_data(word) data = { 'i': word, 'from': 'AUTO', 'to': 'AUTO', 'smartresult': 'dict', 'client': 'fanyideskweb', 'doctype': 'json', 'version': 2.1, 'keyfrom': 'fanyi.web', 'action': 'FY_BY_REALTlME' } result = requests.post(url=self.url, headers=self.headers, data={ **data, **sys_data}).json() # print(result) return result def main(self, word): self.fanyi_word(word) def thread_handler(func): """ TODO:多线程运行 """ @wraps(func) def wrapper(self, *args, **kwargs): pass return wrapper class fanyi(YouDaoFanYi): """ 定制翻译对象 """ def _fanyi_word(self, word): res = self.fanyi_word(word=word) try: if res.get('translateResult'): smartResults = res.get('smartResult', {}).get('entries', []) results = [ re.sub("[\!\%\\t\\r\\n]", "", res) for res in smartResults if res ] rest = '\n '.join([word]+results) if results else '' return rest except: pass def word_analysis_copy(self, *args: '翻译单个或多个单词', **kwds: Any): """ """ args = [' '.join(_.split('_')) if '_' in _ else _ for _ in set(args)] logging.info('\n'.join(map(self._fanyi_word, args))) def __call__(self, *args: Any, **kwds: Any) -> Any: self.word_analysis_copy(*args) """ --------------------------------------------------------------------------- 代码运行 >>> python .\main.py trans >>> trans >>> n. (Trans) (丹)唐(人名) >>> abbr. 交易;交易行为;交流;事务 (transaction);及物的;(关系)可递的;过度的 (transitive);(尤指职业)翻译;翻译程序;电 >>> 视差频转播机 (translator) >>> adj. 反式的;跨性别的;(有机体)异型结合的 --------------------------------------------------------------------------- """ # fanyi = fanyi() # fanyi(*sys.argv[1:])
29.125
125
0.550486
import datetime import logging from pathlib import Path import random import re import sys import threading import time from functools import wraps from hashlib import md5 from typing import Any import requests def logger_set(): logger = logging.getLogger() logsf_loder = Path('./logs') logsf_loder.mkdir(parents=True, exist_ok=True) logs_file = logsf_loder / \ "{}.log".format(datetime.datetime.now().strftime('%y-%m-%d')) fh = logging.FileHandler(logs_file.resolve(), encoding="utf-8", mode="a") logger.setLevel(logging.INFO) fh.setFormatter(logging.Formatter("%(message)s\n")) logger.addHandler(fh) console = logging.StreamHandler() console.setLevel(logging.INFO) formatter = logging.Formatter('%(message)s') console.setFormatter(formatter) logging.getLogger('').addHandler(console) logger_set() class YouDaoFanYi(object): def __init__(self): self.url = 'http://fanyi.youdao.com/translate_o?smartresult=dict&smartresult=rule' self.headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/81.0.4044.138 Safari/537.36', 'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8', 'Referer': 'http://fanyi.youdao.com/', 'Cookie': 'OUTFOX_SEARCH_USER_ID="-1571440969@10.108.160.19"' } @staticmethod def create_data(e): n = "5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36" t = md5(n.encode()).hexdigest() r = int(time.time() * 1000) i = int(str(r) + str(random.randint(0, 10))) sign = md5(("fanyideskweb" + e + str(i) + "Nw(nmmbP%A-r6U3EUn]Aj").encode()).hexdigest() return {'ts': r, 'bv': t, 'salt': i, 'sign': sign} def fanyi_word(self, word): sys_data = self.create_data(word) data = { 'i': word, 'from': 'AUTO', 'to': 'AUTO', 'smartresult': 'dict', 'client': 'fanyideskweb', 'doctype': 'json', 'version': 2.1, 'keyfrom': 'fanyi.web', 'action': 'FY_BY_REALTlME' } result = requests.post(url=self.url, headers=self.headers, data={ **data, **sys_data}).json() return result def 
main(self, word): self.fanyi_word(word) def thread_handler(func): @wraps(func) def wrapper(self, *args, **kwargs): pass return wrapper class fanyi(YouDaoFanYi): def _fanyi_word(self, word): res = self.fanyi_word(word=word) try: if res.get('translateResult'): smartResults = res.get('smartResult', {}).get('entries', []) results = [ re.sub("[\!\%\\t\\r\\n]", "", res) for res in smartResults if res ] rest = '\n '.join([word]+results) if results else '' return rest except: pass def word_analysis_copy(self, *args: '翻译单个或多个单词', **kwds: Any): args = [' '.join(_.split('_')) if '_' in _ else _ for _ in set(args)] logging.info('\n'.join(map(self._fanyi_word, args))) def __call__(self, *args: Any, **kwds: Any) -> Any: self.word_analysis_copy(*args)
true
true
f71ce2dda7e8afdb46370fe9f341989c34e72ab8
5,864
py
Python
tensorflow_datasets/testing/mocking.py
robbjr/datasets
fbb2af9d0e88f8e2ae884e9764fbeff2ee487813
[ "Apache-2.0" ]
1
2019-10-12T08:05:11.000Z
2019-10-12T08:05:11.000Z
tensorflow_datasets/testing/mocking.py
robbjr/datasets
fbb2af9d0e88f8e2ae884e9764fbeff2ee487813
[ "Apache-2.0" ]
null
null
null
tensorflow_datasets/testing/mocking.py
robbjr/datasets
fbb2af9d0e88f8e2ae884e9764fbeff2ee487813
[ "Apache-2.0" ]
1
2019-12-14T00:32:08.000Z
2019-12-14T00:32:08.000Z
# coding=utf-8 # Copyright 2019 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Mock util for tfds. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import contextlib import os import random from absl.testing import absltest import numpy as np import tensorflow as tf from tensorflow_datasets.core import features as features_lib @contextlib.contextmanager def mock_data(num_examples=1, as_dataset_fn=None, data_dir=None): """Mock tfds to generate random data. This function requires the true metadata files (dataset_info.json, label.txt, vocabulary files) to be stored in `data_dir/dataset_name/version`, as they would be for the true dataset. The actual examples will be randomly generated using `builder.info.features.get_tensor_info()`. Download and prepare step will be skipped. Warning: As the mocked builder will use the true metadata (label names,...), the `info.split['train'].num_examples` won't match `len(list(ds_train))`. Usage (automated): ``` with mock_data(num_examples=5): ds = tfds.load('some_dataset', split='train') for ex in ds: # ds will yield randomly generated examples. ex ``` If you want more fine grain control over the generated examples, you can manually overwrite the `DatasetBuilder._as_dataset` method. 
Usage (manual): ``` def as_dataset(self, *args, **kwargs): return tf.data.Dataset.from_generator( lambda: ({ 'image': np.ones(shape=(28, 28, 1), dtype=np.uint8), 'label': i % 10, } for i in range(num_examples)), output_types=self.info.features.dtype, output_shapes=self.info.features.shape, ) with mock_data(as_dataset_fn=as_dataset): ds = tfds.load('some_dataset', split='train') for ex in ds: # ds will yield the fake data example of 'as_dataset'. ex ``` Args: num_examples: `int`, the number of fake example to generate. as_dataset_fn: if provided, will replace the default random example generator. This function mock the `FileAdapterBuilder._as_dataset` data_dir: `str`, `data_dir` folder from where to load the metadata. Will overwrite `data_dir` kwargs from `tfds.load`. Yields: None """ def mock_download_and_prepare(self, *args, **kwargs): del args del kwargs if not tf.io.gfile.exists(self._data_dir): # pylint: disable=protected-access raise ValueError( 'TFDS has been mocked, but metadata files where not found in {}. ' 'You should copy the real metadata files, so that the dataset ' 'can be loaded properly, or set the data_dir kwarg of' 'tfds.testing.mock_tfds(data_dir=...).' 
''.format(self._data_dir) # pylint: disable=protected-access ) def mock_as_dataset(self, *args, **kwargs): del args del kwargs ds = tf.data.Dataset.from_generator( lambda: (_generate_random_example(self) for _ in range(num_examples)), output_types=self.info.features.dtype, output_shapes=self.info.features.shape, ) return ds if not as_dataset_fn: as_dataset_fn = mock_as_dataset if not data_dir: data_dir = os.path.join(os.path.dirname(__file__), 'metadata') download_and_prepare_path = 'tensorflow_datasets.core.dataset_builder.DatasetBuilder.download_and_prepare' as_dataset_path = 'tensorflow_datasets.core.dataset_builder.FileAdapterBuilder._as_dataset' data_dir_path = 'tensorflow_datasets.core.constants.DATA_DIR' with absltest.mock.patch(as_dataset_path, as_dataset_fn), \ absltest.mock.patch( download_and_prepare_path, mock_download_and_prepare), \ absltest.mock.patch(data_dir_path, data_dir): yield def _generate_random_array(feature, tensor_info): """Generates a random tensor for a single feature.""" # TODO(tfds): Could improve the fake generatiion: # * Use the feature statistics (min, max) # * For Sequence features # * For Text shape = [ # Fill dynamic shape with random values np.random.randint(5, 50) if s is None else s for s in tensor_info.shape ] if isinstance(feature, features_lib.ClassLabel): max_value = feature.num_classes elif isinstance(feature, features_lib.Text) and feature.vocab_size: max_value = feature.vocab_size else: max_value = 255 # Generate some random values, depending on the dtype if tensor_info.dtype.is_integer: return np.random.randint(0, max_value, shape) elif tensor_info.dtype.is_floating: return np.random.random_sample(shape) elif tensor_info.dtype == tf.string: return ''.join( random.choice(' abcdefghij') for _ in range(random.randint(10, 20))) else: raise ValueError('Fake generation not supported for {}'.format( tensor_info.dtype)) def _generate_random_example(builder): root_feature = builder.info.features flat_features = 
root_feature._flatten(root_feature) # pylint: disable=protected-access flat_tensor_info = root_feature._flatten(root_feature.get_tensor_info()) # pylint: disable=protected-access flat_np = [ _generate_random_array(feature, tensor_info) for feature, tensor_info in zip(flat_features, flat_tensor_info) ] return root_feature._nest(flat_np) # pylint: disable=protected-access
34.698225
110
0.722715
from __future__ import absolute_import from __future__ import division from __future__ import print_function import contextlib import os import random from absl.testing import absltest import numpy as np import tensorflow as tf from tensorflow_datasets.core import features as features_lib @contextlib.contextmanager def mock_data(num_examples=1, as_dataset_fn=None, data_dir=None): def mock_download_and_prepare(self, *args, **kwargs): del args del kwargs if not tf.io.gfile.exists(self._data_dir): raise ValueError( 'TFDS has been mocked, but metadata files where not found in {}. ' 'You should copy the real metadata files, so that the dataset ' 'can be loaded properly, or set the data_dir kwarg of' 'tfds.testing.mock_tfds(data_dir=...).' ''.format(self._data_dir) ) def mock_as_dataset(self, *args, **kwargs): del args del kwargs ds = tf.data.Dataset.from_generator( lambda: (_generate_random_example(self) for _ in range(num_examples)), output_types=self.info.features.dtype, output_shapes=self.info.features.shape, ) return ds if not as_dataset_fn: as_dataset_fn = mock_as_dataset if not data_dir: data_dir = os.path.join(os.path.dirname(__file__), 'metadata') download_and_prepare_path = 'tensorflow_datasets.core.dataset_builder.DatasetBuilder.download_and_prepare' as_dataset_path = 'tensorflow_datasets.core.dataset_builder.FileAdapterBuilder._as_dataset' data_dir_path = 'tensorflow_datasets.core.constants.DATA_DIR' with absltest.mock.patch(as_dataset_path, as_dataset_fn), \ absltest.mock.patch( download_and_prepare_path, mock_download_and_prepare), \ absltest.mock.patch(data_dir_path, data_dir): yield def _generate_random_array(feature, tensor_info): shape = [ np.random.randint(5, 50) if s is None else s for s in tensor_info.shape ] if isinstance(feature, features_lib.ClassLabel): max_value = feature.num_classes elif isinstance(feature, features_lib.Text) and feature.vocab_size: max_value = feature.vocab_size else: max_value = 255 if tensor_info.dtype.is_integer: return 
np.random.randint(0, max_value, shape) elif tensor_info.dtype.is_floating: return np.random.random_sample(shape) elif tensor_info.dtype == tf.string: return ''.join( random.choice(' abcdefghij') for _ in range(random.randint(10, 20))) else: raise ValueError('Fake generation not supported for {}'.format( tensor_info.dtype)) def _generate_random_example(builder): root_feature = builder.info.features flat_features = root_feature._flatten(root_feature) flat_tensor_info = root_feature._flatten(root_feature.get_tensor_info()) flat_np = [ _generate_random_array(feature, tensor_info) for feature, tensor_info in zip(flat_features, flat_tensor_info) ] return root_feature._nest(flat_np)
true
true
f71ce321bd7159819de5bbecec6b6484d3ec718a
12,450
py
Python
spyder/plugins/editor/utils/tests/test_autosave.py
Earthman100/spyder
949ce0f9100a69504c70a5678e8589a05aee7d38
[ "MIT" ]
7,956
2015-02-17T01:19:09.000Z
2022-03-31T21:52:15.000Z
spyder/plugins/editor/utils/tests/test_autosave.py
Earthman100/spyder
949ce0f9100a69504c70a5678e8589a05aee7d38
[ "MIT" ]
16,326
2015-02-16T23:15:21.000Z
2022-03-31T23:34:34.000Z
spyder/plugins/editor/utils/tests/test_autosave.py
Earthman100/spyder
949ce0f9100a69504c70a5678e8589a05aee7d38
[ "MIT" ]
1,918
2015-02-20T19:26:26.000Z
2022-03-31T19:03:25.000Z
# -*- coding: utf-8 -*- # # Copyright © Spyder Project Contributors # Licensed under the terms of the MIT License # """Tests for autosave.py""" # Standard library imports import ast import os.path as osp # Third party imports import pytest # Local imports from spyder.plugins.editor.utils.autosave import (AutosaveForStack, AutosaveForPlugin) def test_autosave_component_set_interval(mocker): """Test that setting the interval does indeed change it and calls do_autosave if enabled.""" mocker.patch.object(AutosaveForPlugin, 'do_autosave') addon = AutosaveForPlugin(None) addon.do_autosave.assert_not_called() addon.interval = 10000 assert addon.interval == 10000 addon.do_autosave.assert_not_called() addon.enabled = True addon.interval = 20000 assert addon.do_autosave.called @pytest.mark.parametrize('enabled', [False, True]) def test_autosave_component_timer_if_enabled(qtbot, mocker, enabled): """Test that AutosaveForPlugin calls do_autosave() on timer if enabled.""" mocker.patch.object(AutosaveForPlugin, 'do_autosave') addon = AutosaveForPlugin(None) addon.do_autosave.assert_not_called() addon.interval = 100 addon.enabled = enabled qtbot.wait(500) if enabled: assert addon.do_autosave.called else: addon.do_autosave.assert_not_called() def test_get_files_to_recover_with_empty_autosave_dir(mocker, tmpdir): """Test get_files_to_recover() when autosave dir contains no files.""" mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() assert result == ([], []) @pytest.mark.parametrize('running,empty', [(True, False), (False, False), (False, True)]) def test_get_files_to_recover_with_one_pid_file(mocker, tmpdir, running, empty): """Test get_files_to_recover() if autosave dir contains one pid file with one autosave file. If running is True, then pretend that the pid file belongs to a running Spyder instance. 
If empty is True, then the pid file is empty (regression test for spyder-ide/spyder#11375).""" mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) mock_is_spyder_process = mocker.patch( 'spyder.plugins.editor.utils.autosave.is_spyder_process', return_value=running) pidfile = tmpdir.join('pid42.txt') autosavefile = tmpdir.join('foo.py') if empty: pidfile.write('') else: pidfile.write('{"original": ' + repr(str(autosavefile)) + '}') autosavefile.write('bar = 1') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() if empty: # pid file corrupted so original file name not recorded expected_files = [(None, str(autosavefile))] elif running: # autosave file belongs to running instance expected_files = [] else: expected_files = [('original', str(autosavefile))] expected = (expected_files, [str(pidfile)]) assert result == expected mock_is_spyder_process.assert_called_with(42) def test_get_files_to_recover_with_non_pid_file(mocker, tmpdir): """Test get_files_to_recover() if autosave dir contains no pid file, but one Python file.""" mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) pythonfile = tmpdir.join('foo.py') pythonfile.write('bar = 1') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() expected = ([(None, str(pythonfile))], []) assert result == expected def test_get_files_to_recover_without_autosave_dir(mocker): """Test that get_files_to_recover() does not break if there is no autosave directory.""" mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value='non-existing-directory') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() assert result == ([], []) @pytest.mark.parametrize('error_on_remove', [False, True]) def test_try_recover(mocker, tmpdir, error_on_remove): """Test that try_recover_from_autosave() displays a RecoveryDialog, that it stores the files that the user wants to open as reported by the 
dialog, and that it removes the pid file. If error_on_remove is set, then removing the pid file will raise an OSError; this should be ignored.""" mock_RecoveryDialog = mocker.patch( 'spyder.plugins.editor.utils.autosave.RecoveryDialog') mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) pidfile = tmpdir.join('pid42.txt') autosavefile = tmpdir.join('foo.py') pidfile.write('{"original": ' + repr(str(autosavefile)) + '}') autosavefile.write('bar = 1') addon = AutosaveForPlugin(None) if error_on_remove: mocker.patch('os.remove', side_effect=OSError) addon.try_recover_from_autosave() expected_mapping = [('original', str(autosavefile))] mock_RecoveryDialog.assert_called_with(expected_mapping, parent=None) expected_files_to_open = mock_RecoveryDialog().files_to_open[:] assert addon.recover_files_to_open == expected_files_to_open if not error_on_remove: assert not pidfile.check() @pytest.mark.parametrize('in_mapping,on_disk', [(False, False), (True, False), (False, True)]) def test_create_unique_autosave_filename(mocker, in_mapping, on_disk): """Test that AutosaveForStack.create_unique_autosave_filename() returns a file name in the autosave directory with the same base name as the original file name, unless that already exists in the autosave mapping or on disk.""" def new_exists(path): if path == osp.join('autosave', 'ham.py'): return on_disk else: return False mocker.patch('os.path.exists', side_effect=new_exists) addon = AutosaveForStack(mocker.Mock()) if in_mapping: addon.name_mapping = {osp.join('somedir', 'ham.py'): osp.join('autosave', 'ham.py')} autosave_filename = addon.create_unique_autosave_filename( osp.join('orig', 'ham.py'), 'autosave') if in_mapping or on_disk: assert autosave_filename == osp.join('autosave', 'ham-1.py') else: assert autosave_filename == osp.join('autosave', 'ham.py') @pytest.mark.parametrize('have_hash', [True, False]) def test_autosave(mocker, have_hash): """Test that 
AutosaveForStack.maybe_autosave writes the contents to the autosave file and updates the file_hashes.""" mock_editor = mocker.Mock() mock_fileinfo = mocker.Mock(editor=mock_editor, filename='orig', newly_created=False) mock_document = mocker.Mock() mock_fileinfo.editor.document.return_value = mock_document mock_stack = mocker.Mock(data=[mock_fileinfo]) addon = AutosaveForStack(mock_stack) addon.name_mapping = {'orig': 'autosave'} addon.file_hashes = {'autosave': 2} if have_hash: addon.file_hashes['orig'] = 1 mock_stack.compute_hash.return_value = 3 addon.maybe_autosave(0) mock_stack._write_to_file.assert_called_with(mock_fileinfo, 'autosave') mock_stack.compute_hash.assert_called_with(mock_fileinfo) if have_hash: assert addon.file_hashes == {'orig': 1, 'autosave': 3} else: assert addon.file_hashes == {'autosave': 3} @pytest.mark.parametrize('latin', [True, False]) def test_save_autosave_mapping_with_nonempty_mapping(mocker, tmpdir, latin): """Test that save_autosave_mapping() writes the current autosave mapping to the correct file if the mapping is not empty.""" mocker.patch('os.getpid', return_value=42) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForStack(None) if latin: addon.name_mapping = {'orig': 'autosave'} else: addon.name_mapping = {'原件': 'autosave'} addon.save_autosave_mapping() pidfile = tmpdir.join('pid42.txt') assert ast.literal_eval(pidfile.read()) == addon.name_mapping @pytest.mark.parametrize('pidfile_exists', [False, True]) def test_save_autosave_mapping_with_empty_mapping(mocker, tmpdir, pidfile_exists): """Test that save_autosave_mapping() does not write the pidfile if the mapping is empty, and that is removes the pidfile if it exists.""" mocker.patch('os.getpid', return_value=42) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForStack(None) addon.name_mapping = {} pidfile = tmpdir.join('pid42.txt') if pidfile_exists: 
pidfile.write('This is an ex-parrot!') addon.save_autosave_mapping() assert not pidfile.check() @pytest.mark.parametrize('exception', [False, True]) def test_autosave_remove_autosave_file(mocker, exception): """Test that AutosaveForStack.remove_autosave_file removes the autosave file, that an error dialog is displayed if an exception is raised, and that the autosave file is removed from `name_mapping` and `file_hashes`.""" mock_remove = mocker.patch('os.remove') if exception: mock_remove.side_effect = OSError() mock_dialog = mocker.patch( 'spyder.plugins.editor.utils.autosave.AutosaveErrorDialog') mock_stack = mocker.Mock() fileinfo = mocker.Mock() fileinfo.filename = 'orig' addon = AutosaveForStack(mock_stack) addon.name_mapping = {'orig': 'autosave'} addon.file_hashes = {'autosave': 42} addon.remove_autosave_file(fileinfo.filename) assert addon.name_mapping == {} assert addon.file_hashes == {} mock_remove.assert_any_call('autosave') assert mock_dialog.called == exception def test_get_autosave_filename(mocker, tmpdir): """Test that AutosaveForStack.get_autosave_filename returns a consistent and unique name for the autosave file is returned.""" addon = AutosaveForStack(mocker.Mock()) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) expected = str(tmpdir.join('foo.py')) assert addon.get_autosave_filename('foo.py') == expected expected2 = str(tmpdir.join('foo-1.py')) assert addon.get_autosave_filename('foo.py') == expected assert addon.get_autosave_filename('ham/foo.py') == expected2 @pytest.mark.parametrize('have_hash', [True, False]) def test_autosave_file_renamed(mocker, tmpdir, have_hash): """Test that AutosaveForStack.file_renamed removes the old autosave file, creates a new one, and updates `name_mapping` and `file_hashes`.""" mock_remove = mocker.patch('os.remove') mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) mock_editor = mocker.Mock() mock_fileinfo = 
mocker.Mock(editor=mock_editor, filename='new_foo.py', newly_created=False) mock_document = mocker.Mock() mock_fileinfo.editor.document.return_value = mock_document mock_stack = mocker.Mock(data=[mock_fileinfo]) mock_stack.has_filename.return_value = 0 mock_stack.compute_hash.return_value = 3 addon = AutosaveForStack(mock_stack) old_autosavefile = str(tmpdir.join('old_foo.py')) new_autosavefile = str(tmpdir.join('new_foo.py')) addon.name_mapping = {'old_foo.py': old_autosavefile} addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42} if have_hash: addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42} else: addon.file_hashes = {old_autosavefile: 42} addon.file_renamed('old_foo.py', 'new_foo.py') mock_remove.assert_any_call(old_autosavefile) mock_stack._write_to_file.assert_called_with( mock_fileinfo, new_autosavefile) assert addon.name_mapping == {'new_foo.py': new_autosavefile} if have_hash: assert addon.file_hashes == {'new_foo.py': 1, new_autosavefile: 3} else: assert addon.file_hashes == {new_autosavefile: 3} if __name__ == "__main__": pytest.main()
38.544892
78
0.693574
import ast import os.path as osp import pytest from spyder.plugins.editor.utils.autosave import (AutosaveForStack, AutosaveForPlugin) def test_autosave_component_set_interval(mocker): mocker.patch.object(AutosaveForPlugin, 'do_autosave') addon = AutosaveForPlugin(None) addon.do_autosave.assert_not_called() addon.interval = 10000 assert addon.interval == 10000 addon.do_autosave.assert_not_called() addon.enabled = True addon.interval = 20000 assert addon.do_autosave.called @pytest.mark.parametrize('enabled', [False, True]) def test_autosave_component_timer_if_enabled(qtbot, mocker, enabled): mocker.patch.object(AutosaveForPlugin, 'do_autosave') addon = AutosaveForPlugin(None) addon.do_autosave.assert_not_called() addon.interval = 100 addon.enabled = enabled qtbot.wait(500) if enabled: assert addon.do_autosave.called else: addon.do_autosave.assert_not_called() def test_get_files_to_recover_with_empty_autosave_dir(mocker, tmpdir): mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() assert result == ([], []) @pytest.mark.parametrize('running,empty', [(True, False), (False, False), (False, True)]) def test_get_files_to_recover_with_one_pid_file(mocker, tmpdir, running, empty): mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) mock_is_spyder_process = mocker.patch( 'spyder.plugins.editor.utils.autosave.is_spyder_process', return_value=running) pidfile = tmpdir.join('pid42.txt') autosavefile = tmpdir.join('foo.py') if empty: pidfile.write('') else: pidfile.write('{"original": ' + repr(str(autosavefile)) + '}') autosavefile.write('bar = 1') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() if empty: expected_files = [(None, str(autosavefile))] elif running: expected_files = [] else: expected_files = [('original', str(autosavefile))] expected = (expected_files, [str(pidfile)]) assert result == expected 
mock_is_spyder_process.assert_called_with(42) def test_get_files_to_recover_with_non_pid_file(mocker, tmpdir): mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) pythonfile = tmpdir.join('foo.py') pythonfile.write('bar = 1') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() expected = ([(None, str(pythonfile))], []) assert result == expected def test_get_files_to_recover_without_autosave_dir(mocker): mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value='non-existing-directory') addon = AutosaveForPlugin(None) result = addon.get_files_to_recover() assert result == ([], []) @pytest.mark.parametrize('error_on_remove', [False, True]) def test_try_recover(mocker, tmpdir, error_on_remove): mock_RecoveryDialog = mocker.patch( 'spyder.plugins.editor.utils.autosave.RecoveryDialog') mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) pidfile = tmpdir.join('pid42.txt') autosavefile = tmpdir.join('foo.py') pidfile.write('{"original": ' + repr(str(autosavefile)) + '}') autosavefile.write('bar = 1') addon = AutosaveForPlugin(None) if error_on_remove: mocker.patch('os.remove', side_effect=OSError) addon.try_recover_from_autosave() expected_mapping = [('original', str(autosavefile))] mock_RecoveryDialog.assert_called_with(expected_mapping, parent=None) expected_files_to_open = mock_RecoveryDialog().files_to_open[:] assert addon.recover_files_to_open == expected_files_to_open if not error_on_remove: assert not pidfile.check() @pytest.mark.parametrize('in_mapping,on_disk', [(False, False), (True, False), (False, True)]) def test_create_unique_autosave_filename(mocker, in_mapping, on_disk): def new_exists(path): if path == osp.join('autosave', 'ham.py'): return on_disk else: return False mocker.patch('os.path.exists', side_effect=new_exists) addon = AutosaveForStack(mocker.Mock()) if in_mapping: addon.name_mapping = {osp.join('somedir', 'ham.py'): 
osp.join('autosave', 'ham.py')} autosave_filename = addon.create_unique_autosave_filename( osp.join('orig', 'ham.py'), 'autosave') if in_mapping or on_disk: assert autosave_filename == osp.join('autosave', 'ham-1.py') else: assert autosave_filename == osp.join('autosave', 'ham.py') @pytest.mark.parametrize('have_hash', [True, False]) def test_autosave(mocker, have_hash): mock_editor = mocker.Mock() mock_fileinfo = mocker.Mock(editor=mock_editor, filename='orig', newly_created=False) mock_document = mocker.Mock() mock_fileinfo.editor.document.return_value = mock_document mock_stack = mocker.Mock(data=[mock_fileinfo]) addon = AutosaveForStack(mock_stack) addon.name_mapping = {'orig': 'autosave'} addon.file_hashes = {'autosave': 2} if have_hash: addon.file_hashes['orig'] = 1 mock_stack.compute_hash.return_value = 3 addon.maybe_autosave(0) mock_stack._write_to_file.assert_called_with(mock_fileinfo, 'autosave') mock_stack.compute_hash.assert_called_with(mock_fileinfo) if have_hash: assert addon.file_hashes == {'orig': 1, 'autosave': 3} else: assert addon.file_hashes == {'autosave': 3} @pytest.mark.parametrize('latin', [True, False]) def test_save_autosave_mapping_with_nonempty_mapping(mocker, tmpdir, latin): mocker.patch('os.getpid', return_value=42) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForStack(None) if latin: addon.name_mapping = {'orig': 'autosave'} else: addon.name_mapping = {'原件': 'autosave'} addon.save_autosave_mapping() pidfile = tmpdir.join('pid42.txt') assert ast.literal_eval(pidfile.read()) == addon.name_mapping @pytest.mark.parametrize('pidfile_exists', [False, True]) def test_save_autosave_mapping_with_empty_mapping(mocker, tmpdir, pidfile_exists): mocker.patch('os.getpid', return_value=42) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) addon = AutosaveForStack(None) addon.name_mapping = {} pidfile = tmpdir.join('pid42.txt') if 
pidfile_exists: pidfile.write('This is an ex-parrot!') addon.save_autosave_mapping() assert not pidfile.check() @pytest.mark.parametrize('exception', [False, True]) def test_autosave_remove_autosave_file(mocker, exception): mock_remove = mocker.patch('os.remove') if exception: mock_remove.side_effect = OSError() mock_dialog = mocker.patch( 'spyder.plugins.editor.utils.autosave.AutosaveErrorDialog') mock_stack = mocker.Mock() fileinfo = mocker.Mock() fileinfo.filename = 'orig' addon = AutosaveForStack(mock_stack) addon.name_mapping = {'orig': 'autosave'} addon.file_hashes = {'autosave': 42} addon.remove_autosave_file(fileinfo.filename) assert addon.name_mapping == {} assert addon.file_hashes == {} mock_remove.assert_any_call('autosave') assert mock_dialog.called == exception def test_get_autosave_filename(mocker, tmpdir): addon = AutosaveForStack(mocker.Mock()) mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) expected = str(tmpdir.join('foo.py')) assert addon.get_autosave_filename('foo.py') == expected expected2 = str(tmpdir.join('foo-1.py')) assert addon.get_autosave_filename('foo.py') == expected assert addon.get_autosave_filename('ham/foo.py') == expected2 @pytest.mark.parametrize('have_hash', [True, False]) def test_autosave_file_renamed(mocker, tmpdir, have_hash): mock_remove = mocker.patch('os.remove') mocker.patch('spyder.plugins.editor.utils.autosave.get_conf_path', return_value=str(tmpdir)) mock_editor = mocker.Mock() mock_fileinfo = mocker.Mock(editor=mock_editor, filename='new_foo.py', newly_created=False) mock_document = mocker.Mock() mock_fileinfo.editor.document.return_value = mock_document mock_stack = mocker.Mock(data=[mock_fileinfo]) mock_stack.has_filename.return_value = 0 mock_stack.compute_hash.return_value = 3 addon = AutosaveForStack(mock_stack) old_autosavefile = str(tmpdir.join('old_foo.py')) new_autosavefile = str(tmpdir.join('new_foo.py')) addon.name_mapping = {'old_foo.py': old_autosavefile} 
addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42} if have_hash: addon.file_hashes = {'old_foo.py': 1, old_autosavefile: 42} else: addon.file_hashes = {old_autosavefile: 42} addon.file_renamed('old_foo.py', 'new_foo.py') mock_remove.assert_any_call(old_autosavefile) mock_stack._write_to_file.assert_called_with( mock_fileinfo, new_autosavefile) assert addon.name_mapping == {'new_foo.py': new_autosavefile} if have_hash: assert addon.file_hashes == {'new_foo.py': 1, new_autosavefile: 3} else: assert addon.file_hashes == {new_autosavefile: 3} if __name__ == "__main__": pytest.main()
true
true
f71ce3a01442e4158b8f21fcbecc9788b785237a
2,662
py
Python
rocon_client_sdk_py/virtual_core/actions/action_dock.py
boklae/rocon_client_sdk_py
47ffd3a466fbbcb43f77338eddd9a9fa32b3a763
[ "MIT" ]
null
null
null
rocon_client_sdk_py/virtual_core/actions/action_dock.py
boklae/rocon_client_sdk_py
47ffd3a466fbbcb43f77338eddd9a9fa32b3a763
[ "MIT" ]
1
2021-06-08T21:01:30.000Z
2021-06-08T21:01:30.000Z
rocon_client_sdk_py/virtual_core/actions/action_dock.py
boklae/rocon_client_sdk_py
47ffd3a466fbbcb43f77338eddd9a9fa32b3a763
[ "MIT" ]
null
null
null
from rocon_client_sdk_py.virtual_core.actions.base import Action import asyncio import pydash from rocon_client_sdk_py.virtual_core.path_planner import PathPlanner class Dock(Action): def __init__(self): self.name = 'Dock' self.func_name = 'dock' async def on_define(self, context): print('define action of ' + self.name) api_config = context.api_configuration result = await api_config.get_stations() domain_station = [] def cb(station): domain_station.append({'alias': station['name']+'('+str(station['marker_value'])+')', 'value': station['id']}) pydash.map_(result, cb) return { 'name': self.name, 'func_name': self.func_name, 'args': [ { 'key': 'station', 'type': 'number', 'default': domain_station[len(domain_station) -1], 'domain': domain_station } ] } async def on_perform(self, context, args): station_id = pydash.find(args, {'key': 'station'})['value'] station = await context.api_configuration.get_stations(station_id) if station is None: print('failed to get station') worker = context.blackboard.get_worker() worker_location = pydash.get(worker, 'type_specific.location') path_planner = PathPlanner(context) await path_planner.init_map() path = path_planner.get_path(worker_location['map'], worker_location['pose2d'], station['pose']) trajectory = path_planner.path_to_trajectory(path, 1, 1000) print('start to moving robot on path') for point in trajectory: worker = context.blackboard.get_worker() updated_type_specific = worker['type_specific'] if 'theta' in point: pass else: point['theta'] = pydash.get(worker, 'type_specific.location.pose2d.theta') updated_type_specific['location'] = pydash.assign({}, updated_type_specific['location'], { 'map': worker_location['map'], 'pose2d': point }) context.blackboard.set_worker({'type_specific': updated_type_specific}) await context.blackboard.sync_worker() await asyncio.sleep(1) updated_type_specific['location']['pose2d']['theta'] = station['pose']['theta'] context.blackboard.set_worker({'type_specific': updated_type_specific}) 
await context.blackboard.sync_worker() await asyncio.sleep(1) return True
35.026316
122
0.597295
from rocon_client_sdk_py.virtual_core.actions.base import Action import asyncio import pydash from rocon_client_sdk_py.virtual_core.path_planner import PathPlanner class Dock(Action): def __init__(self): self.name = 'Dock' self.func_name = 'dock' async def on_define(self, context): print('define action of ' + self.name) api_config = context.api_configuration result = await api_config.get_stations() domain_station = [] def cb(station): domain_station.append({'alias': station['name']+'('+str(station['marker_value'])+')', 'value': station['id']}) pydash.map_(result, cb) return { 'name': self.name, 'func_name': self.func_name, 'args': [ { 'key': 'station', 'type': 'number', 'default': domain_station[len(domain_station) -1], 'domain': domain_station } ] } async def on_perform(self, context, args): station_id = pydash.find(args, {'key': 'station'})['value'] station = await context.api_configuration.get_stations(station_id) if station is None: print('failed to get station') worker = context.blackboard.get_worker() worker_location = pydash.get(worker, 'type_specific.location') path_planner = PathPlanner(context) await path_planner.init_map() path = path_planner.get_path(worker_location['map'], worker_location['pose2d'], station['pose']) trajectory = path_planner.path_to_trajectory(path, 1, 1000) print('start to moving robot on path') for point in trajectory: worker = context.blackboard.get_worker() updated_type_specific = worker['type_specific'] if 'theta' in point: pass else: point['theta'] = pydash.get(worker, 'type_specific.location.pose2d.theta') updated_type_specific['location'] = pydash.assign({}, updated_type_specific['location'], { 'map': worker_location['map'], 'pose2d': point }) context.blackboard.set_worker({'type_specific': updated_type_specific}) await context.blackboard.sync_worker() await asyncio.sleep(1) updated_type_specific['location']['pose2d']['theta'] = station['pose']['theta'] context.blackboard.set_worker({'type_specific': updated_type_specific}) 
await context.blackboard.sync_worker() await asyncio.sleep(1) return True
true
true
f71ce3f2ed68163326db9d66587e4654492691b3
3,152
py
Python
backend/chat/models.py
crowdbotics-apps/dsfs-28863
fea2672275927bd37d23e2267273e0eae54340d2
[ "FTL", "AML", "RSA-MD" ]
null
null
null
backend/chat/models.py
crowdbotics-apps/dsfs-28863
fea2672275927bd37d23e2267273e0eae54340d2
[ "FTL", "AML", "RSA-MD" ]
null
null
null
backend/chat/models.py
crowdbotics-apps/dsfs-28863
fea2672275927bd37d23e2267273e0eae54340d2
[ "FTL", "AML", "RSA-MD" ]
null
null
null
from django.conf import settings from django.db import models class ForwardedMessage(models.Model): "Generated Model" message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="forwardedmessage_message", ) forwarded_by = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_by", ) forwarded_to = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_to", ) timestamp_forwarded = models.DateTimeField( auto_now_add=True, ) class Message(models.Model): "Generated Model" message = models.TextField() thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="message_thread", ) sent_by = models.ForeignKey( "chat.ThreadMember", on_delete=models.CASCADE, related_name="message_sent_by", ) attachment = models.URLField() is_draft = models.BooleanField() is_delivered = models.BooleanField() is_read = models.BooleanField() timestamp_created = models.DateTimeField( auto_now_add=True, ) timestamp_delivered = models.DateTimeField() timestamp_read = models.DateTimeField() class Thread(models.Model): "Generated Model" name = models.CharField( max_length=255, ) thread_photo = models.URLField() timestamp_created = models.DateTimeField( auto_now_add=True, ) class ThreadAction(models.Model): "Generated Model" action = models.CharField( max_length=7, ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadaction_thread", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, ) class MessageAction(models.Model): "Generated Model" action = models.CharField( max_length=7, ) message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="messageaction_message", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, 
related_name="messageaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, ) class ThreadMember(models.Model): "Generated Model" profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadmember_profile", ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadmember_thread", ) is_admin = models.BooleanField() timestamp_joined = models.DateTimeField( auto_now_add=True, ) timestamp_left = models.DateTimeField() last_rejoined = models.DateTimeField() # Create your models here.
25.626016
53
0.659581
from django.conf import settings from django.db import models class ForwardedMessage(models.Model): message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="forwardedmessage_message", ) forwarded_by = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_by", ) forwarded_to = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="forwardedmessage_forwarded_to", ) timestamp_forwarded = models.DateTimeField( auto_now_add=True, ) class Message(models.Model): message = models.TextField() thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="message_thread", ) sent_by = models.ForeignKey( "chat.ThreadMember", on_delete=models.CASCADE, related_name="message_sent_by", ) attachment = models.URLField() is_draft = models.BooleanField() is_delivered = models.BooleanField() is_read = models.BooleanField() timestamp_created = models.DateTimeField( auto_now_add=True, ) timestamp_delivered = models.DateTimeField() timestamp_read = models.DateTimeField() class Thread(models.Model): name = models.CharField( max_length=255, ) thread_photo = models.URLField() timestamp_created = models.DateTimeField( auto_now_add=True, ) class ThreadAction(models.Model): action = models.CharField( max_length=7, ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadaction_thread", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, ) class MessageAction(models.Model): action = models.CharField( max_length=7, ) message = models.ForeignKey( "chat.Message", on_delete=models.CASCADE, related_name="messageaction_message", ) profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="messageaction_profile", ) timestamp_action = models.DateTimeField( auto_now_add=True, 
) class ThreadMember(models.Model): profile = models.ForeignKey( "chat_user_profile.Profile", on_delete=models.CASCADE, related_name="threadmember_profile", ) thread = models.ForeignKey( "chat.Thread", on_delete=models.CASCADE, related_name="threadmember_thread", ) is_admin = models.BooleanField() timestamp_joined = models.DateTimeField( auto_now_add=True, ) timestamp_left = models.DateTimeField() last_rejoined = models.DateTimeField()
true
true
f71ce4194dd771aa309e312526fdfe217a9bdfea
2,299
py
Python
src/models/vectorTraining.py
Will03/NVSM_pytorch
45e91efa6e4571a955c0f76807f2d6b5d7ffa66a
[ "MIT" ]
null
null
null
src/models/vectorTraining.py
Will03/NVSM_pytorch
45e91efa6e4571a955c0f76807f2d6b5d7ffa66a
[ "MIT" ]
null
null
null
src/models/vectorTraining.py
Will03/NVSM_pytorch
45e91efa6e4571a955c0f76807f2d6b5d7ffa66a
[ "MIT" ]
1
2020-06-04T06:45:43.000Z
2020-06-04T06:45:43.000Z
import numpy as np from sklearn.metrics.pairwise import cosine_similarity from sklearn.feature_extraction.text import TfidfVectorizer import os dataPath = '../../Willll/' # Relative path of homework data # r=root, d=directories, f = files DocList = [] QueryList = [] DocData = [] QueryData = [] def articleParser(myPath): with open(myPath, 'r') as fp: docData = fp.read().replace('\n', '') return docData # read Query List with open(dataPath+'test/query_list.txt', 'r') as fp: tmpLine = fp.readline() while tmpLine: tmpLine = tmpLine.strip('\n') if tmpLine != '': QueryList.append(tmpLine) tmpLine = fp.readline() # Read query data for eachQ in QueryList: QueryData.append(articleParser(dataPath+'test/query/%s'%eachQ)) for r, d, f in os.walk(dataPath+'doc'): for file in f: DocList.append(file) for eachD in DocList: DocData.append(articleParser(dataPath+'doc/'+eachD)) # TF-IDF max_df = 0.95 # Ignore words with high df. (Similar effect to stopword filtering) min_df = 5 # Ignore words with low df. smooth_idf = True # Smooth idf weights by adding 1 to df. sublinear_tf = True # Replace tf with 1 + log(tf). # Rocchio (Below is a param set called Ide Dec-Hi) alpha = 1 beta = 0.75 gamma = 0.15 rel_count = 5 # Use top-5 relevant documents to update query vector. nrel_count = 1 # Use only the most non-relevant document to update query vector. 
iters = 5 print('start train') # Build TF-IDF vectors of docs and queries vectorizer = TfidfVectorizer(max_df=max_df, min_df=min_df, smooth_idf=smooth_idf, sublinear_tf=sublinear_tf) doc_tfidfs = vectorizer.fit_transform(DocData).toarray() query_vecs = vectorizer.transform(QueryData).toarray() print('start count simi') # Rank documents based on cosine similarity cos_sim = cosine_similarity(query_vecs, doc_tfidfs) rankings = np.flip(cos_sim.argsort(), axis=1) print('start write file') limit = 600 for query_name, ranking in zip(QueryList, rankings): ranked_docs='' index = 0 for idx in ranking: if index >=600: break ranked_docs += '%s,'%DocList[idx] with open('../../Willll/%s.txt'%query_name, mode='w') as file: file.write('%s' % (ranked_docs))
28.7375
88
0.675511
import numpy as np from sklearn.metrics.pairwise import cosine_similarity from sklearn.feature_extraction.text import TfidfVectorizer import os dataPath = '../../Willll/' DocList = [] QueryList = [] DocData = [] QueryData = [] def articleParser(myPath): with open(myPath, 'r') as fp: docData = fp.read().replace('\n', '') return docData with open(dataPath+'test/query_list.txt', 'r') as fp: tmpLine = fp.readline() while tmpLine: tmpLine = tmpLine.strip('\n') if tmpLine != '': QueryList.append(tmpLine) tmpLine = fp.readline() for eachQ in QueryList: QueryData.append(articleParser(dataPath+'test/query/%s'%eachQ)) for r, d, f in os.walk(dataPath+'doc'): for file in f: DocList.append(file) for eachD in DocList: DocData.append(articleParser(dataPath+'doc/'+eachD)) max_df = 0.95 min_df = 5 smooth_idf = True sublinear_tf = True alpha = 1 beta = 0.75 gamma = 0.15 rel_count = 5 nrel_count = 1 iters = 5 print('start train') vectorizer = TfidfVectorizer(max_df=max_df, min_df=min_df, smooth_idf=smooth_idf, sublinear_tf=sublinear_tf) doc_tfidfs = vectorizer.fit_transform(DocData).toarray() query_vecs = vectorizer.transform(QueryData).toarray() print('start count simi') cos_sim = cosine_similarity(query_vecs, doc_tfidfs) rankings = np.flip(cos_sim.argsort(), axis=1) print('start write file') limit = 600 for query_name, ranking in zip(QueryList, rankings): ranked_docs='' index = 0 for idx in ranking: if index >=600: break ranked_docs += '%s,'%DocList[idx] with open('../../Willll/%s.txt'%query_name, mode='w') as file: file.write('%s' % (ranked_docs))
true
true
f71ce456b76c3630058e368b142c7aeace1c9037
4,589
py
Python
Final.min.py
faiyazsamin/FaceRecognition
9c0bd65f300784910a923f446cf33bacfc502b52
[ "MIT" ]
1
2019-01-27T11:05:11.000Z
2019-01-27T11:05:11.000Z
Final.min.py
faiyazsamin/FaceRecognition
9c0bd65f300784910a923f446cf33bacfc502b52
[ "MIT" ]
null
null
null
Final.min.py
faiyazsamin/FaceRecognition
9c0bd65f300784910a923f446cf33bacfc502b52
[ "MIT" ]
null
null
null
import cv2 import numpy as np import os subjects = ["","Mama","Samin","Delwar"] def detect_faces(colored_img, scaleFactor=1.06): img_copy = colored_img.copy() gray = cv2.cvtColor(img_copy, cv2.COLOR_BGR2GRAY) f_cascade = cv2.CascadeClassifier('data/lbpcascade_frontalface.xml') faces = f_cascade.detectMultiScale(gray, scaleFactor=scaleFactor, minNeighbors=5); if len(faces) == 0: return None, None (x, y, w, h) = faces[0] return gray[y:y+w, x:x+h], faces[0] def prepare_training_data(data_folder_path): dirs = os.listdir(data_folder_path) faces = [] labels = [] for dir_name in dirs: if not dir_name.startswith("s"): continue label = int(dir_name.replace("s", "")) subject_dir_path = data_folder_path + "/" + dir_name subject_images_names = os.listdir(subject_dir_path) for image_name in subject_images_names: if image_name.startswith("."): continue image_path = subject_dir_path + "/" + image_name image = cv2.imread(image_path) cv2.imshow("Training on image...", cv2.resize(image, (400, 500))) cv2.waitKey(10) face, rect = detect_faces(image) if face is not None: faces.append(face) labels.append(label) cv2.destroyAllWindows() cv2.waitKey(1) cv2.destroyAllWindows() print("Total faces: ", len(faces)) print("Total labels: ", len(labels)) return faces, labels def trainData(trainingDataPath, output_path): face_recognizer = cv2.face.LBPHFaceRecognizer_create() faces, labels = prepare_training_data(trainingDataPath) face_recognizer.train(faces, np.array(labels)) face_recognizer.write(output_path) def loadTrainedData(path): recognizer = cv2.face.LBPHFaceRecognizer_create() recognizer.read(path) return recognizer def predictStaticImage(test_img,trainer_file): img = test_img.copy() face, rect = detect_faces(img) lt = loadTrainedData(trainer_file) label, confidence = lt.predict(face) label_text = subjects[label] (x, y, w, h) = rect cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2) cv2.putText(img, label_text, (rect[0], rect[1] - 5), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 255, 0), 2) 
print("Confidence =",confidence) return img def showImage(image): cv2.imshow('Frame', image) cv2.waitKey(0) cv2.destroyAllWindows() def camToFile(framesToCapture,output_dir): cam = cv2.VideoCapture(1) detector = cv2.CascadeClassifier('data/haarcascade_frontalface_alt.xml') sampleNum = 0 while True: ret, img = cam.read() gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) face = detector.detectMultiScale(gray, 1.5, 5) for (x, y, w, h) in face: cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2) sampleNum = sampleNum + 1 if sampleNum%(100/framesToCapture) == 0: print("Frames Captured:", int(sampleNum/(100/framesToCapture))) cv2.imwrite(output_dir+"/"+ str(int(sampleNum/(100/framesToCapture))) + ".jpg", gray[y:y + h, x:x + w]) cv2.imshow('frame', img) if cv2.waitKey(100) & 0xFF == ord('q'): break elif sampleNum >= 100: break def detectFace(trainer_file): recognizer = cv2.face.LBPHFaceRecognizer_create() recognizer.read(trainer_file) faceCascade = cv2.CascadeClassifier("data/haarcascade_frontalface_alt.xml") cam = cv2.VideoCapture(1) font = cv2.FONT_HERSHEY_DUPLEX while True: ret, im = cam.read() gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY) faces = faceCascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100), flags=cv2.CASCADE_SCALE_IMAGE) for (x, y, w, h) in faces: nbr_predicted, conf = recognizer.predict(gray[y:y + h, x:x + w]) cv2.rectangle(im, (x - 50, y - 50), (x + w + 50, y + h + 50), (0, 225, 0), 2) nbr_predicted = subjects[nbr_predicted] cv2.putText(im, str(nbr_predicted), (x + 30, y + h + 30), font, 1, (0, 0, 225)) # Draw the text cv2.imshow('FaceDetector', im) if cv2.waitKey(1) & 0xFF == ord('q'): break cam.release() cv2.destroyAllWindows() #trainData('training-data','test.yml') detectFace('test.yml') #showImage(predictStaticImage(cv2.imread("test-data/4.jpg"),'test3.yml')) #camToFile(20,'training-data/s7')
32.546099
119
0.619961
import cv2 import numpy as np import os subjects = ["","Mama","Samin","Delwar"] def detect_faces(colored_img, scaleFactor=1.06): img_copy = colored_img.copy() gray = cv2.cvtColor(img_copy, cv2.COLOR_BGR2GRAY) f_cascade = cv2.CascadeClassifier('data/lbpcascade_frontalface.xml') faces = f_cascade.detectMultiScale(gray, scaleFactor=scaleFactor, minNeighbors=5); if len(faces) == 0: return None, None (x, y, w, h) = faces[0] return gray[y:y+w, x:x+h], faces[0] def prepare_training_data(data_folder_path): dirs = os.listdir(data_folder_path) faces = [] labels = [] for dir_name in dirs: if not dir_name.startswith("s"): continue label = int(dir_name.replace("s", "")) subject_dir_path = data_folder_path + "/" + dir_name subject_images_names = os.listdir(subject_dir_path) for image_name in subject_images_names: if image_name.startswith("."): continue image_path = subject_dir_path + "/" + image_name image = cv2.imread(image_path) cv2.imshow("Training on image...", cv2.resize(image, (400, 500))) cv2.waitKey(10) face, rect = detect_faces(image) if face is not None: faces.append(face) labels.append(label) cv2.destroyAllWindows() cv2.waitKey(1) cv2.destroyAllWindows() print("Total faces: ", len(faces)) print("Total labels: ", len(labels)) return faces, labels def trainData(trainingDataPath, output_path): face_recognizer = cv2.face.LBPHFaceRecognizer_create() faces, labels = prepare_training_data(trainingDataPath) face_recognizer.train(faces, np.array(labels)) face_recognizer.write(output_path) def loadTrainedData(path): recognizer = cv2.face.LBPHFaceRecognizer_create() recognizer.read(path) return recognizer def predictStaticImage(test_img,trainer_file): img = test_img.copy() face, rect = detect_faces(img) lt = loadTrainedData(trainer_file) label, confidence = lt.predict(face) label_text = subjects[label] (x, y, w, h) = rect cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2) cv2.putText(img, label_text, (rect[0], rect[1] - 5), cv2.FONT_HERSHEY_PLAIN, 1.5, (0, 255, 0), 2) 
print("Confidence =",confidence) return img def showImage(image): cv2.imshow('Frame', image) cv2.waitKey(0) cv2.destroyAllWindows() def camToFile(framesToCapture,output_dir): cam = cv2.VideoCapture(1) detector = cv2.CascadeClassifier('data/haarcascade_frontalface_alt.xml') sampleNum = 0 while True: ret, img = cam.read() gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) face = detector.detectMultiScale(gray, 1.5, 5) for (x, y, w, h) in face: cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2) sampleNum = sampleNum + 1 if sampleNum%(100/framesToCapture) == 0: print("Frames Captured:", int(sampleNum/(100/framesToCapture))) cv2.imwrite(output_dir+"/"+ str(int(sampleNum/(100/framesToCapture))) + ".jpg", gray[y:y + h, x:x + w]) cv2.imshow('frame', img) if cv2.waitKey(100) & 0xFF == ord('q'): break elif sampleNum >= 100: break def detectFace(trainer_file): recognizer = cv2.face.LBPHFaceRecognizer_create() recognizer.read(trainer_file) faceCascade = cv2.CascadeClassifier("data/haarcascade_frontalface_alt.xml") cam = cv2.VideoCapture(1) font = cv2.FONT_HERSHEY_DUPLEX while True: ret, im = cam.read() gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY) faces = faceCascade.detectMultiScale(gray, scaleFactor=1.2, minNeighbors=5, minSize=(100, 100), flags=cv2.CASCADE_SCALE_IMAGE) for (x, y, w, h) in faces: nbr_predicted, conf = recognizer.predict(gray[y:y + h, x:x + w]) cv2.rectangle(im, (x - 50, y - 50), (x + w + 50, y + h + 50), (0, 225, 0), 2) nbr_predicted = subjects[nbr_predicted] cv2.putText(im, str(nbr_predicted), (x + 30, y + h + 30), font, 1, (0, 0, 225)) cv2.imshow('FaceDetector', im) if cv2.waitKey(1) & 0xFF == ord('q'): break cam.release() cv2.destroyAllWindows() detectFace('test.yml')
true
true
f71ce46ec0f8659d23bb2988ab53f559e7e4f0b9
838
py
Python
oscar/lib/python2.7/site-packages/phonenumbers/data/region_883.py
AMuratTuran/mkn
557086426773ced10d82c969304bd349414a601e
[ "BSD-3-Clause" ]
4
2018-10-19T04:36:20.000Z
2020-02-13T16:14:09.000Z
oscar/lib/python2.7/site-packages/phonenumbers/data/region_883.py
AMuratTuran/mkn
557086426773ced10d82c969304bd349414a601e
[ "BSD-3-Clause" ]
5
2020-03-24T16:37:25.000Z
2021-06-10T21:24:54.000Z
upibo-venv/Lib/site-packages/phonenumbers/data/region_883.py
smbpgroup/upibo
625dcda9f9692c62aeb9fe8f7123a5d407c610ae
[ "BSD-3-Clause" ]
null
null
null
"""Auto-generated file, do not edit by hand. 883 metadata""" from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_883 = PhoneMetadata(id='001', country_code=883, international_prefix=None, general_desc=PhoneNumberDesc(national_number_pattern='51\\d{7}(?:\\d{3})?', possible_length=(9, 12)), voip=PhoneNumberDesc(national_number_pattern='51(?:00\\d{5}(?:\\d{3})?|[13]0\\d{8})', example_number='510012345', possible_length=(9, 12)), number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['510']), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['510']), NumberFormat(pattern='(\\d{4})(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['51[13]'])])
83.8
143
0.650358
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata PHONE_METADATA_883 = PhoneMetadata(id='001', country_code=883, international_prefix=None, general_desc=PhoneNumberDesc(national_number_pattern='51\\d{7}(?:\\d{3})?', possible_length=(9, 12)), voip=PhoneNumberDesc(national_number_pattern='51(?:00\\d{5}(?:\\d{3})?|[13]0\\d{8})', example_number='510012345', possible_length=(9, 12)), number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['510']), NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{3})(\\d{3})', format='\\1 \\2 \\3 \\4', leading_digits_pattern=['510']), NumberFormat(pattern='(\\d{4})(\\d{4})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['51[13]'])])
true
true
f71ce4ec1624e15791c2a9cbe2ce17cdd55390d6
733
py
Python
mycloud/drive/filesystem/file_version.py
ThomasGassmann/swisscom-my-cloud-backup
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
[ "MIT" ]
4
2019-11-28T22:10:43.000Z
2022-01-23T15:18:26.000Z
mycloud/drive/filesystem/file_version.py
ThomasGassmann/swisscom-my-cloud-backup
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
[ "MIT" ]
18
2019-01-20T22:30:48.000Z
2020-06-09T21:16:07.000Z
mycloud/drive/filesystem/file_version.py
thomasgassmann/mycloud-cli
97e222c45a54197c82c8f3a5d59aa20bf3382ed8
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod from mycloud.common import sha256_file from mycloud.constants import VERSION_HASH_LENGTH class CalculatableVersion(ABC): @abstractmethod def calculate_version(self): raise NotImplementedError() class BasicStringVersion(CalculatableVersion): def __init__(self, version: str): self._version = version def calculate_version(self): return self._version class HashCalculatedVersion(CalculatableVersion): def __init__(self, local_file: str): self.local_file = local_file def calculate_version(self): return sha256_file(self.local_file)[:VERSION_HASH_LENGTH] def get_hash(self): return sha256_file(self.local_file)
22.212121
65
0.740791
from abc import ABC, abstractmethod from mycloud.common import sha256_file from mycloud.constants import VERSION_HASH_LENGTH class CalculatableVersion(ABC): @abstractmethod def calculate_version(self): raise NotImplementedError() class BasicStringVersion(CalculatableVersion): def __init__(self, version: str): self._version = version def calculate_version(self): return self._version class HashCalculatedVersion(CalculatableVersion): def __init__(self, local_file: str): self.local_file = local_file def calculate_version(self): return sha256_file(self.local_file)[:VERSION_HASH_LENGTH] def get_hash(self): return sha256_file(self.local_file)
true
true
f71ce668eb491f3489debb1b15bbb8e0d468f1c4
4,724
py
Python
pyzoo/test/zoo/zouwu/autots/test_auto_ts.py
ankitdobhal/analytics-zoo
b8374bcd6c73bba49fe0b0ab075528cdd94cf2af
[ "Apache-2.0" ]
null
null
null
pyzoo/test/zoo/zouwu/autots/test_auto_ts.py
ankitdobhal/analytics-zoo
b8374bcd6c73bba49fe0b0ab075528cdd94cf2af
[ "Apache-2.0" ]
1
2020-04-17T02:41:28.000Z
2020-04-20T02:37:41.000Z
pyzoo/test/zoo/zouwu/autots/test_auto_ts.py
ankitdobhal/analytics-zoo
b8374bcd6c73bba49fe0b0ab075528cdd94cf2af
[ "Apache-2.0" ]
1
2020-12-21T11:48:49.000Z
2020-12-21T11:48:49.000Z
# # Copyright 2018 Analytics Zoo Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import pytest import numpy as np from test.zoo.pipeline.utils.test_utils import ZooTestCase from zoo.automl.config.recipe import LSTMGridRandomRecipe, MTNetGridRandomRecipe from zoo.zouwu.autots.forecast import AutoTSTrainer from zoo.zouwu.autots.forecast import TSPipeline import pandas as pd @pytest.mark.usefixtures("init_ray_context_fixture") class TestZouwuAutoTS(ZooTestCase): def setup_method(self, method): # super(TestZouwuAutoTS, self).setup_method(method) self.create_data() def teardown_method(self, method): pass def create_data(self): sample_num = np.random.randint(100, 200) self.train_df = pd.DataFrame({"datetime": pd.date_range( '1/1/2019', periods=sample_num), "value": np.random.randn(sample_num)}) val_sample_num = np.random.randint(20, 30) self.validation_df = pd.DataFrame({"datetime": pd.date_range( '1/1/2019', periods=val_sample_num), "value": np.random.randn(val_sample_num)}) def test_AutoTSTrainer_smoke(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = pipeline.predict(self.validation_df) assert not 
predict_df.empty def test_AutoTrainer_LstmRecipe(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df, self.validation_df, recipe=LSTMGridRandomRecipe( num_rand_samples=5, batch_size=[1024], lstm_2_units=[8], training_iteration=1, epochs=1 )) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = pipeline.predict(self.validation_df) assert not predict_df.empty def test_AutoTrainer_MTNetRecipe(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df, self.validation_df, recipe=MTNetGridRandomRecipe( num_rand_samples=5, time_step=[5], long_num=[2], batch_size=[1024], cnn_hid_size=[32, 50], training_iteration=1, epochs=1 )) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = pipeline.predict(self.validation_df) assert not predict_df.empty if __name__ == "__main__": pytest.main([__file__])
39.041322
91
0.575783
import pytest import numpy as np from test.zoo.pipeline.utils.test_utils import ZooTestCase from zoo.automl.config.recipe import LSTMGridRandomRecipe, MTNetGridRandomRecipe from zoo.zouwu.autots.forecast import AutoTSTrainer from zoo.zouwu.autots.forecast import TSPipeline import pandas as pd @pytest.mark.usefixtures("init_ray_context_fixture") class TestZouwuAutoTS(ZooTestCase): def setup_method(self, method): self.create_data() def teardown_method(self, method): pass def create_data(self): sample_num = np.random.randint(100, 200) self.train_df = pd.DataFrame({"datetime": pd.date_range( '1/1/2019', periods=sample_num), "value": np.random.randn(sample_num)}) val_sample_num = np.random.randint(20, 30) self.validation_df = pd.DataFrame({"datetime": pd.date_range( '1/1/2019', periods=val_sample_num), "value": np.random.randn(val_sample_num)}) def test_AutoTSTrainer_smoke(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = pipeline.predict(self.validation_df) assert not predict_df.empty def test_AutoTrainer_LstmRecipe(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df, self.validation_df, recipe=LSTMGridRandomRecipe( num_rand_samples=5, batch_size=[1024], lstm_2_units=[8], training_iteration=1, epochs=1 )) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = 
pipeline.predict(self.validation_df) assert not predict_df.empty def test_AutoTrainer_MTNetRecipe(self): horizon = np.random.randint(1, 6) tsp = AutoTSTrainer(dt_col="datetime", target_col="value", horizon=horizon, extra_features_col=None ) pipeline = tsp.fit(self.train_df, self.validation_df, recipe=MTNetGridRandomRecipe( num_rand_samples=5, time_step=[5], long_num=[2], batch_size=[1024], cnn_hid_size=[32, 50], training_iteration=1, epochs=1 )) assert isinstance(pipeline, TSPipeline) assert pipeline.internal.config is not None evaluate_result = pipeline.evaluate(self.validation_df) if horizon > 1: assert evaluate_result[0].shape[0] == horizon else: assert evaluate_result[0] predict_df = pipeline.predict(self.validation_df) assert not predict_df.empty if __name__ == "__main__": pytest.main([__file__])
true
true
f71ce6ddebc57474b11797d83db4b3270ece5414
2,689
py
Python
src/reports_api/models/phase_code.py
dinesh-aot/eao-project-reports
f2741d381371fb9a65b6b9f9909161eb333a7b21
[ "Apache-2.0" ]
null
null
null
src/reports_api/models/phase_code.py
dinesh-aot/eao-project-reports
f2741d381371fb9a65b6b9f9909161eb333a7b21
[ "Apache-2.0" ]
8
2022-02-14T23:21:52.000Z
2022-03-30T20:04:19.000Z
src/reports_api/models/phase_code.py
dinesh-aot/eao-project-reports
f2741d381371fb9a65b6b9f9909161eb333a7b21
[ "Apache-2.0" ]
4
2022-02-14T23:22:50.000Z
2022-02-16T17:40:28.000Z
# Copyright © 2019 Province of British Columbia # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Model to handle all operations related to Payment Disbursement status code.""" from sqlalchemy import Boolean, Column, ForeignKey, Integer, String from sqlalchemy.orm import relationship from .code_table import CodeTable from .db import db class PhaseCode(db.Model, CodeTable): """Model class for Phase.""" __tablename__ = 'phase_codes' id = Column(Integer, primary_key=True, autoincrement=True) # TODO check how it can be inherited from parent work_type_id = Column(ForeignKey('work_types.id'), nullable=False) ea_act_id = Column(ForeignKey('ea_acts.id'), nullable=False) start_event = Column(String()) end_event = Column(String) duration = Column(Integer()) legislated = Column(Boolean()) sort_order = Column(Integer()) color = Column(String(15)) work_type = relationship('WorkType', foreign_keys=[work_type_id], lazy='select') ea_act = relationship('EAAct', foreign_keys=[ea_act_id], lazy='select') milestones = relationship("Milestone", primaryjoin="PhaseCode.id==Milestone.phase_id", back_populates="phase") def as_dict(self): """Return Json representation.""" return { 'id': self.id, 'name': self.name, 'sort_order': self.sort_order, 'start_event': self.start_event, 'end_event': self.end_event, 'duration': self.duration, 'legislated': self.legislated, 'work_type': self.work_type.as_dict(), 'ea_act': self.ea_act.as_dict(), 'milestones': [milestone.as_dict() for milestone in self.milestones], 
'color': self.color } @classmethod def find_by_ea_act_and_work_type(cls, _ea_act_id, _work_type_id): """Given a id, this will return code master details.""" code_table = db.session.query(PhaseCode).filter_by(work_type_id=_work_type_id, ea_act_id=_ea_act_id).all() # pylint: disable=no-member return code_table
39.544118
115
0.661212
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String from sqlalchemy.orm import relationship from .code_table import CodeTable from .db import db class PhaseCode(db.Model, CodeTable): __tablename__ = 'phase_codes' id = Column(Integer, primary_key=True, autoincrement=True) work_type_id = Column(ForeignKey('work_types.id'), nullable=False) ea_act_id = Column(ForeignKey('ea_acts.id'), nullable=False) start_event = Column(String()) end_event = Column(String) duration = Column(Integer()) legislated = Column(Boolean()) sort_order = Column(Integer()) color = Column(String(15)) work_type = relationship('WorkType', foreign_keys=[work_type_id], lazy='select') ea_act = relationship('EAAct', foreign_keys=[ea_act_id], lazy='select') milestones = relationship("Milestone", primaryjoin="PhaseCode.id==Milestone.phase_id", back_populates="phase") def as_dict(self): return { 'id': self.id, 'name': self.name, 'sort_order': self.sort_order, 'start_event': self.start_event, 'end_event': self.end_event, 'duration': self.duration, 'legislated': self.legislated, 'work_type': self.work_type.as_dict(), 'ea_act': self.ea_act.as_dict(), 'milestones': [milestone.as_dict() for milestone in self.milestones], 'color': self.color } @classmethod def find_by_ea_act_and_work_type(cls, _ea_act_id, _work_type_id): code_table = db.session.query(PhaseCode).filter_by(work_type_id=_work_type_id, ea_act_id=_ea_act_id).all() return code_table
true
true
f71ce75531d655af08723237dc7e58101caae903
554
py
Python
Python/leetcode.252.meeting-rooms.py
tedye/leetcode
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
[ "MIT" ]
4
2015-10-10T00:30:55.000Z
2020-07-27T19:45:54.000Z
Python/leetcode.252.meeting-rooms.py
tedye/leetcode
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
[ "MIT" ]
null
null
null
Python/leetcode.252.meeting-rooms.py
tedye/leetcode
975d7e3b8cb9b6be9e80e07febf4bcf6414acd46
[ "MIT" ]
null
null
null
# Definition for an interval. # class Interval(object): # def __init__(self, s=0, e=0): # self.start = s # self.end = e class Solution(object): def canAttendMeetings(self, intervals): """ :type intervals: List[Interval] :rtype: bool """ if len(intervals) < 2: return True intervals.sort(key = lambda x: x.start) for i in xrange(1, len(intervals)): if intervals[i-1].end > intervals[i].start: return False return True
27.7
55
0.534296
class Solution(object): def canAttendMeetings(self, intervals): if len(intervals) < 2: return True intervals.sort(key = lambda x: x.start) for i in xrange(1, len(intervals)): if intervals[i-1].end > intervals[i].start: return False return True
true
true
f71ce7d27a7503b6d207689cfb75ccb7b36f12cd
9,482
py
Python
code/pytorch/methods/SSAC.py
hzm2016/assistive-gym-robosuite
5c529f4444cc386383618bfa584341740a8468f9
[ "MIT" ]
1
2021-11-22T07:45:28.000Z
2021-11-22T07:45:28.000Z
code/pytorch/methods/SSAC.py
hzm2016/assistive-gym-robosuite
5c529f4444cc386383618bfa584341740a8468f9
[ "MIT" ]
null
null
null
code/pytorch/methods/SSAC.py
hzm2016/assistive-gym-robosuite
5c529f4444cc386383618bfa584341740a8468f9
[ "MIT" ]
null
null
null
import os import torch import torch.nn.functional as F import glob import numpy as np from torch.optim import Adam from utils.utils import soft_update, hard_update from utils.model import GaussianPolicy, QNetwork, DeterministicPolicy from keras.models import Sequential, Model from keras.layers import Dense, Dropout, Input, merge, Lambda, Activation from keras.layers.merge import Add, Multiply, Concatenate, concatenate from keras.initializers import RandomUniform from keras.optimizers import Adam import keras.backend as K from keras import metrics def weighted_entropy(p, w_norm): # w = tf.divide(tf.exp(A - np.max(A)), prob) # w_norm = w / K.sum(w) return K.sum(w_norm * p * K.log(p + 1e-8)) def weighted_mean(p, w_norm): # w = tf.exp(A- np.max(A)) # w_norm = w / K.sum(w) p_weighted = np.multiply(w_norm, p) return K.mean(p_weighted, axis=0) def weighted_mse(Q_target, Q_pred, w_norm): # w = tf.exp(A- np.max(A)) # w_norm = w / K.sum(w) error = K.square(Q_target - Q_pred) return K.mean(w_norm * error) def softmax(x): col = x.shape[1] x_max = np.reshape(np.amax(x, axis=1), (-1, 1)) e_x = np.exp(x - np.matlib.repmat(x_max, 1, col) ) e_x_sum = np.reshape( np.sum(e_x, axis=1), (-1, 1)) out = e_x / np.matlib.repmat(e_x_sum, 1, col) return out def weighted_mean_array(x, weights): weights_mean = np.mean(weights, axis=1) x_weighted = np.multiply(x, weights) mean_weighted = np.divide(np.mean(x_weighted, axis=1), weights_mean) return np.reshape(mean_weighted, (-1, 1)) def p_sample(p): row, col = p.shape p_sum = np.reshape(np.sum(p, axis=1), (row, 1)) p_normalized = p/np.matlib.repmat(p_sum, 1, col) p_cumsum = np.matrix(np.cumsum( p_normalized, axis=1)) # print(p_cumsum[0]) rand = np.matlib.repmat(np.random.random((row, 1)), 1, col) # print(rand[0]) o_softmax = np.argmax(p_cumsum >= rand, axis=1) return o_softmax def entropy(p): return K.sum(p * K.log((p + 1e-8))) def add_normal(x_input, outshape, at_eps): """ add normal noise to the input """ epsilon = 
K.random_normal(shape=outshape, mean=0., stddev=1.) x_out = x_input + at_eps * np.multiply(epsilon, np.absolute(x_input)) return x_out def kl(p, q): return K.sum(p * K.log((p + 1e-8) / (q + 1e-8))) class Multi_SAC(object): def __init__(self, state_dim, action_dim, option_dim, max_action, action_space): self.alpha = 0.2 self.lr = 0.0003 self.option_num = option_dim self.policy_type = "Gaussian" self.target_update_interval = 1 self.automatic_entropy_tuning = True self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") """ critic network """ self.critic = QNetwork(state_dim, action_dim, 400).to(device=self.device) self.critic_optim = Adam(self.critic.parameters(), lr=self.lr) self.critic_target = QNetwork(state_dim, action_dim, 400).to(self.device) hard_update(self.critic_target, self.critic) self.sampling_prob = torch.FloatTensor(state).to(self.device) # ===================================================================== # # Option Model # # ===================================================================== # self.option_state_input, self.option_action_input, self.option_input_concat, self.option_out_dec, \ self.option_out, self.option_out_noise, self.option_model = self.create_option_model() Advantage = np.stop_gradient(self.target_q_value - self.predicted_v_value) Weight = np.divide(np.exp(Advantage - np.max(Advantage)), self.sampling_prob) W_norm = Weight/K.mean(Weight) critic_conditional_entropy = weighted_entropy(self.option_out, tf.stop_gradient(W_norm)) p_weighted_ave = weighted_mean(self.option_out, tf.stop_gradient(W_norm)) self.critic_entropy = critic_conditional_entropy - self.c_ent * entropy(p_weighted_ave) self.vat_loss = kl(self.option_out, self.option_out_noise) self.reg_loss = metrics.mean_absolute_error(self.option_input_concat, self.option_out_dec) self.option_loss = self.reg_loss + self.entropy_coeff * (self.critic_entropy) + self.c_reg * self.vat_loss self.option_optimize = 
tf.train.AdamOptimizer(self.option_lr).minimize(self.option_loss) """ option network """ self.it = 0 if self.policy_type == "Gaussian": # Target Entropy = −dim(A) (e.g. , -6 for HalfCheetah-v2) as given in the paper if self.automatic_entropy_tuning == True: self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item() self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device) self.alpha_optim = Adam([self.log_alpha], lr=self.lr) self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) elif self.policy_type == "Multi_Gaussian": if self.automatic_entropy_tuning == True: self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item() self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device) self.alpha_optim = Adam([self.log_alpha], lr=self.lr) self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) else: self.alpha = 0 self.automatic_entropy_tuning = False self.policy = DeterministicPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) def select_action(self, state, eval=True): state = torch.FloatTensor(state).to(self.device).unsqueeze(0) if eval == False: action, _, _ = self.policy.sample(state) else: _, _, action = self.policy.sample(state) return action.detach().cpu().numpy()[0] def train_actor_option(self, inputs, a_gradient, option): self.sess.run(self.actor_optimizer_list[option], feed_dict={ self.actor_state_input_list[option]: inputs, self.action_gradient_list[option]: a_gradient }) def train_critic(self, inputs, action, target_q_value, predicted_v_value, sampling_prob): return self.sess.run([self.critic_optimize], feed_dict={ self.critic_state_input: inputs, self.critic_action_input: action, self.target_q_value: target_q_value, 
self.predicted_v_value: predicted_v_value, self.sampling_prob: sampling_prob }) def train_option(self, inputs, action, target_q_value, predicted_v_value, sampling_prob): return self.sess.run([self.option_optimize], feed_dict={ self.option_state_input: inputs, self.option_action_input: action, self.target_q_value: target_q_value, self.predicted_v_value: predicted_v_value, self.sampling_prob: sampling_prob }) def max_option(self, inputs): Q_predict = [] n = inputs.shape[0] for o in range(int(self.option_num)): action_i = self.predict_actor_target(inputs, o) Q_predict_i, _ = self.predict_critic_target(inputs, action_i) if o == 0: Q_predict = np.reshape(Q_predict_i, (-1, 1)) else: Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1))), axis=1) o_max = np.argmax(Q_predict, axis=1) Q_max = np.max(Q_predict, axis=1) return o_max, Q_max, Q_predict def softmax_option_target(self, inputs): Q_predict = [] n = inputs.shape[0] for o in range(int(self.option_num)): action_i = self.predict_actor_target(inputs, o) Q_predict_i, _ = self.predict_critic_target(inputs, action_i) if o == 0: Q_predict = np.reshape( Q_predict_i, (-1, 1) ) else: Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1)) ), axis= 1) p = softmax(Q_predict) o_softmax = p_sample(p) n = Q_predict.shape[0] Q_softmax = Q_predict[np.arange(n), o_softmax.flatten()] return o_softmax, np.reshape(Q_softmax, (n, 1)), Q_predict def predict_actor_option(self, inputs, option): return self.sess.run(self.actor_out_list[option], feed_dict={self.actor_state_input_list[option]: inputs}) def predict_actor(self, inputs, options): action_list = [] for o in range(self.option_num): action_o = self.predict_actor_option(inputs, o) action_list.append(action_o) n = inputs.shape[0] action = 0 if n == 1 or np.isscalar(options): action = action_list[options] # calculate the action else: for i in range(n): if i == 0: action = action_list[int(options[i])][i, :] else: action = np.vstack((action, 
action_list[int(options[i])][i, :])) return action
39.508333
118
0.626977
import os import torch import torch.nn.functional as F import glob import numpy as np from torch.optim import Adam from utils.utils import soft_update, hard_update from utils.model import GaussianPolicy, QNetwork, DeterministicPolicy from keras.models import Sequential, Model from keras.layers import Dense, Dropout, Input, merge, Lambda, Activation from keras.layers.merge import Add, Multiply, Concatenate, concatenate from keras.initializers import RandomUniform from keras.optimizers import Adam import keras.backend as K from keras import metrics def weighted_entropy(p, w_norm): return K.sum(w_norm * p * K.log(p + 1e-8)) def weighted_mean(p, w_norm): p_weighted = np.multiply(w_norm, p) return K.mean(p_weighted, axis=0) def weighted_mse(Q_target, Q_pred, w_norm): error = K.square(Q_target - Q_pred) return K.mean(w_norm * error) def softmax(x): col = x.shape[1] x_max = np.reshape(np.amax(x, axis=1), (-1, 1)) e_x = np.exp(x - np.matlib.repmat(x_max, 1, col) ) e_x_sum = np.reshape( np.sum(e_x, axis=1), (-1, 1)) out = e_x / np.matlib.repmat(e_x_sum, 1, col) return out def weighted_mean_array(x, weights): weights_mean = np.mean(weights, axis=1) x_weighted = np.multiply(x, weights) mean_weighted = np.divide(np.mean(x_weighted, axis=1), weights_mean) return np.reshape(mean_weighted, (-1, 1)) def p_sample(p): row, col = p.shape p_sum = np.reshape(np.sum(p, axis=1), (row, 1)) p_normalized = p/np.matlib.repmat(p_sum, 1, col) p_cumsum = np.matrix(np.cumsum( p_normalized, axis=1)) rand = np.matlib.repmat(np.random.random((row, 1)), 1, col) o_softmax = np.argmax(p_cumsum >= rand, axis=1) return o_softmax def entropy(p): return K.sum(p * K.log((p + 1e-8))) def add_normal(x_input, outshape, at_eps): epsilon = K.random_normal(shape=outshape, mean=0., stddev=1.) 
x_out = x_input + at_eps * np.multiply(epsilon, np.absolute(x_input)) return x_out def kl(p, q): return K.sum(p * K.log((p + 1e-8) / (q + 1e-8))) class Multi_SAC(object): def __init__(self, state_dim, action_dim, option_dim, max_action, action_space): self.alpha = 0.2 self.lr = 0.0003 self.option_num = option_dim self.policy_type = "Gaussian" self.target_update_interval = 1 self.automatic_entropy_tuning = True self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu") self.critic = QNetwork(state_dim, action_dim, 400).to(device=self.device) self.critic_optim = Adam(self.critic.parameters(), lr=self.lr) self.critic_target = QNetwork(state_dim, action_dim, 400).to(self.device) hard_update(self.critic_target, self.critic) self.sampling_prob = torch.FloatTensor(state).to(self.device) self.option_state_input, self.option_action_input, self.option_input_concat, self.option_out_dec, \ self.option_out, self.option_out_noise, self.option_model = self.create_option_model() Advantage = np.stop_gradient(self.target_q_value - self.predicted_v_value) Weight = np.divide(np.exp(Advantage - np.max(Advantage)), self.sampling_prob) W_norm = Weight/K.mean(Weight) critic_conditional_entropy = weighted_entropy(self.option_out, tf.stop_gradient(W_norm)) p_weighted_ave = weighted_mean(self.option_out, tf.stop_gradient(W_norm)) self.critic_entropy = critic_conditional_entropy - self.c_ent * entropy(p_weighted_ave) self.vat_loss = kl(self.option_out, self.option_out_noise) self.reg_loss = metrics.mean_absolute_error(self.option_input_concat, self.option_out_dec) self.option_loss = self.reg_loss + self.entropy_coeff * (self.critic_entropy) + self.c_reg * self.vat_loss self.option_optimize = tf.train.AdamOptimizer(self.option_lr).minimize(self.option_loss) self.it = 0 if self.policy_type == "Gaussian": if self.automatic_entropy_tuning == True: self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item() self.log_alpha = torch.zeros(1, 
requires_grad=True, device=self.device) self.alpha_optim = Adam([self.log_alpha], lr=self.lr) self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) elif self.policy_type == "Multi_Gaussian": if self.automatic_entropy_tuning == True: self.target_entropy = -torch.prod(torch.Tensor(action_space.shape).to(self.device)).item() self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device) self.alpha_optim = Adam([self.log_alpha], lr=self.lr) self.policy = GaussianPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) else: self.alpha = 0 self.automatic_entropy_tuning = False self.policy = DeterministicPolicy(state_dim, action_dim, 400, max_action).to(self.device) self.policy_optim = Adam(self.policy.parameters(), lr=self.lr) def select_action(self, state, eval=True): state = torch.FloatTensor(state).to(self.device).unsqueeze(0) if eval == False: action, _, _ = self.policy.sample(state) else: _, _, action = self.policy.sample(state) return action.detach().cpu().numpy()[0] def train_actor_option(self, inputs, a_gradient, option): self.sess.run(self.actor_optimizer_list[option], feed_dict={ self.actor_state_input_list[option]: inputs, self.action_gradient_list[option]: a_gradient }) def train_critic(self, inputs, action, target_q_value, predicted_v_value, sampling_prob): return self.sess.run([self.critic_optimize], feed_dict={ self.critic_state_input: inputs, self.critic_action_input: action, self.target_q_value: target_q_value, self.predicted_v_value: predicted_v_value, self.sampling_prob: sampling_prob }) def train_option(self, inputs, action, target_q_value, predicted_v_value, sampling_prob): return self.sess.run([self.option_optimize], feed_dict={ self.option_state_input: inputs, self.option_action_input: action, self.target_q_value: target_q_value, self.predicted_v_value: predicted_v_value, 
self.sampling_prob: sampling_prob }) def max_option(self, inputs): Q_predict = [] n = inputs.shape[0] for o in range(int(self.option_num)): action_i = self.predict_actor_target(inputs, o) Q_predict_i, _ = self.predict_critic_target(inputs, action_i) if o == 0: Q_predict = np.reshape(Q_predict_i, (-1, 1)) else: Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1))), axis=1) o_max = np.argmax(Q_predict, axis=1) Q_max = np.max(Q_predict, axis=1) return o_max, Q_max, Q_predict def softmax_option_target(self, inputs): Q_predict = [] n = inputs.shape[0] for o in range(int(self.option_num)): action_i = self.predict_actor_target(inputs, o) Q_predict_i, _ = self.predict_critic_target(inputs, action_i) if o == 0: Q_predict = np.reshape( Q_predict_i, (-1, 1) ) else: Q_predict = np.concatenate((Q_predict, np.reshape(Q_predict_i, (-1, 1)) ), axis= 1) p = softmax(Q_predict) o_softmax = p_sample(p) n = Q_predict.shape[0] Q_softmax = Q_predict[np.arange(n), o_softmax.flatten()] return o_softmax, np.reshape(Q_softmax, (n, 1)), Q_predict def predict_actor_option(self, inputs, option): return self.sess.run(self.actor_out_list[option], feed_dict={self.actor_state_input_list[option]: inputs}) def predict_actor(self, inputs, options): action_list = [] for o in range(self.option_num): action_o = self.predict_actor_option(inputs, o) action_list.append(action_o) n = inputs.shape[0] action = 0 if n == 1 or np.isscalar(options): action = action_list[options] else: for i in range(n): if i == 0: action = action_list[int(options[i])][i, :] else: action = np.vstack((action, action_list[int(options[i])][i, :])) return action
true
true
f71ce7dd49a7ba035fc78f709cfd97e3e48d60f3
1,963
py
Python
tests/integration/test_image.py
youngjun0627/backend.ai-client-py
be7c174ab73e112fdb8be61e6affc20fc72f7d59
[ "MIT" ]
7
2019-01-18T08:08:42.000Z
2022-02-10T00:36:24.000Z
tests/integration/test_image.py
youngjun0627/backend.ai-client-py
be7c174ab73e112fdb8be61e6affc20fc72f7d59
[ "MIT" ]
179
2017-09-07T04:54:44.000Z
2022-03-29T11:30:47.000Z
tests/integration/test_image.py
youngjun0627/backend.ai-client-py
be7c174ab73e112fdb8be61e6affc20fc72f7d59
[ "MIT" ]
13
2017-09-08T05:37:44.000Z
2021-09-14T23:35:31.000Z
import pytest from ai.backend.client.exceptions import BackendAPIError from ai.backend.client.session import Session # module-level marker pytestmark = pytest.mark.integration @pytest.mark.asyncio async def test_list_images_by_admin(): with Session() as sess: images = sess.Image.list() image = images[0] assert len(images) > 0 assert 'name' in image assert 'tag' in image assert 'hash' in image @pytest.mark.asyncio async def test_list_images_by_user(userconfig): with Session() as sess: images = sess.Image.list() image = images[0] assert len(images) > 0 assert 'name' in image assert 'tag' in image assert 'hash' in image # This is invasive... # async def test_rescan_images(): # pass @pytest.mark.asyncio async def test_alias_dealias_image_by_admin(): with Session() as sess: def get_test_image_info(): items = sess.Image.list( fields=('name', 'registry', 'tag', 'aliases')) for item in items: if 'lua' in item['name'] and '5.1-alpine3.8' in item['tag']: return item img_info = get_test_image_info() test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d' test_target = img_info['registry'] + '/' + img_info['name'] + ':' + \ img_info['tag'] sess.Image.aliasImage(test_alias, test_target) assert get_test_image_info()['aliases'] == [test_alias] sess.Image.dealiasImage(test_alias) assert len(get_test_image_info()['aliases']) == 0 @pytest.mark.asyncio async def test_user_cannot_mutate_alias_dealias(userconfig): with Session() as sess: test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d' with pytest.raises(BackendAPIError): sess.Image.aliasImage(test_alias, 'lua:5.1-alpine3.8') with pytest.raises(BackendAPIError): sess.Image.dealiasImage(test_alias)
30.2
77
0.65512
import pytest from ai.backend.client.exceptions import BackendAPIError from ai.backend.client.session import Session pytestmark = pytest.mark.integration @pytest.mark.asyncio async def test_list_images_by_admin(): with Session() as sess: images = sess.Image.list() image = images[0] assert len(images) > 0 assert 'name' in image assert 'tag' in image assert 'hash' in image @pytest.mark.asyncio async def test_list_images_by_user(userconfig): with Session() as sess: images = sess.Image.list() image = images[0] assert len(images) > 0 assert 'name' in image assert 'tag' in image assert 'hash' in image @pytest.mark.asyncio async def test_alias_dealias_image_by_admin(): with Session() as sess: def get_test_image_info(): items = sess.Image.list( fields=('name', 'registry', 'tag', 'aliases')) for item in items: if 'lua' in item['name'] and '5.1-alpine3.8' in item['tag']: return item img_info = get_test_image_info() test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d' test_target = img_info['registry'] + '/' + img_info['name'] + ':' + \ img_info['tag'] sess.Image.aliasImage(test_alias, test_target) assert get_test_image_info()['aliases'] == [test_alias] sess.Image.dealiasImage(test_alias) assert len(get_test_image_info()['aliases']) == 0 @pytest.mark.asyncio async def test_user_cannot_mutate_alias_dealias(userconfig): with Session() as sess: test_alias = 'testalias-b9f1ce136f584ca892d5fef3e78dd11d' with pytest.raises(BackendAPIError): sess.Image.aliasImage(test_alias, 'lua:5.1-alpine3.8') with pytest.raises(BackendAPIError): sess.Image.dealiasImage(test_alias)
true
true
f71ce86c8da0066e9a06c434d7f057156813ebaa
1,012
bzl
Python
third_party/llvm/workspace.bzl
Georgeiva/tensorflow
1c0b85a0d49c27a5a5beec26c3e9ffceebb89652
[ "Apache-2.0" ]
2
2021-06-17T21:26:38.000Z
2021-06-20T18:25:57.000Z
third_party/llvm/workspace.bzl
craymichael/tensorflow
b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5
[ "Apache-2.0" ]
null
null
null
third_party/llvm/workspace.bzl
craymichael/tensorflow
b5de565c9c57fa7ca02d42bcfe6f470ecf117ba5
[ "Apache-2.0" ]
null
null
null
"""Provides the repository macro to import LLVM.""" load("//third_party:repo.bzl", "tf_http_archive") def repo(name): """Imports LLVM.""" LLVM_COMMIT = "366df11a35392c946678f1af94038945c23f06c8" LLVM_SHA256 = "cd720387229e8ee74cc9d7d685a298c709fb2bdb2063301e509f40dacbdbaaea" tf_http_archive( name = name, sha256 = LLVM_SHA256, strip_prefix = "llvm-project-" + LLVM_COMMIT, urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), ], link_files = { "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD", "//third_party/mlir:BUILD": "mlir/BUILD", "//third_party/mlir:test.BUILD": "mlir/test/BUILD", }, patch_file = "//third_party/llvm:disable_parallelism_in_verifier.patch", )
40.48
149
0.655138
load("//third_party:repo.bzl", "tf_http_archive") def repo(name): LLVM_COMMIT = "366df11a35392c946678f1af94038945c23f06c8" LLVM_SHA256 = "cd720387229e8ee74cc9d7d685a298c709fb2bdb2063301e509f40dacbdbaaea" tf_http_archive( name = name, sha256 = LLVM_SHA256, strip_prefix = "llvm-project-" + LLVM_COMMIT, urls = [ "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT), ], link_files = { "//third_party/llvm:llvm.autogenerated.BUILD": "llvm/BUILD", "//third_party/mlir:BUILD": "mlir/BUILD", "//third_party/mlir:test.BUILD": "mlir/test/BUILD", }, patch_file = "//third_party/llvm:disable_parallelism_in_verifier.patch", )
true
true
f71ce9fb21782d5e8358c36fd801ad537ff4f7fd
4,469
py
Python
src/mockdown/instantiation/prolog/logic.py
MangoTeam/mockdown
6f42395b07a3a83d5a3703d30985ef5a5068bf09
[ "MIT" ]
null
null
null
src/mockdown/instantiation/prolog/logic.py
MangoTeam/mockdown
6f42395b07a3a83d5a3703d30985ef5a5068bf09
[ "MIT" ]
2
2022-01-13T03:52:58.000Z
2022-03-12T01:03:41.000Z
src/mockdown/instantiation/prolog/logic.py
MangoTeam/mockdown
6f42395b07a3a83d5a3703d30985ef5a5068bf09
[ "MIT" ]
null
null
null
import operator from importlib import resources from typing import List, Tuple, Generator from pyswip import Prolog # type: ignore from mockdown.constraint import ConstraintKind from mockdown.constraint.factory import ConstraintFactory from mockdown.model import Attribute, IView, IAnchor, AnchorID from mockdown.constraint import * from mockdown.types import NT def valid_constraints(root: IView[NT], visibilities: List[Tuple[IAnchor[NT], IAnchor[NT]]], debug: bool = True) \ -> Generator[IConstraint, None, None]: """ Computes the valid constraint pairs (or singletons) for various types of constraint. """ outfile = "debug.pl" # Note: Prolog is a singleton! prolog = Prolog() try: with open(outfile, 'w') as dbfile: # Load static terms/predicates. with resources.path(__package__, 'logic.pl') as path: prolog.consult(str(path)) # Add dynamic terms/predicates. prolog.dynamic('view/1') prolog.dynamic('parent/2') prolog.dynamic('visible/2') for view in root: prolog.assertz(f"view('{view.name}')") if debug: dbfile.write(f"view('{view.name}').\n") for child in view.children: prolog.assertz(f"parent('{view.name}', '{child.name}')") if debug: dbfile.write(f"parent('{view.name}', '{child.name}').\n") for vis in visibilities: [a1, a2] = vis a1_term = f"anchor('{a1.view.name}', '{a1.attribute.value}')" a2_term = f"anchor('{a2.view.name}', '{a2.attribute.value}')" prolog.assertz(f"visible({a1_term}, {a2_term})") if debug: dbfile.write(f"visible({a1_term}, {a2_term}).\n") # todo: Post-process output? Necessary? 
# ops = [operator.le, operator.ge, operator.eq] ops = [operator.eq] for answer in prolog.query("aspect_ratio_size(V)"): v, = [answer[k] for k in ('V',)] yield ConstraintFactory.create(kind=ConstraintKind.SIZE_ASPECT_RATIO, x_id=AnchorID(v, Attribute('height')), y_id=AnchorID(v, Attribute('width')), op=operator.eq) for answer in prolog.query("absolute_size(V, A)"): v, a = [answer[k] for k in ('V', 'A')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.SIZE_CONSTANT, x_id=None, y_id=AnchorID(v, Attribute(a)), op=op) for answer in prolog.query("parent_relative_size(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] yield ConstraintFactory.create(kind=ConstraintKind.SIZE_RATIO, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, Attribute(b)), op=operator.eq) for answer in prolog.query("spacing(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, Attribute(b)), op=op) for answer in prolog.query("alignment(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, Attribute(b)), op=op) finally: # Cleanup dynamic predicates to avoid subsequent calls running in a # polluted Prolog namespace. prolog.retractall('view(_)') prolog.retractall('parent(_,_)') prolog.retractall('visible(_,_)') pass
44.247525
113
0.492504
import operator from importlib import resources from typing import List, Tuple, Generator from pyswip import Prolog from mockdown.constraint import ConstraintKind from mockdown.constraint.factory import ConstraintFactory from mockdown.model import Attribute, IView, IAnchor, AnchorID from mockdown.constraint import * from mockdown.types import NT def valid_constraints(root: IView[NT], visibilities: List[Tuple[IAnchor[NT], IAnchor[NT]]], debug: bool = True) \ -> Generator[IConstraint, None, None]: outfile = "debug.pl" prolog = Prolog() try: with open(outfile, 'w') as dbfile: with resources.path(__package__, 'logic.pl') as path: prolog.consult(str(path)) prolog.dynamic('view/1') prolog.dynamic('parent/2') prolog.dynamic('visible/2') for view in root: prolog.assertz(f"view('{view.name}')") if debug: dbfile.write(f"view('{view.name}').\n") for child in view.children: prolog.assertz(f"parent('{view.name}', '{child.name}')") if debug: dbfile.write(f"parent('{view.name}', '{child.name}').\n") for vis in visibilities: [a1, a2] = vis a1_term = f"anchor('{a1.view.name}', '{a1.attribute.value}')" a2_term = f"anchor('{a2.view.name}', '{a2.attribute.value}')" prolog.assertz(f"visible({a1_term}, {a2_term})") if debug: dbfile.write(f"visible({a1_term}, {a2_term}).\n") ops = [operator.eq] for answer in prolog.query("aspect_ratio_size(V)"): v, = [answer[k] for k in ('V',)] yield ConstraintFactory.create(kind=ConstraintKind.SIZE_ASPECT_RATIO, x_id=AnchorID(v, Attribute('height')), y_id=AnchorID(v, Attribute('width')), op=operator.eq) for answer in prolog.query("absolute_size(V, A)"): v, a = [answer[k] for k in ('V', 'A')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.SIZE_CONSTANT, x_id=None, y_id=AnchorID(v, Attribute(a)), op=op) for answer in prolog.query("parent_relative_size(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] yield ConstraintFactory.create(kind=ConstraintKind.SIZE_RATIO, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, 
Attribute(b)), op=operator.eq) for answer in prolog.query("spacing(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, Attribute(b)), op=op) for answer in prolog.query("alignment(V, A, W, B)"): v, a, w, b = [answer[k] for k in ('V', 'A', 'W', 'B')] for op in ops: yield ConstraintFactory.create(kind=ConstraintKind.POS_LTRB_OFFSET, x_id=AnchorID(v, Attribute(a)), y_id=AnchorID(w, Attribute(b)), op=op) finally: prolog.retractall('view(_)') prolog.retractall('parent(_,_)') prolog.retractall('visible(_,_)') pass
true
true
f71cea51ce51965931a25dc006e4538270b7dc22
723
py
Python
QCodes/paliandromLinkedListStack.py
Rakeshgsekhar/DataStructure
8c7eb4ec02cdba7975b834180c0c66269595bd13
[ "MIT" ]
null
null
null
QCodes/paliandromLinkedListStack.py
Rakeshgsekhar/DataStructure
8c7eb4ec02cdba7975b834180c0c66269595bd13
[ "MIT" ]
null
null
null
QCodes/paliandromLinkedListStack.py
Rakeshgsekhar/DataStructure
8c7eb4ec02cdba7975b834180c0c66269595bd13
[ "MIT" ]
null
null
null
# Definition for singly-linked list. # class ListNode: # def __init__(self, val=0, next=None): # self.val = val # self.next = next class Solution: def isPalindrome(self, head: ListNode) : num = [] temp = head isPalin = True # if head is not None and head.next is None: # return True while temp is not None: num.append(temp.val) temp = temp.next while head is not None: stackVal = num.pop() if head.val == stackVal: isPalin = True else: isPalin = False break head = head.next return isPalin
27.807692
52
0.485477
class Solution: def isPalindrome(self, head: ListNode) : num = [] temp = head isPalin = True while temp is not None: num.append(temp.val) temp = temp.next while head is not None: stackVal = num.pop() if head.val == stackVal: isPalin = True else: isPalin = False break head = head.next return isPalin
true
true
f71ceadeedb16d3ad0d95606d9452ed423823bb7
6,060
py
Python
galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py
skitchen19/galaxy_tools
b935f36cfe430263564503ebb71f78dc79315acb
[ "MIT" ]
3
2017-04-05T18:01:59.000Z
2019-05-03T14:15:31.000Z
galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py
skitchen19/galaxy_tools
b935f36cfe430263564503ebb71f78dc79315acb
[ "MIT" ]
6
2019-02-27T15:45:58.000Z
2021-01-12T15:18:50.000Z
galaxy/corals_database/galaxy_integration/version_1909/lib/galaxy/model/orm/scripts.py
skitchen19/galaxy_tools
b935f36cfe430263564503ebb71f78dc79315acb
[ "MIT" ]
2
2018-10-26T18:36:39.000Z
2019-01-28T15:12:39.000Z
""" Code to support database helper scripts (create_db.py, manage_db.py, etc...). """ import argparse import logging import os import sys from migrate.versioning.shell import main as migrate_main from galaxy.util.path import get_ext from galaxy.util.properties import find_config_file, get_data_dir, load_app_properties from galaxy.util.script import populate_config_args log = logging.getLogger(__name__) DEFAULT_CONFIG_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'config', 'sample')) DEFAULT_CONFIG_NAMES = ['galaxy', 'universe_wsgi'] DEFAULT_CONFIG_PREFIX = '' DEFAULT_DATABASE = 'galaxy' DATABASE = { "galaxy": { 'repo': 'galaxy/model/migrate', 'default_sqlite_file': 'universe.sqlite', 'config_override': 'GALAXY_CONFIG_', }, "tools": { 'repo': 'tool_shed/galaxy_install/migrate', 'default_sqlite_file': 'universe.sqlite', 'config_override': 'GALAXY_CONFIG_', }, "tool_shed": { 'repo': 'galaxy/webapps/tool_shed/model/migrate', 'config_names': ['tool_shed', 'tool_shed_wsgi'], 'default_sqlite_file': 'community.sqlite', 'config_override': 'TOOL_SHED_CONFIG_', 'config_section': 'tool_shed', }, "install": { 'repo': 'galaxy/model/tool_shed_install/migrate', 'config_prefix': 'install_', 'default_sqlite_file': 'install.sqlite', 'config_override': 'GALAXY_INSTALL_CONFIG_', }, "corals": { 'repo': 'lib/galaxy/model/corals/migrate', 'config_prefix': 'corals', 'default_sqlite_file': './database/stag.sqlite', 'config_override': 'GALAXY_CORALS_CONFIG_', }, } def _read_model_arguments(argv, use_argparse=False): if use_argparse: parser = argparse.ArgumentParser() parser.add_argument('database', metavar='DATABASE', type=str, default="galaxy", nargs='?', help='database to target (galaxy, tool_shed, install)') populate_config_args(parser) args = parser.parse_args(argv[1:] if argv else []) return args.config_file, args.config_section, args.database else: config_file = None for arg in ["-c", "--config", "--config-file"]: if arg in argv: pos = 
argv.index(arg) argv.pop(pos) config_file = argv.pop(pos) config_section = None if "--config-section" in argv: pos = argv.index("--config-section") argv.pop(pos) config_section = argv.pop(pos) if argv and (argv[-1] in DATABASE): database = argv.pop() # database name tool_shed, galaxy, or install. else: database = 'galaxy' return config_file, config_section, database def get_config(argv, use_argparse=True, cwd=None): """ Read sys.argv and parse out repository of migrations and database url. >>> import os >>> from six.moves.configparser import SafeConfigParser >>> from shutil import rmtree >>> from tempfile import mkdtemp >>> config_dir = mkdtemp() >>> os.makedirs(os.path.join(config_dir, 'config')) >>> def write_ini(path, property, value): ... p = SafeConfigParser() ... p.add_section('app:main') ... p.set('app:main', property, value) ... with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f) >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1') >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir) >>> config['repo'].endswith('galaxy/webapps/tool_shed/model/migrate') True >>> config['db_url'] 'sqlite:///pg/testdb1' >>> write_ini('galaxy.ini', 'data_dir', '/moo') >>> config = get_config(['manage_db.py'], cwd=config_dir) >>> uri_with_env = os.getenv("GALAXY_TEST_DBURI", "sqlite:////moo/universe.sqlite?isolation_level=IMMEDIATE") >>> config['db_url'] == uri_with_env True >>> config['repo'].endswith('galaxy/model/migrate') True >>> rmtree(config_dir) """ config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse) database_defaults = DATABASE[database] if config_file is None: config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES) if cwd: cwd = [cwd, os.path.join(cwd, 'config')] else: cwd = [DEFAULT_CONFIG_DIR] config_file = find_config_file(config_names, dirs=cwd) repo = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, 
database_defaults['repo']) config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX) config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_') default_sqlite_file = database_defaults['default_sqlite_file'] if config_section is None: if not config_file or get_ext(config_file, ignore='sample') == 'yaml': config_section = database_defaults.get('config_section', None) else: # Just use the default found by load_app_properties. config_section = None properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section) if ("%sdatabase_connection" % config_prefix) in properties: db_url = properties["%sdatabase_connection" % config_prefix] else: db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % os.path.join(get_data_dir(properties), default_sqlite_file) return dict(db_url=db_url, repo=repo, config_file=config_file, database=database) def manage_db(): # Migrate has its own args, so cannot use argparse config = get_config(sys.argv, use_argparse=False, cwd=os.getcwd()) migrate_main(repository=config['repo'], url=config['db_url'])
39.350649
123
0.637129
import argparse import logging import os import sys from migrate.versioning.shell import main as migrate_main from galaxy.util.path import get_ext from galaxy.util.properties import find_config_file, get_data_dir, load_app_properties from galaxy.util.script import populate_config_args log = logging.getLogger(__name__) DEFAULT_CONFIG_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'config', 'sample')) DEFAULT_CONFIG_NAMES = ['galaxy', 'universe_wsgi'] DEFAULT_CONFIG_PREFIX = '' DEFAULT_DATABASE = 'galaxy' DATABASE = { "galaxy": { 'repo': 'galaxy/model/migrate', 'default_sqlite_file': 'universe.sqlite', 'config_override': 'GALAXY_CONFIG_', }, "tools": { 'repo': 'tool_shed/galaxy_install/migrate', 'default_sqlite_file': 'universe.sqlite', 'config_override': 'GALAXY_CONFIG_', }, "tool_shed": { 'repo': 'galaxy/webapps/tool_shed/model/migrate', 'config_names': ['tool_shed', 'tool_shed_wsgi'], 'default_sqlite_file': 'community.sqlite', 'config_override': 'TOOL_SHED_CONFIG_', 'config_section': 'tool_shed', }, "install": { 'repo': 'galaxy/model/tool_shed_install/migrate', 'config_prefix': 'install_', 'default_sqlite_file': 'install.sqlite', 'config_override': 'GALAXY_INSTALL_CONFIG_', }, "corals": { 'repo': 'lib/galaxy/model/corals/migrate', 'config_prefix': 'corals', 'default_sqlite_file': './database/stag.sqlite', 'config_override': 'GALAXY_CORALS_CONFIG_', }, } def _read_model_arguments(argv, use_argparse=False): if use_argparse: parser = argparse.ArgumentParser() parser.add_argument('database', metavar='DATABASE', type=str, default="galaxy", nargs='?', help='database to target (galaxy, tool_shed, install)') populate_config_args(parser) args = parser.parse_args(argv[1:] if argv else []) return args.config_file, args.config_section, args.database else: config_file = None for arg in ["-c", "--config", "--config-file"]: if arg in argv: pos = argv.index(arg) argv.pop(pos) config_file = argv.pop(pos) config_section = None if 
"--config-section" in argv: pos = argv.index("--config-section") argv.pop(pos) config_section = argv.pop(pos) if argv and (argv[-1] in DATABASE): database = argv.pop() else: database = 'galaxy' return config_file, config_section, database def get_config(argv, use_argparse=True, cwd=None): config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse) database_defaults = DATABASE[database] if config_file is None: config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES) if cwd: cwd = [cwd, os.path.join(cwd, 'config')] else: cwd = [DEFAULT_CONFIG_DIR] config_file = find_config_file(config_names, dirs=cwd) repo = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir, database_defaults['repo']) config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX) config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_') default_sqlite_file = database_defaults['default_sqlite_file'] if config_section is None: if not config_file or get_ext(config_file, ignore='sample') == 'yaml': config_section = database_defaults.get('config_section', None) else: config_section = None properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section) if ("%sdatabase_connection" % config_prefix) in properties: db_url = properties["%sdatabase_connection" % config_prefix] else: db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % os.path.join(get_data_dir(properties), default_sqlite_file) return dict(db_url=db_url, repo=repo, config_file=config_file, database=database) def manage_db(): config = get_config(sys.argv, use_argparse=False, cwd=os.getcwd()) migrate_main(repository=config['repo'], url=config['db_url'])
true
true
f71ceba5ece68ddf805973aced707d2db383d881
2,136
py
Python
test/IDL/IDLSUFFIXES.py
andrewyoung1991/scons
7517c277e23bc04e3809a9bf0793cdfe00097a58
[ "MIT" ]
1
2015-11-04T22:22:10.000Z
2015-11-04T22:22:10.000Z
test/IDL/IDLSUFFIXES.py
azverkan/scons
704ddb9270e14c7771d0c58c04c7afa7bc009603
[ "MIT" ]
null
null
null
test/IDL/IDLSUFFIXES.py
azverkan/scons
704ddb9270e14c7771d0c58c04c7afa7bc009603
[ "MIT" ]
null
null
null
#!/usr/bin/env python # # __COPYRIGHT__ # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" """ Test that we can add filesuffixes to $IDLSUFFIXES. 
""" import TestSCons test = TestSCons.TestSCons() test.write('SConstruct', """ import SCons.Scanner.IDL env = Environment(CPPPATH=['.']) env.Append(SCANNERS = [ SCons.Scanner.IDL.IDLScan() ], IDLSUFFIXES = ['.x']) env.InstallAs('foo_idl', 'foo.idl') env.InstallAs('foo_x', 'foo.x') """) test.write('foo.idl', """\ import <foo.h> """) test.write('foo.x', """\ #include <foo.h> """) test.write('foo.h', "foo.h 1\n") test.run(arguments='.', stdout=test.wrap_stdout("""\ Install file: "foo.idl" as "foo_idl" Install file: "foo.x" as "foo_x" """)) test.up_to_date(arguments='.') test.write('foo.h', "foo.h 2\n") test.run(arguments='.', stdout=test.wrap_stdout("""\ Install file: "foo.idl" as "foo_idl" Install file: "foo.x" as "foo_x" """)) test.up_to_date(arguments='.') test.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
27.74026
73
0.719101
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__" import TestSCons test = TestSCons.TestSCons() test.write('SConstruct', """ import SCons.Scanner.IDL env = Environment(CPPPATH=['.']) env.Append(SCANNERS = [ SCons.Scanner.IDL.IDLScan() ], IDLSUFFIXES = ['.x']) env.InstallAs('foo_idl', 'foo.idl') env.InstallAs('foo_x', 'foo.x') """) test.write('foo.idl', """\ import <foo.h> """) test.write('foo.x', """\ #include <foo.h> """) test.write('foo.h', "foo.h 1\n") test.run(arguments='.', stdout=test.wrap_stdout("""\ Install file: "foo.idl" as "foo_idl" Install file: "foo.x" as "foo_x" """)) test.up_to_date(arguments='.') test.write('foo.h', "foo.h 2\n") test.run(arguments='.', stdout=test.wrap_stdout("""\ Install file: "foo.idl" as "foo_idl" Install file: "foo.x" as "foo_x" """)) test.up_to_date(arguments='.') test.pass_test()
true
true
f71cec55b1240dbdb58f8cf34afefd358b086e1e
4,909
py
Python
src/cicflowmeter/flow_session.py
ZhuMon/cicflowmeter
ca31b0d4461754f421610a5ce75eb8914753d74e
[ "MIT" ]
null
null
null
src/cicflowmeter/flow_session.py
ZhuMon/cicflowmeter
ca31b0d4461754f421610a5ce75eb8914753d74e
[ "MIT" ]
null
null
null
src/cicflowmeter/flow_session.py
ZhuMon/cicflowmeter
ca31b0d4461754f421610a5ce75eb8914753d74e
[ "MIT" ]
null
null
null
import csv import time from collections import defaultdict from scapy.sessions import DefaultSession from scapy.all import wrpcap from .features.context.packet_direction import PacketDirection from .features.context.packet_flow_key import get_packet_flow_key from .flow import Flow EXPIRED_UPDATE = 40 MACHINE_LEARNING_API = "http://localhost:8000/predict" GARBAGE_COLLECT_PACKETS = 100 class FlowSession(DefaultSession): """Creates a list of network flows.""" def __init__(self, *args, **kwargs): self.flows = {} self.csv_line = 0 if self.output_mode == "flow": output = open(self.output_file, "w") self.csv_writer = csv.writer(output) self.pcap_file = self.output_file[:-4]+'.pcap' self.packets_count = 0 self.clumped_flows_per_label = defaultdict(list) super(FlowSession, self).__init__(*args, **kwargs) def toPacketList(self): # Sniffer finished all the packets it needed to sniff. # It is not a good place for this, we need to somehow define a finish signal for AsyncSniffer self.garbage_collect(None) return super(FlowSession, self).toPacketList() def on_packet_received(self, packet): count = 0 direction = PacketDirection.FORWARD if self.output_mode != "flow": if "TCP" not in packet: return elif "UDP" not in packet: return try: # Creates a key variable to check packet_flow_key = get_packet_flow_key(packet, direction) flow = self.flows.get((packet_flow_key, count)) except Exception: return self.packets_count += 1 wrpcap(self.pcap_file, packet, append=True) # If there is no forward flow with a count of 0 if flow is None: # There might be one of it in reverse direction = PacketDirection.REVERSE packet_flow_key = get_packet_flow_key(packet, direction) flow = self.flows.get((packet_flow_key, count)) if flow is None: # If no flow exists create a new flow direction = PacketDirection.FORWARD flow = Flow(packet, direction) packet_flow_key = get_packet_flow_key(packet, direction) self.flows[(packet_flow_key, count)] = flow elif (packet.time - flow.latest_timestamp) > EXPIRED_UPDATE: # 
If the packet exists in the flow but the packet is sent # after too much of a delay than it is a part of a new flow. expired = EXPIRED_UPDATE while (packet.time - flow.latest_timestamp) > expired: count += 1 expired += EXPIRED_UPDATE flow = self.flows.get((packet_flow_key, count)) if flow is None: flow = Flow(packet, direction) self.flows[(packet_flow_key, count)] = flow break elif "TCP" in packet and "F" in str(packet.flags): # If it has FIN flag then early collect flow and continue flow.add_packet(packet, direction) self.garbage_collect(packet.time) return flow.add_packet(packet, direction) if not self.url_model: GARBAGE_COLLECT_PACKETS = 10000 if self.packets_count % GARBAGE_COLLECT_PACKETS == 0 or ( flow.duration > 120 and self.output_mode == "flow" ): self.garbage_collect(packet.time) def get_flows(self) -> list: return self.flows.values() def garbage_collect(self, latest_time) -> None: localtime = time.asctime( time.localtime(time.time()) ) print(localtime, latest_time) # TODO: Garbage Collection / Feature Extraction should have a separate thread if not self.url_model: print("Garbage Collection Began. Flows = {}".format(len(self.flows))) keys = list(self.flows.keys()) for k in keys: flow = self.flows.get(k) if ( latest_time is None or latest_time - flow.latest_timestamp > EXPIRED_UPDATE or flow.duration > 90 ): data = flow.get_data() if self.csv_line == 0: self.csv_writer.writerow(data.keys()) self.csv_writer.writerow(data.values()) self.csv_line += 1 del self.flows[k] if not self.url_model: print("Garbage Collection Finished. Flows = {}".format(len(self.flows))) def generate_session_class(output_mode, output_file, url_model): return type( "NewFlowSession", (FlowSession,), { "output_mode": output_mode, "output_file": output_file, "url_model": url_model, }, )
33.855172
101
0.598696
import csv import time from collections import defaultdict from scapy.sessions import DefaultSession from scapy.all import wrpcap from .features.context.packet_direction import PacketDirection from .features.context.packet_flow_key import get_packet_flow_key from .flow import Flow EXPIRED_UPDATE = 40 MACHINE_LEARNING_API = "http://localhost:8000/predict" GARBAGE_COLLECT_PACKETS = 100 class FlowSession(DefaultSession): def __init__(self, *args, **kwargs): self.flows = {} self.csv_line = 0 if self.output_mode == "flow": output = open(self.output_file, "w") self.csv_writer = csv.writer(output) self.pcap_file = self.output_file[:-4]+'.pcap' self.packets_count = 0 self.clumped_flows_per_label = defaultdict(list) super(FlowSession, self).__init__(*args, **kwargs) def toPacketList(self): self.garbage_collect(None) return super(FlowSession, self).toPacketList() def on_packet_received(self, packet): count = 0 direction = PacketDirection.FORWARD if self.output_mode != "flow": if "TCP" not in packet: return elif "UDP" not in packet: return try: packet_flow_key = get_packet_flow_key(packet, direction) flow = self.flows.get((packet_flow_key, count)) except Exception: return self.packets_count += 1 wrpcap(self.pcap_file, packet, append=True) if flow is None: direction = PacketDirection.REVERSE packet_flow_key = get_packet_flow_key(packet, direction) flow = self.flows.get((packet_flow_key, count)) if flow is None: direction = PacketDirection.FORWARD flow = Flow(packet, direction) packet_flow_key = get_packet_flow_key(packet, direction) self.flows[(packet_flow_key, count)] = flow elif (packet.time - flow.latest_timestamp) > EXPIRED_UPDATE: expired = EXPIRED_UPDATE while (packet.time - flow.latest_timestamp) > expired: count += 1 expired += EXPIRED_UPDATE flow = self.flows.get((packet_flow_key, count)) if flow is None: flow = Flow(packet, direction) self.flows[(packet_flow_key, count)] = flow break elif "TCP" in packet and "F" in str(packet.flags): flow.add_packet(packet, 
direction) self.garbage_collect(packet.time) return flow.add_packet(packet, direction) if not self.url_model: GARBAGE_COLLECT_PACKETS = 10000 if self.packets_count % GARBAGE_COLLECT_PACKETS == 0 or ( flow.duration > 120 and self.output_mode == "flow" ): self.garbage_collect(packet.time) def get_flows(self) -> list: return self.flows.values() def garbage_collect(self, latest_time) -> None: localtime = time.asctime( time.localtime(time.time()) ) print(localtime, latest_time) if not self.url_model: print("Garbage Collection Began. Flows = {}".format(len(self.flows))) keys = list(self.flows.keys()) for k in keys: flow = self.flows.get(k) if ( latest_time is None or latest_time - flow.latest_timestamp > EXPIRED_UPDATE or flow.duration > 90 ): data = flow.get_data() if self.csv_line == 0: self.csv_writer.writerow(data.keys()) self.csv_writer.writerow(data.values()) self.csv_line += 1 del self.flows[k] if not self.url_model: print("Garbage Collection Finished. Flows = {}".format(len(self.flows))) def generate_session_class(output_mode, output_file, url_model): return type( "NewFlowSession", (FlowSession,), { "output_mode": output_mode, "output_file": output_file, "url_model": url_model, }, )
true
true
f71cec880a8d2569d0b6f4835d13dbc7f5354126
47,163
py
Python
python/ccxt/binance.py
ivankia/phpbot
78e68a748171b68408f86a436188baafd71135bc
[ "MIT" ]
13
2019-01-26T14:41:37.000Z
2022-03-26T03:33:12.000Z
python/ccxt/binance.py
tiancai110a/ccxt
2521001c6c3ba4078b580b692f3256319198b10a
[ "MIT" ]
null
null
null
python/ccxt/binance.py
tiancai110a/ccxt
2521001c6c3ba4078b580b692f3256319198b10a
[ "MIT" ]
12
2018-12-24T02:19:02.000Z
2022-03-26T05:04:25.000Z
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.base.exchange import Exchange import math import json from ccxt.base.errors import ExchangeError from ccxt.base.errors import AuthenticationError from ccxt.base.errors import ArgumentsRequired from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import InvalidOrder from ccxt.base.errors import OrderNotFound from ccxt.base.errors import DDoSProtection from ccxt.base.errors import ExchangeNotAvailable from ccxt.base.errors import InvalidNonce class binance (Exchange): def describe(self): return self.deep_extend(super(binance, self).describe(), { 'id': 'binance', 'name': 'Binance', 'countries': ['JP'], # Japan 'rateLimit': 500, 'certified': True, # new metainfo interface 'has': { 'fetchDepositAddress': True, 'CORS': False, 'fetchBidsAsks': True, 'fetchTickers': True, 'fetchOHLCV': True, 'fetchMyTrades': True, 'fetchOrder': True, 'fetchOrders': True, 'fetchOpenOrders': True, 'fetchClosedOrders': True, 'withdraw': True, 'fetchFundingFees': True, 'fetchDeposits': True, 'fetchWithdrawals': True, 'fetchTransactions': False, }, 'timeframes': { '1m': '1m', '3m': '3m', '5m': '5m', '15m': '15m', '30m': '30m', '1h': '1h', '2h': '2h', '4h': '4h', '6h': '6h', '8h': '8h', '12h': '12h', '1d': '1d', '3d': '3d', '1w': '1w', '1M': '1M', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg', 'api': { 'web': 'https://www.binance.com', 'wapi': 'https://api.binance.com/wapi/v3', 'public': 'https://api.binance.com/api/v1', 'private': 'https://api.binance.com/api/v3', 'v3': 'https://api.binance.com/api/v3', 'v1': 'https://api.binance.com/api/v1', }, 'www': 'https://www.binance.com', 'referral': 'https://www.binance.com/?ref=10205187', 'doc': 
'https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md', 'fees': 'https://www.binance.com/en/fee/schedule', }, 'api': { 'web': { 'get': [ 'exchange/public/product', 'assetWithdraw/getAllAsset.html', ], }, 'wapi': { 'post': [ 'withdraw', ], 'get': [ 'depositHistory', 'withdrawHistory', 'depositAddress', 'accountStatus', 'systemStatus', 'userAssetDribbletLog', 'tradeFee', 'assetDetail', ], }, 'v3': { 'get': [ 'ticker/price', 'ticker/bookTicker', ], }, 'public': { 'get': [ 'exchangeInfo', 'ping', 'time', 'depth', 'aggTrades', 'klines', 'ticker/24hr', 'ticker/allPrices', 'ticker/allBookTickers', 'ticker/price', 'ticker/bookTicker', 'exchangeInfo', ], 'put': ['userDataStream'], 'post': ['userDataStream'], 'delete': ['userDataStream'], }, 'private': { 'get': [ 'order', 'openOrders', 'allOrders', 'account', 'myTrades', ], 'post': [ 'order', 'order/test', ], 'delete': [ 'order', ], }, }, 'fees': { 'trading': { 'tierBased': False, 'percentage': True, 'taker': 0.001, 'maker': 0.001, }, # should be deleted, these are outdated and inaccurate 'funding': { 'tierBased': False, 'percentage': False, 'withdraw': { 'ADA': 1.0, 'ADX': 4.7, 'AION': 1.9, 'AMB': 11.4, 'APPC': 6.5, 'ARK': 0.1, 'ARN': 3.1, 'AST': 10.0, 'BAT': 18.0, 'BCD': 1.0, 'BCH': 0.001, 'BCPT': 10.2, 'BCX': 1.0, 'BNB': 0.7, 'BNT': 1.5, 'BQX': 1.6, 'BRD': 6.4, 'BTC': 0.001, 'BTG': 0.001, 'BTM': 5.0, 'BTS': 1.0, 'CDT': 67.0, 'CMT': 37.0, 'CND': 47.0, 'CTR': 5.4, 'DASH': 0.002, 'DGD': 0.06, 'DLT': 11.7, 'DNT': 51.0, 'EDO': 2.5, 'ELF': 6.5, 'ENG': 2.1, 'ENJ': 42.0, 'EOS': 1.0, 'ETC': 0.01, 'ETF': 1.0, 'ETH': 0.01, 'EVX': 2.5, 'FUEL': 45.0, 'FUN': 85.0, 'GAS': 0, 'GTO': 20.0, 'GVT': 0.53, 'GXS': 0.3, 'HCC': 0.0005, 'HSR': 0.0001, 'ICN': 3.5, 'ICX': 1.3, 'INS': 1.5, 'IOTA': 0.5, 'KMD': 0.002, 'KNC': 2.6, 'LEND': 54.0, 'LINK': 12.8, 'LLT': 54.0, 'LRC': 9.1, 'LSK': 0.1, 'LTC': 0.01, 'LUN': 0.29, 'MANA': 74.0, 'MCO': 0.86, 'MDA': 4.7, 'MOD': 2.0, 'MTH': 34.0, 'MTL': 1.9, 'NAV': 0.2, 'NEBL': 0.01, 
'NEO': 0.0, 'NULS': 2.1, 'OAX': 8.3, 'OMG': 0.57, 'OST': 17.0, 'POE': 88.0, 'POWR': 8.6, 'PPT': 0.25, 'QSP': 21.0, 'QTUM': 0.01, 'RCN': 35.0, 'RDN': 2.2, 'REQ': 18.1, 'RLC': 4.1, 'SALT': 1.3, 'SBTC': 1.0, 'SNGLS': 42, 'SNM': 29.0, 'SNT': 32.0, 'STORJ': 5.9, 'STRAT': 0.1, 'SUB': 7.4, 'TNB': 82.0, 'TNT': 47.0, 'TRIG': 6.7, 'TRX': 129.0, 'USDT': 23.0, 'VEN': 1.8, 'VIB': 28.0, 'VIBE': 7.2, 'WABI': 3.5, 'WAVES': 0.002, 'WINGS': 9.3, 'WTC': 0.5, 'XLM': 0.01, 'XMR': 0.04, 'XRP': 0.25, 'XVG': 0.1, 'XZC': 0.02, 'YOYOW': 39.0, 'ZEC': 0.005, 'ZRX': 5.7, }, 'deposit': {}, }, }, 'commonCurrencies': { 'YOYO': 'YOYOW', 'BCC': 'BCH', }, # exchange-specific options 'options': { 'defaultTimeInForce': 'GTC', # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel 'defaultLimitOrderType': 'limit', # or 'limit_maker' 'hasAlreadyAuthenticatedSuccessfully': False, 'warnOnFetchOpenOrdersWithoutSymbol': True, 'recvWindow': 5 * 1000, # 5 sec, binance default 'timeDifference': 0, # the difference between system clock and Binance clock 'adjustForTimeDifference': False, # controls the adjustment logic upon instantiation 'parseOrderToPrecision': False, # force amounts and costs in parseOrder to precision 'newOrderRespType': 'RESULT', # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills }, 'exceptions': { '-1000': ExchangeNotAvailable, # {"code":-1000,"msg":"An unknown error occured while processing the request."} '-1013': InvalidOrder, # createOrder -> 'invalid quantity'/'invalid price'/MIN_NOTIONAL '-1021': InvalidNonce, # 'your time is ahead of server' '-1022': AuthenticationError, # {"code":-1022,"msg":"Signature for self request is not valid."} '-1100': InvalidOrder, # createOrder(symbol, 1, asdf) -> 'Illegal characters found in parameter 'price' '-1104': ExchangeError, # Not all sent parameters were read, read 8 parameters but was sent 9 '-1128': ExchangeError, # {"code":-1128,"msg":"Combination of optional parameters invalid."} '-2010': ExchangeError, # 
generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc... '-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER' '-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist' '-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."} '-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action." }, }) def nonce(self): return self.milliseconds() - self.options['timeDifference'] def load_time_difference(self): response = self.publicGetTime() after = self.milliseconds() self.options['timeDifference'] = int(after - response['serverTime']) return self.options['timeDifference'] def fetch_markets(self): response = self.publicGetExchangeInfo() if self.options['adjustForTimeDifference']: self.load_time_difference() markets = response['symbols'] result = [] for i in range(0, len(markets)): market = markets[i] id = market['symbol'] # "123456" is a "test symbol/market" if id == '123456': continue baseId = market['baseAsset'] quoteId = market['quoteAsset'] base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote filters = self.index_by(market['filters'], 'filterType') precision = { 'base': market['baseAssetPrecision'], 'quote': market['quotePrecision'], 'amount': market['baseAssetPrecision'], 'price': market['quotePrecision'], } active = (market['status'] == 'TRADING') entry = { 'id': id, 'symbol': symbol, 'base': base, 'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'info': market, 'active': active, 'precision': precision, 'limits': { 'amount': { 'min': math.pow(10, -precision['amount']), 'max': None, }, 'price': { 'min': math.pow(10, -precision['price']), 'max': None, }, 'cost': { 'min': -1 * math.log10(precision['amount']), 'max': None, }, }, } if 'PRICE_FILTER' in filters: filter = filters['PRICE_FILTER'] entry['precision']['price'] = 
self.precision_from_string(filter['tickSize']) entry['limits']['price'] = { 'min': self.safe_float(filter, 'minPrice'), 'max': self.safe_float(filter, 'maxPrice'), } if 'LOT_SIZE' in filters: filter = filters['LOT_SIZE'] entry['precision']['amount'] = self.precision_from_string(filter['stepSize']) entry['limits']['amount'] = { 'min': self.safe_float(filter, 'minQty'), 'max': self.safe_float(filter, 'maxQty'), } if 'MIN_NOTIONAL' in filters: entry['limits']['cost']['min'] = float(filters['MIN_NOTIONAL']['minNotional']) result.append(entry) return result def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}): market = self.markets[symbol] key = 'quote' rate = market[takerOrMaker] cost = float(self.cost_to_precision(symbol, amount * rate)) if side == 'sell': cost *= price else: key = 'base' return { 'type': takerOrMaker, 'currency': market[key], 'rate': rate, 'cost': float(self.fee_to_precision(symbol, cost)), } def fetch_balance(self, params={}): self.load_markets() response = self.privateGetAccount(params) result = {'info': response} balances = response['balances'] for i in range(0, len(balances)): balance = balances[i] currency = balance['asset'] if currency in self.currencies_by_id: currency = self.currencies_by_id[currency]['code'] account = { 'free': float(balance['free']), 'used': float(balance['locked']), 'total': 0.0, } account['total'] = self.sum(account['free'], account['used']) result[currency] = account return self.parse_balance(result) def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit # default = maximum = 100 response = self.publicGetDepth(self.extend(request, params)) orderbook = self.parse_order_book(response) orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId') return orderbook def parse_ticker(self, ticker, market=None): timestamp = self.safe_integer(ticker, 
'closeTime') iso8601 = None if (timestamp is None) else self.iso8601(timestamp) symbol = self.find_symbol(self.safe_string(ticker, 'symbol'), market) last = self.safe_float(ticker, 'lastPrice') return { 'symbol': symbol, 'timestamp': timestamp, 'datetime': iso8601, 'high': self.safe_float(ticker, 'highPrice'), 'low': self.safe_float(ticker, 'lowPrice'), 'bid': self.safe_float(ticker, 'bidPrice'), 'bidVolume': self.safe_float(ticker, 'bidQty'), 'ask': self.safe_float(ticker, 'askPrice'), 'askVolume': self.safe_float(ticker, 'askQty'), 'vwap': self.safe_float(ticker, 'weightedAvgPrice'), 'open': self.safe_float(ticker, 'openPrice'), 'close': last, 'last': last, 'previousClose': self.safe_float(ticker, 'prevClosePrice'), # previous day close 'change': self.safe_float(ticker, 'priceChange'), 'percentage': self.safe_float(ticker, 'priceChangePercent'), 'average': None, 'baseVolume': self.safe_float(ticker, 'volume'), 'quoteVolume': self.safe_float(ticker, 'quoteVolume'), 'info': ticker, } def fetch_ticker(self, symbol, params={}): self.load_markets() market = self.market(symbol) response = self.publicGetTicker24hr(self.extend({ 'symbol': market['id'], }, params)) return self.parse_ticker(response, market) def parse_tickers(self, rawTickers, symbols=None): tickers = [] for i in range(0, len(rawTickers)): tickers.append(self.parse_ticker(rawTickers[i])) return self.filter_by_array(tickers, 'symbol', symbols) def fetch_bids_asks(self, symbols=None, params={}): self.load_markets() rawTickers = self.publicGetTickerBookTicker(params) return self.parse_tickers(rawTickers, symbols) def fetch_tickers(self, symbols=None, params={}): self.load_markets() rawTickers = self.publicGetTicker24hr(params) return self.parse_tickers(rawTickers, symbols) def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None): return [ ohlcv[0], float(ohlcv[1]), float(ohlcv[2]), float(ohlcv[3]), float(ohlcv[4]), float(ohlcv[5]), ] def fetch_ohlcv(self, symbol, timeframe='1m', 
since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], 'interval': self.timeframes[timeframe], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit # default == max == 500 response = self.publicGetKlines(self.extend(request, params)) return self.parse_ohlcvs(response, market, timeframe, since, limit) def parse_trade(self, trade, market=None): timestampField = 'T' if ('T' in list(trade.keys())) else 'time' timestamp = self.safe_integer(trade, timestampField) priceField = 'p' if ('p' in list(trade.keys())) else 'price' price = self.safe_float(trade, priceField) amountField = 'q' if ('q' in list(trade.keys())) else 'qty' amount = self.safe_float(trade, amountField) idField = 'a' if ('a' in list(trade.keys())) else 'id' id = self.safe_string(trade, idField) side = None order = None if 'orderId' in trade: order = self.safe_string(trade, 'orderId') if 'm' in trade: side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally else: if 'isBuyer' in trade: side = 'buy' if (trade['isBuyer']) else 'sell' # self is a True side fee = None if 'commission' in trade: fee = { 'cost': self.safe_float(trade, 'commission'), 'currency': self.common_currency_code(trade['commissionAsset']), } takerOrMaker = None if 'isMaker' in trade: takerOrMaker = 'maker' if trade['isMaker'] else 'taker' symbol = None if market is None: marketId = self.safe_string(trade, 'symbol') market = self.safe_value(self.markets_by_id, marketId) if market is not None: symbol = market['symbol'] return { 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': id, 'order': order, 'type': None, 'takerOrMaker': takerOrMaker, 'side': side, 'price': price, 'cost': price * amount, 'amount': amount, 'fee': fee, } def fetch_trades(self, symbol, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': 
market['id'], } if since is not None: request['startTime'] = since request['endTime'] = self.sum(since, 3600000) if limit is not None: request['limit'] = limit # 'fromId': 123, # ID to get aggregate trades from INCLUSIVE. # 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE. # 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE. # 'limit': 500, # default = 500, maximum = 1000 # # Caveats: # - default limit(500) applies only if no other parameters set, trades up # to the maximum limit may be returned to satisfy other parameters # - if both limit and time window is set and time window contains more # trades than the limit then the last trades from the window are returned # - 'tradeId' accepted and returned by self method is "aggregate" trade id # which is different from actual trade id # - setting both fromId and time window results in error response = self.publicGetAggTrades(self.extend(request, params)) return self.parse_trades(response, market, since, limit) def parse_order_status(self, status): statuses = { 'NEW': 'open', 'PARTIALLY_FILLED': 'open', 'FILLED': 'closed', 'CANCELED': 'canceled', } return statuses[status] if (status in list(statuses.keys())) else status def parse_order(self, order, market=None): status = self.parse_order_status(self.safe_string(order, 'status')) symbol = self.find_symbol(self.safe_string(order, 'symbol'), market) timestamp = None if 'time' in order: timestamp = order['time'] elif 'transactTime' in order: timestamp = order['transactTime'] price = self.safe_float(order, 'price') amount = self.safe_float(order, 'origQty') filled = self.safe_float(order, 'executedQty') remaining = None cost = self.safe_float(order, 'cummulativeQuoteQty') if filled is not None: if amount is not None: remaining = amount - filled if self.options['parseOrderToPrecision']: remaining = float(self.amount_to_precision(symbol, remaining)) remaining = max(remaining, 0.0) if price is not None: if cost is None: cost = 
price * filled id = self.safe_string(order, 'orderId') type = self.safe_string(order, 'type') if type is not None: type = type.lower() if type == 'market': if price == 0.0: if (cost is not None) and(filled is not None): if (cost > 0) and(filled > 0): price = cost / filled side = self.safe_string(order, 'side') if side is not None: side = side.lower() fee = None trades = None fills = self.safe_value(order, 'fills') if fills is not None: trades = self.parse_trades(fills, market) numTrades = len(trades) if numTrades > 0: cost = trades[0]['cost'] fee = { 'cost': trades[0]['fee']['cost'], 'currency': trades[0]['fee']['currency'], } for i in range(1, len(trades)): cost = self.sum(cost, trades[i]['cost']) fee['cost'] = self.sum(fee['cost'], trades[i]['fee']['cost']) average = None if cost is not None: if filled: average = cost / filled if self.options['parseOrderToPrecision']: cost = float(self.cost_to_precision(symbol, cost)) result = { 'info': order, 'id': id, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'lastTradeTimestamp': None, 'symbol': symbol, 'type': type, 'side': side, 'price': price, 'amount': amount, 'cost': cost, 'average': average, 'filled': filled, 'remaining': remaining, 'status': status, 'fee': fee, 'trades': trades, } return result def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) # the next 5 lines are added to support for testing orders method = 'privatePostOrder' test = self.safe_value(params, 'test', False) if test: method += 'Test' params = self.omit(params, 'test') uppercaseType = type.upper() order = { 'symbol': market['id'], 'quantity': self.amount_to_precision(symbol, amount), 'type': uppercaseType, 'side': side.upper(), 'newOrderRespType': self.options['newOrderRespType'], # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills } timeInForceIsRequired = False priceIsRequired = False stopPriceIsRequired = False if uppercaseType == 
'LIMIT': priceIsRequired = True timeInForceIsRequired = True elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'): stopPriceIsRequired = True elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'): stopPriceIsRequired = True priceIsRequired = True timeInForceIsRequired = True elif uppercaseType == 'LIMIT_MAKER': priceIsRequired = True if priceIsRequired: if price is None: raise InvalidOrder(self.id + ' createOrder method requires a price argument for a ' + type + ' order') order['price'] = self.price_to_precision(symbol, price) if timeInForceIsRequired: order['timeInForce'] = self.options['defaultTimeInForce'] # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel if stopPriceIsRequired: stopPrice = self.safe_float(params, 'stopPrice') if stopPrice is None: raise InvalidOrder(self.id + ' createOrder method requires a stopPrice extra param for a ' + type + ' order') else: order['stopPrice'] = self.price_to_precision(symbol, stopPrice) response = getattr(self, method)(self.extend(order, params)) return self.parse_order(response, market) def fetch_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument') self.load_markets() market = self.market(symbol) origClientOrderId = self.safe_value(params, 'origClientOrderId') request = { 'symbol': market['id'], } if origClientOrderId is not None: request['origClientOrderId'] = origClientOrderId else: request['orderId'] = int(id) response = self.privateGetOrder(self.extend(request, params)) return self.parse_order(response, market) def fetch_orders(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit response = self.privateGetAllOrders(self.extend(request, params)) # # 
[ # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # ] # return self.parse_orders(response, market, since, limit) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None request = {} if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] elif self.options['warnOnFetchOpenOrdersWithoutSymbol']: symbols = self.symbols numSymbols = len(symbols) fetchOpenOrdersRateLimit = int(numSymbols / 2) raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. 
Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.') response = self.privateGetOpenOrders(self.extend(request, params)) return self.parse_orders(response, market, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): orders = self.fetch_orders(symbol, since, limit, params) return self.filter_by(orders, 'status', 'closed') def cancel_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument') self.load_markets() market = self.market(symbol) response = self.privateDeleteOrder(self.extend({ 'symbol': market['id'], 'orderId': int(id), # 'origClientOrderId': id, }, params)) return self.parse_order(response) def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit response = self.privateGetMyTrades(self.extend(request, params)) return self.parse_trades(response, market, since, limit) def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None request = {} if code is not None: currency = self.currency(code) request['asset'] = currency['id'] if since is not None: request['startTime'] = since response = self.wapiGetDepositHistory(self.extend(request, params)) # # { success: True, # depositList: [{insertTime: 1517425007000, # amount: 0.3, # address: "0x0123456789abcdef", # addressTag: "", # txId: "0x0123456789abcdef", # asset: "ETH", # status: 1 }]} # return self.parseTransactions(response['depositList'], currency, since, limit) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None request = {} if code is not None: currency = self.currency(code) 
request['asset'] = currency['id'] if since is not None: request['startTime'] = since response = self.wapiGetWithdrawHistory(self.extend(request, params)) # # {withdrawList: [{ amount: 14, # address: "0x0123456789abcdef...", # successTime: 1514489710000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ETH", # applyTime: 1514488724000, # status: 6 }, # { amount: 7600, # address: "0x0123456789abcdef...", # successTime: 1515323226000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ICN", # applyTime: 1515322539000, # status: 6 } ], # success: True } # return self.parseTransactions(response['withdrawList'], currency, since, limit) def parse_transaction_status_by_type(self, status, type=None): if type is None: return status statuses = { 'deposit': { '0': 'pending', '1': 'ok', }, 'withdrawal': { '0': 'pending', # Email Sent '1': 'canceled', # Cancelled(different from 1 = ok in deposits) '2': 'pending', # Awaiting Approval '3': 'failed', # Rejected '4': 'pending', # Processing '5': 'failed', # Failure '6': 'ok', # Completed }, } return statuses[type][status] if (status in list(statuses[type].keys())) else status def parse_transaction(self, transaction, currency=None): # # fetchDeposits # {insertTime: 1517425007000, # amount: 0.3, # address: "0x0123456789abcdef", # addressTag: "", # txId: "0x0123456789abcdef", # asset: "ETH", # status: 1 } # # fetchWithdrawals # # { amount: 14, # address: "0x0123456789abcdef...", # successTime: 1514489710000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ETH", # applyTime: 1514488724000, # status: 6 } # id = self.safe_string(transaction, 'id') address = self.safe_string(transaction, 'address') tag = self.safe_string(transaction, 'addressTag') # set but unused if len(tag) < 1: tag = None txid = self.safe_value(transaction, 'txId') code = None currencyId = self.safe_string(transaction, 'asset') if currencyId in 
self.currencies_by_id: currency = self.currencies_by_id[currencyId] else: code = self.common_currency_code(currencyId) if currency is not None: code = currency['code'] timestamp = None insertTime = self.safe_integer(transaction, 'insertTime') applyTime = self.safe_integer(transaction, 'applyTime') type = self.safe_string(transaction, 'type') if type is None: if (insertTime is not None) and(applyTime is None): type = 'deposit' timestamp = insertTime elif (insertTime is None) and(applyTime is not None): type = 'withdrawal' timestamp = applyTime status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type) amount = self.safe_float(transaction, 'amount') feeCost = None fee = { 'cost': feeCost, 'currency': code, } return { 'info': transaction, 'id': id, 'txid': txid, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'tag': tag, 'type': type, 'amount': amount, 'currency': code, 'status': status, 'updated': None, 'fee': fee, } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) response = self.wapiGetDepositAddress(self.extend({ 'asset': currency['id'], }, params)) if 'success' in response: if response['success']: address = self.safe_string(response, 'address') tag = self.safe_string(response, 'addressTag') return { 'currency': code, 'address': self.check_address(address), 'tag': tag, 'info': response, } def fetch_funding_fees(self, codes=None, params={}): response = self.wapiGetAssetDetail() # # { # "success": True, # "assetDetail": { # "CTR": { # "minWithdrawAmount": "70.00000000", #min withdraw amount # "depositStatus": False,//deposit status # "withdrawFee": 35, # withdraw fee # "withdrawStatus": True, #withdraw status # "depositTip": "Delisted, Deposit Suspended" #reason # }, # "SKY": { # "minWithdrawAmount": "0.02000000", # "depositStatus": True, # "withdrawFee": 0.01, # "withdrawStatus": True # } # } # } # detail = self.safe_value(response, 'assetDetail') 
ids = list(detail.keys()) withdrawFees = {} for i in range(0, len(ids)): id = ids[i] code = self.common_currency_code(id) withdrawFees[code] = self.safe_float(detail[id], 'withdrawFee') return { 'withdraw': withdrawFees, 'deposit': {}, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): self.check_address(address) self.load_markets() currency = self.currency(code) name = address[0:20] request = { 'asset': currency['id'], 'address': address, 'amount': float(amount), 'name': name, } if tag: request['addressTag'] = tag response = self.wapiPostWithdraw(self.extend(request, params)) return { 'info': response, 'id': self.safe_string(response, 'id'), } def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): url = self.urls['api'][api] url += '/' + path if api == 'wapi': url += '.html' # v1 special case for userDataStream if path == 'userDataStream': body = self.urlencode(params) headers = { 'X-MBX-APIKEY': self.apiKey, 'Content-Type': 'application/x-www-form-urlencoded', } elif (api == 'private') or (api == 'wapi'): self.check_required_credentials() query = self.urlencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': self.options['recvWindow'], }, params)) signature = self.hmac(self.encode(query), self.encode(self.secret)) query += '&' + 'signature=' + signature headers = { 'X-MBX-APIKEY': self.apiKey, } if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'): url += '?' + query else: body = query headers['Content-Type'] = 'application/x-www-form-urlencoded' else: if params: url += '?' 
+ self.urlencode(params) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body): if (code == 418) or (code == 429): raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body) # error response in a form: {"code": -1013, "msg": "Invalid quantity."} # following block cointains legacy checks against message patterns in "msg" property # will switch "code" checks eventually, when we know all of them if code >= 400: if body.find('Price * QTY is zero or less') >= 0: raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body) if body.find('LOT_SIZE') >= 0: raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body) if body.find('PRICE_FILTER') >= 0: raise InvalidOrder(self.id + ' order price is invalid, i.e. exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body) if len(body) > 0: if body[0] == '{': response = json.loads(body) # check success value for wapi endpoints # response in format {'msg': 'The coin does not exist.', 'success': True/false} success = self.safe_value(response, 'success', True) if not success: message = self.safe_string(response, 'msg') parsedMessage = None if message is not None: try: parsedMessage = json.loads(message) except Exception as e: # do nothing parsedMessage = None if parsedMessage is not None: response = parsedMessage # checks against error codes error = self.safe_string(response, 'code') if error is not None: exceptions = self.exceptions if error in exceptions: # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} # despite that their message is very confusing, it is raised by Binance # on a temporary ban(the API key is valid, but disabled for a while) if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']: raise 
DDoSProtection(self.id + ' temporary banned: ' + body) message = self.safe_string(response, 'msg') if message == 'Order would trigger immediately.': raise InvalidOrder(self.id + ' ' + body) elif message == 'Account has insufficient balance for requested action.': raise InsufficientFunds(self.id + ' ' + body) elif message == 'Rest API trading is not enabled.': raise ExchangeNotAvailable(self.id + ' ' + body) raise exceptions[error](self.id + ' ' + body) else: raise ExchangeError(self.id + ' ' + body) if not success: raise ExchangeError(self.id + ' ' + body) def request(self, path, api='public', method='GET', params={}, headers=None, body=None): response = self.fetch2(path, api, method, params, headers, body) # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} if (api == 'private') or (api == 'wapi'): self.options['hasAlreadyAuthenticatedSuccessfully'] = True return response
42.374663
355
0.468588
ge import Exchange import math import json from ccxt.base.errors import ExchangeError from ccxt.base.errors import AuthenticationError from ccxt.base.errors import ArgumentsRequired from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import InvalidOrder from ccxt.base.errors import OrderNotFound from ccxt.base.errors import DDoSProtection from ccxt.base.errors import ExchangeNotAvailable from ccxt.base.errors import InvalidNonce class binance (Exchange): def describe(self): return self.deep_extend(super(binance, self).describe(), { 'id': 'binance', 'name': 'Binance', 'countries': ['JP'], 'rateLimit': 500, 'certified': True, 'has': { 'fetchDepositAddress': True, 'CORS': False, 'fetchBidsAsks': True, 'fetchTickers': True, 'fetchOHLCV': True, 'fetchMyTrades': True, 'fetchOrder': True, 'fetchOrders': True, 'fetchOpenOrders': True, 'fetchClosedOrders': True, 'withdraw': True, 'fetchFundingFees': True, 'fetchDeposits': True, 'fetchWithdrawals': True, 'fetchTransactions': False, }, 'timeframes': { '1m': '1m', '3m': '3m', '5m': '5m', '15m': '15m', '30m': '30m', '1h': '1h', '2h': '2h', '4h': '4h', '6h': '6h', '8h': '8h', '12h': '12h', '1d': '1d', '3d': '3d', '1w': '1w', '1M': '1M', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/29604020-d5483cdc-87ee-11e7-94c7-d1a8d9169293.jpg', 'api': { 'web': 'https://www.binance.com', 'wapi': 'https://api.binance.com/wapi/v3', 'public': 'https://api.binance.com/api/v1', 'private': 'https://api.binance.com/api/v3', 'v3': 'https://api.binance.com/api/v3', 'v1': 'https://api.binance.com/api/v1', }, 'www': 'https://www.binance.com', 'referral': 'https://www.binance.com/?ref=10205187', 'doc': 'https://github.com/binance-exchange/binance-official-api-docs/blob/master/rest-api.md', 'fees': 'https://www.binance.com/en/fee/schedule', }, 'api': { 'web': { 'get': [ 'exchange/public/product', 'assetWithdraw/getAllAsset.html', ], }, 'wapi': { 'post': [ 'withdraw', ], 'get': [ 'depositHistory', 
'withdrawHistory', 'depositAddress', 'accountStatus', 'systemStatus', 'userAssetDribbletLog', 'tradeFee', 'assetDetail', ], }, 'v3': { 'get': [ 'ticker/price', 'ticker/bookTicker', ], }, 'public': { 'get': [ 'exchangeInfo', 'ping', 'time', 'depth', 'aggTrades', 'klines', 'ticker/24hr', 'ticker/allPrices', 'ticker/allBookTickers', 'ticker/price', 'ticker/bookTicker', 'exchangeInfo', ], 'put': ['userDataStream'], 'post': ['userDataStream'], 'delete': ['userDataStream'], }, 'private': { 'get': [ 'order', 'openOrders', 'allOrders', 'account', 'myTrades', ], 'post': [ 'order', 'order/test', ], 'delete': [ 'order', ], }, }, 'fees': { 'trading': { 'tierBased': False, 'percentage': True, 'taker': 0.001, 'maker': 0.001, }, 'funding': { 'tierBased': False, 'percentage': False, 'withdraw': { 'ADA': 1.0, 'ADX': 4.7, 'AION': 1.9, 'AMB': 11.4, 'APPC': 6.5, 'ARK': 0.1, 'ARN': 3.1, 'AST': 10.0, 'BAT': 18.0, 'BCD': 1.0, 'BCH': 0.001, 'BCPT': 10.2, 'BCX': 1.0, 'BNB': 0.7, 'BNT': 1.5, 'BQX': 1.6, 'BRD': 6.4, 'BTC': 0.001, 'BTG': 0.001, 'BTM': 5.0, 'BTS': 1.0, 'CDT': 67.0, 'CMT': 37.0, 'CND': 47.0, 'CTR': 5.4, 'DASH': 0.002, 'DGD': 0.06, 'DLT': 11.7, 'DNT': 51.0, 'EDO': 2.5, 'ELF': 6.5, 'ENG': 2.1, 'ENJ': 42.0, 'EOS': 1.0, 'ETC': 0.01, 'ETF': 1.0, 'ETH': 0.01, 'EVX': 2.5, 'FUEL': 45.0, 'FUN': 85.0, 'GAS': 0, 'GTO': 20.0, 'GVT': 0.53, 'GXS': 0.3, 'HCC': 0.0005, 'HSR': 0.0001, 'ICN': 3.5, 'ICX': 1.3, 'INS': 1.5, 'IOTA': 0.5, 'KMD': 0.002, 'KNC': 2.6, 'LEND': 54.0, 'LINK': 12.8, 'LLT': 54.0, 'LRC': 9.1, 'LSK': 0.1, 'LTC': 0.01, 'LUN': 0.29, 'MANA': 74.0, 'MCO': 0.86, 'MDA': 4.7, 'MOD': 2.0, 'MTH': 34.0, 'MTL': 1.9, 'NAV': 0.2, 'NEBL': 0.01, 'NEO': 0.0, 'NULS': 2.1, 'OAX': 8.3, 'OMG': 0.57, 'OST': 17.0, 'POE': 88.0, 'POWR': 8.6, 'PPT': 0.25, 'QSP': 21.0, 'QTUM': 0.01, 'RCN': 35.0, 'RDN': 2.2, 'REQ': 18.1, 'RLC': 4.1, 'SALT': 1.3, 'SBTC': 1.0, 'SNGLS': 42, 'SNM': 29.0, 'SNT': 32.0, 'STORJ': 5.9, 'STRAT': 0.1, 'SUB': 7.4, 'TNB': 82.0, 'TNT': 47.0, 'TRIG': 6.7, 'TRX': 129.0, 'USDT': 23.0, 
'VEN': 1.8, 'VIB': 28.0, 'VIBE': 7.2, 'WABI': 3.5, 'WAVES': 0.002, 'WINGS': 9.3, 'WTC': 0.5, 'XLM': 0.01, 'XMR': 0.04, 'XRP': 0.25, 'XVG': 0.1, 'XZC': 0.02, 'YOYOW': 39.0, 'ZEC': 0.005, 'ZRX': 5.7, }, 'deposit': {}, }, }, 'commonCurrencies': { 'YOYO': 'YOYOW', 'BCC': 'BCH', }, 'options': { 'defaultTimeInForce': 'GTC', 'defaultLimitOrderType': 'limit', 'hasAlreadyAuthenticatedSuccessfully': False, 'warnOnFetchOpenOrdersWithoutSymbol': True, 'recvWindow': 5 * 1000, 'timeDifference': 0, 'adjustForTimeDifference': False, 'parseOrderToPrecision': False, 'newOrderRespType': 'RESULT', }, 'exceptions': { '-1000': ExchangeNotAvailable, '-1013': InvalidOrder, '-1021': InvalidNonce, '-1022': AuthenticationError, '-1100': InvalidOrder, '-1104': ExchangeError, # Not all sent parameters were read, read 8 parameters but was sent 9 '-1128': ExchangeError, # {"code":-1128,"msg":"Combination of optional parameters invalid."} '-2010': ExchangeError, # generic error code for createOrder -> 'Account has insufficient balance for requested action.', {"code":-2010,"msg":"Rest API trading is not enabled."}, etc... '-2011': OrderNotFound, # cancelOrder(1, 'BTC/USDT') -> 'UNKNOWN_ORDER' '-2013': OrderNotFound, # fetchOrder(1, 'BTC/USDT') -> 'Order does not exist' '-2014': AuthenticationError, # {"code":-2014, "msg": "API-key format invalid."} '-2015': AuthenticationError, # "Invalid API-key, IP, or permissions for action." 
}, }) def nonce(self): return self.milliseconds() - self.options['timeDifference'] def load_time_difference(self): response = self.publicGetTime() after = self.milliseconds() self.options['timeDifference'] = int(after - response['serverTime']) return self.options['timeDifference'] def fetch_markets(self): response = self.publicGetExchangeInfo() if self.options['adjustForTimeDifference']: self.load_time_difference() markets = response['symbols'] result = [] for i in range(0, len(markets)): market = markets[i] id = market['symbol'] # "123456" is a "test symbol/market" if id == '123456': continue baseId = market['baseAsset'] quoteId = market['quoteAsset'] base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote filters = self.index_by(market['filters'], 'filterType') precision = { 'base': market['baseAssetPrecision'], 'quote': market['quotePrecision'], 'amount': market['baseAssetPrecision'], 'price': market['quotePrecision'], } active = (market['status'] == 'TRADING') entry = { 'id': id, 'symbol': symbol, 'base': base, 'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'info': market, 'active': active, 'precision': precision, 'limits': { 'amount': { 'min': math.pow(10, -precision['amount']), 'max': None, }, 'price': { 'min': math.pow(10, -precision['price']), 'max': None, }, 'cost': { 'min': -1 * math.log10(precision['amount']), 'max': None, }, }, } if 'PRICE_FILTER' in filters: filter = filters['PRICE_FILTER'] entry['precision']['price'] = self.precision_from_string(filter['tickSize']) entry['limits']['price'] = { 'min': self.safe_float(filter, 'minPrice'), 'max': self.safe_float(filter, 'maxPrice'), } if 'LOT_SIZE' in filters: filter = filters['LOT_SIZE'] entry['precision']['amount'] = self.precision_from_string(filter['stepSize']) entry['limits']['amount'] = { 'min': self.safe_float(filter, 'minQty'), 'max': self.safe_float(filter, 'maxQty'), } if 'MIN_NOTIONAL' in filters: entry['limits']['cost']['min'] = 
float(filters['MIN_NOTIONAL']['minNotional']) result.append(entry) return result def calculate_fee(self, symbol, type, side, amount, price, takerOrMaker='taker', params={}): market = self.markets[symbol] key = 'quote' rate = market[takerOrMaker] cost = float(self.cost_to_precision(symbol, amount * rate)) if side == 'sell': cost *= price else: key = 'base' return { 'type': takerOrMaker, 'currency': market[key], 'rate': rate, 'cost': float(self.fee_to_precision(symbol, cost)), } def fetch_balance(self, params={}): self.load_markets() response = self.privateGetAccount(params) result = {'info': response} balances = response['balances'] for i in range(0, len(balances)): balance = balances[i] currency = balance['asset'] if currency in self.currencies_by_id: currency = self.currencies_by_id[currency]['code'] account = { 'free': float(balance['free']), 'used': float(balance['locked']), 'total': 0.0, } account['total'] = self.sum(account['free'], account['used']) result[currency] = account return self.parse_balance(result) def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit # default = maximum = 100 response = self.publicGetDepth(self.extend(request, params)) orderbook = self.parse_order_book(response) orderbook['nonce'] = self.safe_integer(response, 'lastUpdateId') return orderbook def parse_ticker(self, ticker, market=None): timestamp = self.safe_integer(ticker, 'closeTime') iso8601 = None if (timestamp is None) else self.iso8601(timestamp) symbol = self.find_symbol(self.safe_string(ticker, 'symbol'), market) last = self.safe_float(ticker, 'lastPrice') return { 'symbol': symbol, 'timestamp': timestamp, 'datetime': iso8601, 'high': self.safe_float(ticker, 'highPrice'), 'low': self.safe_float(ticker, 'lowPrice'), 'bid': self.safe_float(ticker, 'bidPrice'), 'bidVolume': self.safe_float(ticker, 'bidQty'), 'ask': self.safe_float(ticker, 
'askPrice'), 'askVolume': self.safe_float(ticker, 'askQty'), 'vwap': self.safe_float(ticker, 'weightedAvgPrice'), 'open': self.safe_float(ticker, 'openPrice'), 'close': last, 'last': last, 'previousClose': self.safe_float(ticker, 'prevClosePrice'), # previous day close 'change': self.safe_float(ticker, 'priceChange'), 'percentage': self.safe_float(ticker, 'priceChangePercent'), 'average': None, 'baseVolume': self.safe_float(ticker, 'volume'), 'quoteVolume': self.safe_float(ticker, 'quoteVolume'), 'info': ticker, } def fetch_ticker(self, symbol, params={}): self.load_markets() market = self.market(symbol) response = self.publicGetTicker24hr(self.extend({ 'symbol': market['id'], }, params)) return self.parse_ticker(response, market) def parse_tickers(self, rawTickers, symbols=None): tickers = [] for i in range(0, len(rawTickers)): tickers.append(self.parse_ticker(rawTickers[i])) return self.filter_by_array(tickers, 'symbol', symbols) def fetch_bids_asks(self, symbols=None, params={}): self.load_markets() rawTickers = self.publicGetTickerBookTicker(params) return self.parse_tickers(rawTickers, symbols) def fetch_tickers(self, symbols=None, params={}): self.load_markets() rawTickers = self.publicGetTicker24hr(params) return self.parse_tickers(rawTickers, symbols) def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None): return [ ohlcv[0], float(ohlcv[1]), float(ohlcv[2]), float(ohlcv[3]), float(ohlcv[4]), float(ohlcv[5]), ] def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], 'interval': self.timeframes[timeframe], } if since is not None: request['startTime'] = since if limit is not None: request['limit'] = limit # default == max == 500 response = self.publicGetKlines(self.extend(request, params)) return self.parse_ohlcvs(response, market, timeframe, since, limit) def parse_trade(self, trade, market=None): timestampField = 'T' if 
('T' in list(trade.keys())) else 'time' timestamp = self.safe_integer(trade, timestampField) priceField = 'p' if ('p' in list(trade.keys())) else 'price' price = self.safe_float(trade, priceField) amountField = 'q' if ('q' in list(trade.keys())) else 'qty' amount = self.safe_float(trade, amountField) idField = 'a' if ('a' in list(trade.keys())) else 'id' id = self.safe_string(trade, idField) side = None order = None if 'orderId' in trade: order = self.safe_string(trade, 'orderId') if 'm' in trade: side = 'sell' if trade['m'] else 'buy' # self is reversed intentionally else: if 'isBuyer' in trade: side = 'buy' if (trade['isBuyer']) else 'sell' # self is a True side fee = None if 'commission' in trade: fee = { 'cost': self.safe_float(trade, 'commission'), 'currency': self.common_currency_code(trade['commissionAsset']), } takerOrMaker = None if 'isMaker' in trade: takerOrMaker = 'maker' if trade['isMaker'] else 'taker' symbol = None if market is None: marketId = self.safe_string(trade, 'symbol') market = self.safe_value(self.markets_by_id, marketId) if market is not None: symbol = market['symbol'] return { 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': id, 'order': order, 'type': None, 'takerOrMaker': takerOrMaker, 'side': side, 'price': price, 'cost': price * amount, 'amount': amount, 'fee': fee, } def fetch_trades(self, symbol, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if since is not None: request['startTime'] = since request['endTime'] = self.sum(since, 3600000) if limit is not None: request['limit'] = limit # 'fromId': 123, # ID to get aggregate trades from INCLUSIVE. # 'startTime': 456, # Timestamp in ms to get aggregate trades from INCLUSIVE. # 'endTime': 789, # Timestamp in ms to get aggregate trades until INCLUSIVE. 
# 'limit': 500, # default = 500, maximum = 1000 # # Caveats: # - default limit(500) applies only if no other parameters set, trades up # to the maximum limit may be returned to satisfy other parameters # - if both limit and time window is set and time window contains more # trades than the limit then the last trades from the window are returned # - 'tradeId' accepted and returned by self method is "aggregate" trade id # which is different from actual trade id # - setting both fromId and time window results in error response = self.publicGetAggTrades(self.extend(request, params)) return self.parse_trades(response, market, since, limit) def parse_order_status(self, status): statuses = { 'NEW': 'open', 'PARTIALLY_FILLED': 'open', 'FILLED': 'closed', 'CANCELED': 'canceled', } return statuses[status] if (status in list(statuses.keys())) else status def parse_order(self, order, market=None): status = self.parse_order_status(self.safe_string(order, 'status')) symbol = self.find_symbol(self.safe_string(order, 'symbol'), market) timestamp = None if 'time' in order: timestamp = order['time'] elif 'transactTime' in order: timestamp = order['transactTime'] price = self.safe_float(order, 'price') amount = self.safe_float(order, 'origQty') filled = self.safe_float(order, 'executedQty') remaining = None cost = self.safe_float(order, 'cummulativeQuoteQty') if filled is not None: if amount is not None: remaining = amount - filled if self.options['parseOrderToPrecision']: remaining = float(self.amount_to_precision(symbol, remaining)) remaining = max(remaining, 0.0) if price is not None: if cost is None: cost = price * filled id = self.safe_string(order, 'orderId') type = self.safe_string(order, 'type') if type is not None: type = type.lower() if type == 'market': if price == 0.0: if (cost is not None) and(filled is not None): if (cost > 0) and(filled > 0): price = cost / filled side = self.safe_string(order, 'side') if side is not None: side = side.lower() fee = None trades = None 
fills = self.safe_value(order, 'fills') if fills is not None: trades = self.parse_trades(fills, market) numTrades = len(trades) if numTrades > 0: cost = trades[0]['cost'] fee = { 'cost': trades[0]['fee']['cost'], 'currency': trades[0]['fee']['currency'], } for i in range(1, len(trades)): cost = self.sum(cost, trades[i]['cost']) fee['cost'] = self.sum(fee['cost'], trades[i]['fee']['cost']) average = None if cost is not None: if filled: average = cost / filled if self.options['parseOrderToPrecision']: cost = float(self.cost_to_precision(symbol, cost)) result = { 'info': order, 'id': id, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'lastTradeTimestamp': None, 'symbol': symbol, 'type': type, 'side': side, 'price': price, 'amount': amount, 'cost': cost, 'average': average, 'filled': filled, 'remaining': remaining, 'status': status, 'fee': fee, 'trades': trades, } return result def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) # the next 5 lines are added to support for testing orders method = 'privatePostOrder' test = self.safe_value(params, 'test', False) if test: method += 'Test' params = self.omit(params, 'test') uppercaseType = type.upper() order = { 'symbol': market['id'], 'quantity': self.amount_to_precision(symbol, amount), 'type': uppercaseType, 'side': side.upper(), 'newOrderRespType': self.options['newOrderRespType'], # 'ACK' for order id, 'RESULT' for full order or 'FULL' for order with fills } timeInForceIsRequired = False priceIsRequired = False stopPriceIsRequired = False if uppercaseType == 'LIMIT': priceIsRequired = True timeInForceIsRequired = True elif (uppercaseType == 'STOP_LOSS') or (uppercaseType == 'TAKE_PROFIT'): stopPriceIsRequired = True elif (uppercaseType == 'STOP_LOSS_LIMIT') or (uppercaseType == 'TAKE_PROFIT_LIMIT'): stopPriceIsRequired = True priceIsRequired = True timeInForceIsRequired = True elif uppercaseType == 'LIMIT_MAKER': priceIsRequired = 
True if priceIsRequired: if price is None: raise InvalidOrder(self.id + ' createOrder method requires a price argument for a ' + type + ' order') order['price'] = self.price_to_precision(symbol, price) if timeInForceIsRequired: order['timeInForce'] = self.options['defaultTimeInForce'] # 'GTC' = Good To Cancel(default), 'IOC' = Immediate Or Cancel if stopPriceIsRequired: stopPrice = self.safe_float(params, 'stopPrice') if stopPrice is None: raise InvalidOrder(self.id + ' createOrder method requires a stopPrice extra param for a ' + type + ' order') else: order['stopPrice'] = self.price_to_precision(symbol, stopPrice) response = getattr(self, method)(self.extend(order, params)) return self.parse_order(response, market) def fetch_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument') self.load_markets() market = self.market(symbol) origClientOrderId = self.safe_value(params, 'origClientOrderId') request = { 'symbol': market['id'], } if origClientOrderId is not None: request['origClientOrderId'] = origClientOrderId else: request['orderId'] = int(id) response = self.privateGetOrder(self.extend(request, params)) return self.parse_order(response, market) def fetch_orders(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchOrders requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit response = self.privateGetAllOrders(self.extend(request, params)) # # [ # { # "symbol": "LTCBTC", # "orderId": 1, # "clientOrderId": "myOrder1", # "price": "0.1", # "origQty": "1.0", # "executedQty": "0.0", # "cummulativeQuoteQty": "0.0", # "status": "NEW", # "timeInForce": "GTC", # "type": "LIMIT", # "side": "BUY", # "stopPrice": "0.0", # "icebergQty": "0.0", # "time": 1499827319559, # "updateTime": 1499827319559, # "isWorking": True # } # ] # 
return self.parse_orders(response, market, since, limit) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = None request = {} if symbol is not None: market = self.market(symbol) request['symbol'] = market['id'] elif self.options['warnOnFetchOpenOrdersWithoutSymbol']: symbols = self.symbols numSymbols = len(symbols) fetchOpenOrdersRateLimit = int(numSymbols / 2) raise ExchangeError(self.id + ' fetchOpenOrders WARNING: fetching open orders without specifying a symbol is rate-limited to one call per ' + str(fetchOpenOrdersRateLimit) + ' seconds. Do not call self method frequently to avoid ban. Set ' + self.id + '.options["warnOnFetchOpenOrdersWithoutSymbol"] = False to suppress self warning message.') response = self.privateGetOpenOrders(self.extend(request, params)) return self.parse_orders(response, market, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): orders = self.fetch_orders(symbol, since, limit, params) return self.filter_by(orders, 'status', 'closed') def cancel_order(self, id, symbol=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument') self.load_markets() market = self.market(symbol) response = self.privateDeleteOrder(self.extend({ 'symbol': market['id'], 'orderId': int(id), # 'origClientOrderId': id, }, params)) return self.parse_order(response) def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): if symbol is None: raise ArgumentsRequired(self.id + ' fetchMyTrades requires a symbol argument') self.load_markets() market = self.market(symbol) request = { 'symbol': market['id'], } if limit is not None: request['limit'] = limit response = self.privateGetMyTrades(self.extend(request, params)) return self.parse_trades(response, market, since, limit) def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None request = {} if code is 
not None: currency = self.currency(code) request['asset'] = currency['id'] if since is not None: request['startTime'] = since response = self.wapiGetDepositHistory(self.extend(request, params)) # # { success: True, # depositList: [{insertTime: 1517425007000, # amount: 0.3, # address: "0x0123456789abcdef", # addressTag: "", # txId: "0x0123456789abcdef", # asset: "ETH", # status: 1 }]} # return self.parseTransactions(response['depositList'], currency, since, limit) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): self.load_markets() currency = None request = {} if code is not None: currency = self.currency(code) request['asset'] = currency['id'] if since is not None: request['startTime'] = since response = self.wapiGetWithdrawHistory(self.extend(request, params)) # # {withdrawList: [{ amount: 14, # address: "0x0123456789abcdef...", # successTime: 1514489710000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ETH", # applyTime: 1514488724000, # status: 6 }, # { amount: 7600, # address: "0x0123456789abcdef...", # successTime: 1515323226000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ICN", # applyTime: 1515322539000, # status: 6 } ], # success: True } # return self.parseTransactions(response['withdrawList'], currency, since, limit) def parse_transaction_status_by_type(self, status, type=None): if type is None: return status statuses = { 'deposit': { '0': 'pending', '1': 'ok', }, 'withdrawal': { '0': 'pending', # Email Sent '1': 'canceled', # Cancelled(different from 1 = ok in deposits) '2': 'pending', # Awaiting Approval '3': 'failed', # Rejected '4': 'pending', # Processing '5': 'failed', # Failure '6': 'ok', # Completed }, } return statuses[type][status] if (status in list(statuses[type].keys())) else status def parse_transaction(self, transaction, currency=None): # # fetchDeposits # {insertTime: 1517425007000, # amount: 0.3, # address: 
"0x0123456789abcdef", # addressTag: "", # txId: "0x0123456789abcdef", # asset: "ETH", # status: 1 } # # fetchWithdrawals # # { amount: 14, # address: "0x0123456789abcdef...", # successTime: 1514489710000, # addressTag: "", # txId: "0x0123456789abcdef...", # id: "0123456789abcdef...", # asset: "ETH", # applyTime: 1514488724000, # status: 6 } # id = self.safe_string(transaction, 'id') address = self.safe_string(transaction, 'address') tag = self.safe_string(transaction, 'addressTag') # set but unused if len(tag) < 1: tag = None txid = self.safe_value(transaction, 'txId') code = None currencyId = self.safe_string(transaction, 'asset') if currencyId in self.currencies_by_id: currency = self.currencies_by_id[currencyId] else: code = self.common_currency_code(currencyId) if currency is not None: code = currency['code'] timestamp = None insertTime = self.safe_integer(transaction, 'insertTime') applyTime = self.safe_integer(transaction, 'applyTime') type = self.safe_string(transaction, 'type') if type is None: if (insertTime is not None) and(applyTime is None): type = 'deposit' timestamp = insertTime elif (insertTime is None) and(applyTime is not None): type = 'withdrawal' timestamp = applyTime status = self.parse_transaction_status_by_type(self.safe_string(transaction, 'status'), type) amount = self.safe_float(transaction, 'amount') feeCost = None fee = { 'cost': feeCost, 'currency': code, } return { 'info': transaction, 'id': id, 'txid': txid, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'tag': tag, 'type': type, 'amount': amount, 'currency': code, 'status': status, 'updated': None, 'fee': fee, } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) response = self.wapiGetDepositAddress(self.extend({ 'asset': currency['id'], }, params)) if 'success' in response: if response['success']: address = self.safe_string(response, 'address') tag = self.safe_string(response, 'addressTag') return { 
'currency': code, 'address': self.check_address(address), 'tag': tag, 'info': response, } def fetch_funding_fees(self, codes=None, params={}): response = self.wapiGetAssetDetail() # # { # "success": True, # "assetDetail": { # "CTR": { # "minWithdrawAmount": "70.00000000", #min withdraw amount # "depositStatus": False,//deposit status # "withdrawFee": 35, # withdraw fee # "withdrawStatus": True, #withdraw status # "depositTip": "Delisted, Deposit Suspended" #reason # }, # "SKY": { # "minWithdrawAmount": "0.02000000", # "depositStatus": True, # "withdrawFee": 0.01, # "withdrawStatus": True # } # } # } # detail = self.safe_value(response, 'assetDetail') ids = list(detail.keys()) withdrawFees = {} for i in range(0, len(ids)): id = ids[i] code = self.common_currency_code(id) withdrawFees[code] = self.safe_float(detail[id], 'withdrawFee') return { 'withdraw': withdrawFees, 'deposit': {}, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): self.check_address(address) self.load_markets() currency = self.currency(code) name = address[0:20] request = { 'asset': currency['id'], 'address': address, 'amount': float(amount), 'name': name, } if tag: request['addressTag'] = tag response = self.wapiPostWithdraw(self.extend(request, params)) return { 'info': response, 'id': self.safe_string(response, 'id'), } def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): url = self.urls['api'][api] url += '/' + path if api == 'wapi': url += '.html' # v1 special case for userDataStream if path == 'userDataStream': body = self.urlencode(params) headers = { 'X-MBX-APIKEY': self.apiKey, 'Content-Type': 'application/x-www-form-urlencoded', } elif (api == 'private') or (api == 'wapi'): self.check_required_credentials() query = self.urlencode(self.extend({ 'timestamp': self.nonce(), 'recvWindow': self.options['recvWindow'], }, params)) signature = self.hmac(self.encode(query), self.encode(self.secret)) query += '&' + 'signature=' + 
signature headers = { 'X-MBX-APIKEY': self.apiKey, } if (method == 'GET') or (method == 'DELETE') or (api == 'wapi'): url += '?' + query else: body = query headers['Content-Type'] = 'application/x-www-form-urlencoded' else: if params: url += '?' + self.urlencode(params) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body): if (code == 418) or (code == 429): raise DDoSProtection(self.id + ' ' + str(code) + ' ' + reason + ' ' + body) # error response in a form: {"code": -1013, "msg": "Invalid quantity."} # following block cointains legacy checks against message patterns in "msg" property # will switch "code" checks eventually, when we know all of them if code >= 400: if body.find('Price * QTY is zero or less') >= 0: raise InvalidOrder(self.id + ' order cost = amount * price is zero or less ' + body) if body.find('LOT_SIZE') >= 0: raise InvalidOrder(self.id + ' order amount should be evenly divisible by lot size ' + body) if body.find('PRICE_FILTER') >= 0: raise InvalidOrder(self.id + ' order price is invalid, i.e. 
exceeds allowed price precision, exceeds min price or max price limits or is invalid float value in general, use self.price_to_precision(symbol, amount) ' + body) if len(body) > 0: if body[0] == '{': response = json.loads(body) # check success value for wapi endpoints # response in format {'msg': 'The coin does not exist.', 'success': True/false} success = self.safe_value(response, 'success', True) if not success: message = self.safe_string(response, 'msg') parsedMessage = None if message is not None: try: parsedMessage = json.loads(message) except Exception as e: # do nothing parsedMessage = None if parsedMessage is not None: response = parsedMessage # checks against error codes error = self.safe_string(response, 'code') if error is not None: exceptions = self.exceptions if error in exceptions: # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} # despite that their message is very confusing, it is raised by Binance # on a temporary ban(the API key is valid, but disabled for a while) if (error == '-2015') and self.options['hasAlreadyAuthenticatedSuccessfully']: raise DDoSProtection(self.id + ' temporary banned: ' + body) message = self.safe_string(response, 'msg') if message == 'Order would trigger immediately.': raise InvalidOrder(self.id + ' ' + body) elif message == 'Account has insufficient balance for requested action.': raise InsufficientFunds(self.id + ' ' + body) elif message == 'Rest API trading is not enabled.': raise ExchangeNotAvailable(self.id + ' ' + body) raise exceptions[error](self.id + ' ' + body) else: raise ExchangeError(self.id + ' ' + body) if not success: raise ExchangeError(self.id + ' ' + body) def request(self, path, api='public', method='GET', params={}, headers=None, body=None): response = self.fetch2(path, api, method, params, headers, body) # a workaround for {"code":-2015,"msg":"Invalid API-key, IP, or permissions for action."} if (api == 'private') or (api == 'wapi'): 
self.options['hasAlreadyAuthenticatedSuccessfully'] = True return response
true
true
f71ced92e40f1740937111306959303ed4663fa3
9,781
py
Python
test.py
erprashu/Metal_erning
79d1a6a457be37258df50a9194946caeb86845a2
[ "MIT" ]
null
null
null
test.py
erprashu/Metal_erning
79d1a6a457be37258df50a9194946caeb86845a2
[ "MIT" ]
null
null
null
test.py
erprashu/Metal_erning
79d1a6a457be37258df50a9194946caeb86845a2
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import argparse import torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.autograd import Variable from tqdm import tqdm from models.protonet_embedding import ProtoNetEmbedding from models.R2D2_embedding import R2D2Embedding from models.ResNet12_embedding import resnet12 from models.classification_heads import ClassificationHead, R2D2Head from utils import pprint, set_gpu, Timer, count_accuracy, log import random import numpy as np import os import pdb def get_model(options): # Choose the embedding network if options.network == 'ProtoNet': network = ProtoNetEmbedding().cuda() elif options.network == 'R2D2': network = R2D2Embedding().cuda() elif options.network == 'ResNet': if options.dataset == 'miniImageNet' or options.dataset == 'tieredImageNet': network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=5).cuda() network = torch.nn.DataParallel(network) else: network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=2).cuda() network = torch.nn.DataParallel(network) else: print ("Cannot recognize the network type") assert(False) # Choose the classification head if opt.head == 'ProtoNet': cls_head = ClassificationHead(base_learner='ProtoNet').cuda() elif opt.head == 'Ridge': cls_head = ClassificationHead(base_learner='Ridge').cuda() elif opt.head == 'R2D2': cls_head = R2D2Head().cuda() elif opt.head == 'SVM': cls_head = ClassificationHead(base_learner='SVM-CS').cuda() else: print ("Cannot recognize the classification head type") assert(False) return (network, cls_head) def get_dataset(options): # Choose the embedding network if options.dataset == 'miniImageNet': from data.mini_imagenet import MiniImageNet, FewShotDataloader dataset_test = MiniImageNet(phase='test') data_loader = FewShotDataloader elif options.dataset == 'tieredImageNet': from data.tiered_imagenet import tieredImageNet, FewShotDataloader dataset_test = tieredImageNet(phase='test') data_loader = FewShotDataloader elif 
options.dataset == 'CIFAR_FS': from data.CIFAR_FS import CIFAR_FS, FewShotDataloader dataset_test = CIFAR_FS(phase='test') data_loader = FewShotDataloader elif options.dataset == 'FC100': from data.FC100 import FC100, FewShotDataloader dataset_test = FC100(phase='test') data_loader = FewShotDataloader else: print ("Cannot recognize the dataset type") assert(False) return (dataset_test, data_loader) def self_mix(data): size = data.size() W = size[-1] H = size[-2] # uniform cx = np.random.randint(W) cy = np.random.randint(H) cut_w = W//2 cut_h = H//2 bbx1 = np.clip(cx - cut_w // 2, 0, W) bby1 = np.clip(cy - cut_h // 2, 0, H) bbx2 = np.clip(cx + cut_w // 2, 0, W) bby2 = np.clip(cy + cut_h // 2, 0, H) while True: bbxn = np.random.randint(0, W-(bbx2-bbx1)) bbyn = np.random.randint(0, H-(bby2-bby1)) if bbxn != bbx1 or bbyn != bby1: break if (bbx2 - bbx1) == (bby2 - bby1): k = random.sample([0, 1, 2, 3], 1)[0] else: k = 0 data[:, :, bbx1:bbx2, bby1:bby2] = torch.rot90(data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)], k, [2,3]) #data[:, :, bbx1:bbx2, bby1:bby2] = data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)] return data def flip(x, dim): indices = [slice(None)] * x.dim() indices[dim] = torch.arange(x.size(dim) - 1, -1, -1, dtype=torch.long, device=x.device) return x[tuple(indices)] def build_grid(source_size,target_size): k = float(target_size)/float(source_size) direct = torch.linspace(-k,k,target_size).unsqueeze(0).repeat(target_size,1).unsqueeze(-1) full = torch.cat([direct,direct.transpose(1,0)],dim=2).unsqueeze(0) return full.cuda() def random_crop_grid(x,grid): delta = x.size(2)-grid.size(1) grid = grid.repeat(x.size(0),1,1,1).cuda() #Add random shifts by x grid[:,:,:,0] = grid[:,:,:,0]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2) #Add random shifts by y grid[:,:,:,1] = grid[:,:,:,1]+ torch.FloatTensor(x.size(0)).cuda().random_(0, 
delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2) return grid def random_cropping(batch, t): #Building central crop of t pixel size grid_source = build_grid(batch.size(-1),t) #Make radom shift for each batch grid_shifted = random_crop_grid(batch,grid_source) #Sample using grid sample sampled_batch = F.grid_sample(batch, grid_shifted, mode='nearest') return sampled_batch def shot_aug(data_support, labels_support, n_support, method, opt): size = data_support.shape if method == "fliplr": n_support = opt.s_du * n_support data_shot = flip(data_support, -1) data_support = torch.cat((data_support, data_shot), dim = 1) labels_support = torch.cat((labels_support, labels_support), dim = 1) elif method == "random_crop": n_support = opt.s_du * n_support data_shot = F.pad(data_support.view([-1] + list(data_support.shape[-3:])), (4,4,4,4)) data_shot = random_cropping(data_shot, 32) data_support = torch.cat((data_support, data_shot.view([size[0], -1] + list(data_support.shape[-3:]))), dim = 1) labels_support = torch.cat((labels_support, labels_support), dim = 1) return data_support, labels_support, n_support if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--gpu', default='0') parser.add_argument('--load', default='./experiments/exp_1/best_model.pth', help='path of the checkpoint file') parser.add_argument('--episode', type=int, default=1000, help='number of episodes to test') parser.add_argument('--way', type=int, default=5, help='number of classes in one test episode') parser.add_argument('--shot', type=int, default=1, help='number of support examples per training class') parser.add_argument('--shot_aug', '-shotaug', default=[], nargs='+', type=str, help='If use shot level data augmentation.') parser.add_argument('--s_du', type=int, default=1, help='number of support examples augmented by shot') parser.add_argument('--query', type=int, default=15, help='number of query examples per training class') 
parser.add_argument('--network', type=str, default='ProtoNet', help='choose which embedding network to use. ProtoNet, R2D2, ResNet') parser.add_argument('--head', type=str, default='ProtoNet', help='choose which embedding network to use. ProtoNet, Ridge, R2D2, SVM') parser.add_argument('--dataset', type=str, default='miniImageNet', help='choose which classification head to use. miniImageNet, tieredImageNet, CIFAR_FS, FC100') opt = parser.parse_args() (dataset_test, data_loader) = get_dataset(opt) dloader_test = data_loader( dataset=dataset_test, nKnovel=opt.way, nKbase=0, nExemplars=opt.shot, # num training examples per novel category nTestNovel=opt.query * opt.way, # num test examples for all the novel categories nTestBase=0, # num test examples for all the base categories batch_size=1, num_workers=1, epoch_size=opt.episode, # num of batches per epoch ) set_gpu(opt.gpu) # Define the models (embedding_net, cls_head) = get_model(opt) # Load saved model checkpoints saved_models = torch.load(opt.load) embedding_net.load_state_dict(saved_models['embedding']) embedding_net.eval() cls_head.load_state_dict(saved_models['head']) cls_head.eval() # Evaluate on test set test_accuracies = [] for i, batch in enumerate(tqdm(dloader_test()), 1): data_support, labels_support, data_query, labels_query, _, _ = [x.cuda() for x in batch] n_support = opt.way * opt.shot n_query = opt.way * opt.query for method in opt.shot_aug: data_support, labels_support, n_support = shot_aug(data_support, labels_support, n_support, method, opt) with torch.no_grad(): emb_support = embedding_net(data_support.reshape([-1] + list(data_support.shape[-3:]))) emb_support = emb_support.reshape(1, n_support, -1) emb_query = embedding_net(data_query.reshape([-1] + list(data_query.shape[-3:]))) emb_query = emb_query.reshape(1, n_query, -1) if opt.head == 'SVM': logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot, maxIter=3) else: logits = cls_head(emb_query, emb_support, labels_support, 
opt.way, opt.shot) acc = count_accuracy(logits.reshape(-1, opt.way), labels_query.reshape(-1)) test_accuracies.append(acc.item()) avg = np.mean(np.array(test_accuracies)) std = np.std(np.array(test_accuracies)) ci = std / np.sqrt(i + 1) if i % 50 == 0: print('Episode [{}/{}]:\t\t\tAccuracy: {:.2f} ± {:.2f} % ({:.2f} %)'\ .format(i, opt.episode, avg, ci, acc))
39.922449
166
0.630815
import argparse import torch import torch.nn.functional as F from torch.utils.data import DataLoader from torch.autograd import Variable from tqdm import tqdm from models.protonet_embedding import ProtoNetEmbedding from models.R2D2_embedding import R2D2Embedding from models.ResNet12_embedding import resnet12 from models.classification_heads import ClassificationHead, R2D2Head from utils import pprint, set_gpu, Timer, count_accuracy, log import random import numpy as np import os import pdb def get_model(options): if options.network == 'ProtoNet': network = ProtoNetEmbedding().cuda() elif options.network == 'R2D2': network = R2D2Embedding().cuda() elif options.network == 'ResNet': if options.dataset == 'miniImageNet' or options.dataset == 'tieredImageNet': network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=5).cuda() network = torch.nn.DataParallel(network) else: network = resnet12(avg_pool=False, drop_rate=0.1, dropblock_size=2).cuda() network = torch.nn.DataParallel(network) else: print ("Cannot recognize the network type") assert(False) if opt.head == 'ProtoNet': cls_head = ClassificationHead(base_learner='ProtoNet').cuda() elif opt.head == 'Ridge': cls_head = ClassificationHead(base_learner='Ridge').cuda() elif opt.head == 'R2D2': cls_head = R2D2Head().cuda() elif opt.head == 'SVM': cls_head = ClassificationHead(base_learner='SVM-CS').cuda() else: print ("Cannot recognize the classification head type") assert(False) return (network, cls_head) def get_dataset(options): if options.dataset == 'miniImageNet': from data.mini_imagenet import MiniImageNet, FewShotDataloader dataset_test = MiniImageNet(phase='test') data_loader = FewShotDataloader elif options.dataset == 'tieredImageNet': from data.tiered_imagenet import tieredImageNet, FewShotDataloader dataset_test = tieredImageNet(phase='test') data_loader = FewShotDataloader elif options.dataset == 'CIFAR_FS': from data.CIFAR_FS import CIFAR_FS, FewShotDataloader dataset_test = CIFAR_FS(phase='test') 
data_loader = FewShotDataloader elif options.dataset == 'FC100': from data.FC100 import FC100, FewShotDataloader dataset_test = FC100(phase='test') data_loader = FewShotDataloader else: print ("Cannot recognize the dataset type") assert(False) return (dataset_test, data_loader) def self_mix(data): size = data.size() W = size[-1] H = size[-2] cx = np.random.randint(W) cy = np.random.randint(H) cut_w = W//2 cut_h = H//2 bbx1 = np.clip(cx - cut_w // 2, 0, W) bby1 = np.clip(cy - cut_h // 2, 0, H) bbx2 = np.clip(cx + cut_w // 2, 0, W) bby2 = np.clip(cy + cut_h // 2, 0, H) while True: bbxn = np.random.randint(0, W-(bbx2-bbx1)) bbyn = np.random.randint(0, H-(bby2-bby1)) if bbxn != bbx1 or bbyn != bby1: break if (bbx2 - bbx1) == (bby2 - bby1): k = random.sample([0, 1, 2, 3], 1)[0] else: k = 0 data[:, :, bbx1:bbx2, bby1:bby2] = torch.rot90(data[:, :, bbxn:bbxn + (bbx2-bbx1), bbyn:bbyn + (bby2-bby1)], k, [2,3]) return data def flip(x, dim): indices = [slice(None)] * x.dim() indices[dim] = torch.arange(x.size(dim) - 1, -1, -1, dtype=torch.long, device=x.device) return x[tuple(indices)] def build_grid(source_size,target_size): k = float(target_size)/float(source_size) direct = torch.linspace(-k,k,target_size).unsqueeze(0).repeat(target_size,1).unsqueeze(-1) full = torch.cat([direct,direct.transpose(1,0)],dim=2).unsqueeze(0) return full.cuda() def random_crop_grid(x,grid): delta = x.size(2)-grid.size(1) grid = grid.repeat(x.size(0),1,1,1).cuda() grid[:,:,:,0] = grid[:,:,:,0]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2) grid[:,:,:,1] = grid[:,:,:,1]+ torch.FloatTensor(x.size(0)).cuda().random_(0, delta).unsqueeze(-1).unsqueeze(-1).expand(-1, grid.size(1), grid.size(2)) /x.size(2) return grid def random_cropping(batch, t): grid_source = build_grid(batch.size(-1),t) grid_shifted = random_crop_grid(batch,grid_source) sampled_batch = F.grid_sample(batch, grid_shifted, mode='nearest') return 
sampled_batch def shot_aug(data_support, labels_support, n_support, method, opt): size = data_support.shape if method == "fliplr": n_support = opt.s_du * n_support data_shot = flip(data_support, -1) data_support = torch.cat((data_support, data_shot), dim = 1) labels_support = torch.cat((labels_support, labels_support), dim = 1) elif method == "random_crop": n_support = opt.s_du * n_support data_shot = F.pad(data_support.view([-1] + list(data_support.shape[-3:])), (4,4,4,4)) data_shot = random_cropping(data_shot, 32) data_support = torch.cat((data_support, data_shot.view([size[0], -1] + list(data_support.shape[-3:]))), dim = 1) labels_support = torch.cat((labels_support, labels_support), dim = 1) return data_support, labels_support, n_support if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--gpu', default='0') parser.add_argument('--load', default='./experiments/exp_1/best_model.pth', help='path of the checkpoint file') parser.add_argument('--episode', type=int, default=1000, help='number of episodes to test') parser.add_argument('--way', type=int, default=5, help='number of classes in one test episode') parser.add_argument('--shot', type=int, default=1, help='number of support examples per training class') parser.add_argument('--shot_aug', '-shotaug', default=[], nargs='+', type=str, help='If use shot level data augmentation.') parser.add_argument('--s_du', type=int, default=1, help='number of support examples augmented by shot') parser.add_argument('--query', type=int, default=15, help='number of query examples per training class') parser.add_argument('--network', type=str, default='ProtoNet', help='choose which embedding network to use. ProtoNet, R2D2, ResNet') parser.add_argument('--head', type=str, default='ProtoNet', help='choose which embedding network to use. ProtoNet, Ridge, R2D2, SVM') parser.add_argument('--dataset', type=str, default='miniImageNet', help='choose which classification head to use. 
miniImageNet, tieredImageNet, CIFAR_FS, FC100') opt = parser.parse_args() (dataset_test, data_loader) = get_dataset(opt) dloader_test = data_loader( dataset=dataset_test, nKnovel=opt.way, nKbase=0, nExemplars=opt.shot, nTestNovel=opt.query * opt.way, nTestBase=0, batch_size=1, num_workers=1, epoch_size=opt.episode, ) set_gpu(opt.gpu) (embedding_net, cls_head) = get_model(opt) saved_models = torch.load(opt.load) embedding_net.load_state_dict(saved_models['embedding']) embedding_net.eval() cls_head.load_state_dict(saved_models['head']) cls_head.eval() test_accuracies = [] for i, batch in enumerate(tqdm(dloader_test()), 1): data_support, labels_support, data_query, labels_query, _, _ = [x.cuda() for x in batch] n_support = opt.way * opt.shot n_query = opt.way * opt.query for method in opt.shot_aug: data_support, labels_support, n_support = shot_aug(data_support, labels_support, n_support, method, opt) with torch.no_grad(): emb_support = embedding_net(data_support.reshape([-1] + list(data_support.shape[-3:]))) emb_support = emb_support.reshape(1, n_support, -1) emb_query = embedding_net(data_query.reshape([-1] + list(data_query.shape[-3:]))) emb_query = emb_query.reshape(1, n_query, -1) if opt.head == 'SVM': logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot, maxIter=3) else: logits = cls_head(emb_query, emb_support, labels_support, opt.way, opt.shot) acc = count_accuracy(logits.reshape(-1, opt.way), labels_query.reshape(-1)) test_accuracies.append(acc.item()) avg = np.mean(np.array(test_accuracies)) std = np.std(np.array(test_accuracies)) ci = std / np.sqrt(i + 1) if i % 50 == 0: print('Episode [{}/{}]:\t\t\tAccuracy: {:.2f} ± {:.2f} % ({:.2f} %)'\ .format(i, opt.episode, avg, ci, acc))
true
true
f71cee5ae51aff0ff873d978caa5e27b2de22765
2,707
py
Python
ganggu/asynctask.py
xuecan/ganggu
f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09
[ "MIT" ]
null
null
null
ganggu/asynctask.py
xuecan/ganggu
f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09
[ "MIT" ]
null
null
null
ganggu/asynctask.py
xuecan/ganggu
f3d3727fc8228b899d2e2c7ebe99b9e4a9926a09
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (C) 2012-2016 Xue Can <xuecan@gmail.com> and contributors. # Licensed under the MIT license: http://opensource.org/licenses/mit-license """ Celery 应用程序生成器 Celery 应用程序的配置众多,这里提供一个快速的生成器,避免经常需要查阅手册。 本模块根据 Celery 4.0.0rc4 重新编写。配置详情请参考: * http://docs.celeryproject.org/en/master/userguide/configuration.html """ import celery if '4.0.0' > celery.__version__: raise RuntimeError('Require celery 4.0.0rc4 or up') from celery import Celery from kombu.exceptions import OperationalError from .datastructures import Object def make_worker(name, set_as_current=True): """返回默认的 worker 实例,还需要进一步配置方可使用""" name = str(name) worker = Celery(name, set_as_current=set_as_current) worker.conf.update( # names task_default_queue=name, task_default_exchange=name, task_default_routing_key=name, # genenals accept_content=['json'], enable_utc=True, timezone='Asia/Shanghai', # tasks task_serializer='json', task_compression=None, task_protocol=2, task_track_started=True, task_publish_retry=False, # no retry # results result_serializer='json', result_compression=None, result_expires=3600, # 1 hour # workers worker_prefetch_multiplier=1, # no prefetch worker_disable_rate_limits=True, # no rate limit worker_max_tasks_per_child=1000, # prevent memory leak worker_hijack_root_logger=False # we have logkit ) return worker def with_retries(worker, max_=3, start=0, interval=0.2): worker.conf.update( task_publish_retry=True, task_publish_retry_policy={ 'max_retries': max_, 'interval_start': start, 'interval_step': interval, 'interval_max': interval, } ) def _with_broker(worker, broker, read_broker=None): if read_broker: worker.conf.broker_write_url = broker worker.conf.broker_read_url = read_broker else: worker.conf.broker_url = broker def with_amqp_broker(worker, broker, read_broker=None): worker.conf.task_queue_ha_policy = 'all' _with_broker(worker, broker, read_broker) def with_redis_broker(worker, broker, read_broker=None): worker.conf.broker_transport_options = { 
'visibility_timeout': 3600, 'fanout_prefix': True, 'fanout_patterns': True, } _with_broker(worker, broker, read_broker) def with_backend(worker, backend): worker.conf.result_backend = backend # patch: don't use image import celery.utils.term celery.utils.term.supports_images = lambda: False
27.907216
76
0.674917
import celery if '4.0.0' > celery.__version__: raise RuntimeError('Require celery 4.0.0rc4 or up') from celery import Celery from kombu.exceptions import OperationalError from .datastructures import Object def make_worker(name, set_as_current=True): name = str(name) worker = Celery(name, set_as_current=set_as_current) worker.conf.update( task_default_queue=name, task_default_exchange=name, task_default_routing_key=name, accept_content=['json'], enable_utc=True, timezone='Asia/Shanghai', task_serializer='json', task_compression=None, task_protocol=2, task_track_started=True, task_publish_retry=False, result_serializer='json', result_compression=None, result_expires=3600, worker_prefetch_multiplier=1, worker_disable_rate_limits=True, worker_max_tasks_per_child=1000, worker_hijack_root_logger=False ) return worker def with_retries(worker, max_=3, start=0, interval=0.2): worker.conf.update( task_publish_retry=True, task_publish_retry_policy={ 'max_retries': max_, 'interval_start': start, 'interval_step': interval, 'interval_max': interval, } ) def _with_broker(worker, broker, read_broker=None): if read_broker: worker.conf.broker_write_url = broker worker.conf.broker_read_url = read_broker else: worker.conf.broker_url = broker def with_amqp_broker(worker, broker, read_broker=None): worker.conf.task_queue_ha_policy = 'all' _with_broker(worker, broker, read_broker) def with_redis_broker(worker, broker, read_broker=None): worker.conf.broker_transport_options = { 'visibility_timeout': 3600, 'fanout_prefix': True, 'fanout_patterns': True, } _with_broker(worker, broker, read_broker) def with_backend(worker, backend): worker.conf.result_backend = backend import celery.utils.term celery.utils.term.supports_images = lambda: False
true
true
f71ceecd29d1f9ce5d1bf239127cbd90242a62b2
7,255
py
Python
src/layers/xfmr.py
uw-bionlp/ards
e9fc27f7034cc6b54f0ccdba4a58377948cf0258
[ "BSD-3-Clause" ]
null
null
null
src/layers/xfmr.py
uw-bionlp/ards
e9fc27f7034cc6b54f0ccdba4a58377948cf0258
[ "BSD-3-Clause" ]
null
null
null
src/layers/xfmr.py
uw-bionlp/ards
e9fc27f7034cc6b54f0ccdba4a58377948cf0258
[ "BSD-3-Clause" ]
null
null
null
import torch from tqdm import tqdm from transformers import AutoTokenizer, AutoModel import logging from torch.nn import ConstantPad3d, ConstantPad2d from layers.utils import set_model_device, set_tensor_device ''' tutorial4 tokenization https://mccormickml.com/2019/07/22/BERT-fine-tuning/ how to use clinical bert https://huggingface.co/emilyalsentzer/Bio_ClinicalBERT align ng character offsets with bert tokenization https://github.com/LightTag/sequence-labeling-with-transformers/blob/master/sequence_aligner/dataset.py ''' INPUT_IDS = 'input_ids' ATTENTION_MASK = 'attention_mask' OFFSET_MAPPING = 'offset_mapping' PRETRAINED = "emilyalsentzer/Bio_ClinicalBERT" def tokenize_documents(documents, \ pretrained=PRETRAINED, add_special_tokens=True, max_length=50, return_attention_mask=True, return_tensors='pt', return_offsets_mapping=True, is_split_into_words=False ): logging.info("Tokenization using AutoTokenizer") # Instantiate tokenizer tokenizer = AutoTokenizer.from_pretrained(pretrained) # Tokenize all of the sentences and map the tokens to thier word IDs. input_ids = [] mask = [] offsets = [] pbar = tqdm(total=len(documents)) for i, text in enumerate(documents): # `encode_plus` will: # (1) Tokenize the sentence. # (2) Prepend the `[CLS]` token to the start. # (3) Append the `[SEP]` token to the end. # (4) Map tokens to their IDs. # (5) Pad or truncate the sentence to `max_length` # (6) Create attention masks for [PAD] tokens. encoded_dict = tokenizer.batch_encode_plus( text, # Sentence to encode. add_special_tokens = add_special_tokens, # Add '[CLS]' and '[SEP]' max_length = max_length, # Pad & truncate all sentences. padding = 'max_length', truncation = True, return_attention_mask = return_attention_mask, # Construct attn. masks. return_tensors = return_tensors, # Return pytorch tensors. 
return_offsets_mapping = return_offsets_mapping, is_split_into_words = is_split_into_words) input_ids.append(encoded_dict[INPUT_IDS]) mask.append(encoded_dict[ATTENTION_MASK]) offsets_ = encoded_dict[OFFSET_MAPPING].tolist() offsets_ = [[tuple(token) for token in sentence] for sentence in offsets_] offsets.append(offsets_) if i == 0: logging.info("-"*80) logging.info("") logging.info("Returned params:\n{}".format(encoded_dict.keys())) logging.info("") logging.info('Input:\n{}'.format(text)) logging.info("") #logging.info('IDs: {}\n{}'.format(input_ids[0].shape, input_ids[0])) logging.info('IDs: {}'.format(input_ids[0].shape)) logging.info("") #logging.info('Attn: {}\n{}'.format(mask[0].shape, mask[0])) logging.info('Attn: {}'.format(mask[0].shape)) wps = [tokenizer.convert_ids_to_tokens(ids_) for ids_ in input_ids[0].squeeze()] logging.info("") logging.info('Tok:\n') for wps_ in wps[:10]: logging.info(f'{wps_[:10]} ....') #logging.info("") #logging.info('Idx:\n{}'.format(offsets[0])) #logging.info("") #logging.info("-"*80) pbar.update() pbar.close() logging.info("") logging.info('Document count: {}'.format(len(input_ids))) logging.info("") return (input_ids, mask, offsets) def encode_documents(input_ids, mask, \ pretrained=PRETRAINED, device=None, train=False): logging.info("Embedding using AutoModel") model = AutoModel.from_pretrained(pretrained) if train: model.train() else: model.eval() set_model_device(model, device) X = [] masks = [] pbar = tqdm(total=len(input_ids)) assert len(input_ids) == len(mask) for i, (ids, msk) in enumerate(zip(input_ids, mask)): ids = set_tensor_device(ids, device) msk = set_tensor_device(msk, device) x = model( \ ids, token_type_ids=None, attention_mask=msk)[0] x = x.cpu().detach() X.append(x) if i == 1: logging.info("Encode documents") #logging.info("-"*80) #logging.info("") #logging.info('IDs: {}\n{}'.format(ids.shape, ids)) logging.info('IDs: {}'.format(ids.shape)) #logging.info("") #logging.info('Mask: {}\n{}'.format(msk.shape, 
msk)) logging.info('Mask: {}'.format(msk.shape)) #logging.info("") #logging.info('X: {}\n{}'.format(x.shape, x)) logging.info('X: {}'.format(x.shape)) logging.info('') #logging.info("") #logging.info("-"*80) pbar.update() pbar.close() logging.info("") logging.info('Document count: {}'.format(len(X))) logging.info("") return X def char2wordpiece(start, end, offsets): ''' convert character indices to word piece indices (i.e. document) Parameters ---------- char_indices: character indices for span offsets: offsets returned by transformer tokenizer Returns ------- word_indices: word piece indices for spans ''' start_new = -1 end_new = -1 for index, (start_word, end_word) in enumerate(offsets): # start_word = character index of word piece start (inclusive) # end_word = character index of word piece end (exclusive) # index = index of word peice in sentence if (start_new == -1) and \ (start >= start_word) and \ (start < end_word): start_new = index if (end_new == -1) and \ (end > start_word) and \ (end <= end_word): # add one so end_new is exclusive end_new = index + 1 assert start_new != -1 assert end_new != -1 return (start_new, end_new) def wordpiece2char(start, end, offsets): ''' convert word piece indices to character indices for sequence of sentences (i.e. 
document) Parameters ---------- word_indices: word piece indices for spans offsets: offsets returned by transformer tokenizer Returns ------- char_indices: character indices per spans ''' indices = offsets[start:end] # character index of start start_new = indices[0][0] # character index of end end_new = indices[-1][-1] return (start_new, end_new) def demo(): #loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict] #for logger in loggers:# # logger.setLevel(logging.info) documents = [['patient is reporting fever and cough.', 'chest x re indicates bilateral infile traits'], ['diffuse lung disease', 'reporting position is addr']] tokens = tokenize_documents(documents, max_length=19) embedding = encode_documents(tokens)
26.478102
107
0.596554
import torch from tqdm import tqdm from transformers import AutoTokenizer, AutoModel import logging from torch.nn import ConstantPad3d, ConstantPad2d from layers.utils import set_model_device, set_tensor_device INPUT_IDS = 'input_ids' ATTENTION_MASK = 'attention_mask' OFFSET_MAPPING = 'offset_mapping' PRETRAINED = "emilyalsentzer/Bio_ClinicalBERT" def tokenize_documents(documents, \ pretrained=PRETRAINED, add_special_tokens=True, max_length=50, return_attention_mask=True, return_tensors='pt', return_offsets_mapping=True, is_split_into_words=False ): logging.info("Tokenization using AutoTokenizer") tokenizer = AutoTokenizer.from_pretrained(pretrained) input_ids = [] mask = [] offsets = [] pbar = tqdm(total=len(documents)) for i, text in enumerate(documents): encoded_dict = tokenizer.batch_encode_plus( text, add_special_tokens = add_special_tokens, max_length = max_length, padding = 'max_length', truncation = True, return_attention_mask = return_attention_mask, return_tensors = return_tensors, return_offsets_mapping = return_offsets_mapping, is_split_into_words = is_split_into_words) input_ids.append(encoded_dict[INPUT_IDS]) mask.append(encoded_dict[ATTENTION_MASK]) offsets_ = encoded_dict[OFFSET_MAPPING].tolist() offsets_ = [[tuple(token) for token in sentence] for sentence in offsets_] offsets.append(offsets_) if i == 0: logging.info("-"*80) logging.info("") logging.info("Returned params:\n{}".format(encoded_dict.keys())) logging.info("") logging.info('Input:\n{}'.format(text)) logging.info("") logging.info('IDs: {}'.format(input_ids[0].shape)) logging.info("") logging.info('Attn: {}'.format(mask[0].shape)) wps = [tokenizer.convert_ids_to_tokens(ids_) for ids_ in input_ids[0].squeeze()] logging.info("") logging.info('Tok:\n') for wps_ in wps[:10]: logging.info(f'{wps_[:10]} ....') pbar.update() pbar.close() logging.info("") logging.info('Document count: {}'.format(len(input_ids))) logging.info("") return (input_ids, mask, offsets) def encode_documents(input_ids, 
mask, \ pretrained=PRETRAINED, device=None, train=False): logging.info("Embedding using AutoModel") model = AutoModel.from_pretrained(pretrained) if train: model.train() else: model.eval() set_model_device(model, device) X = [] masks = [] pbar = tqdm(total=len(input_ids)) assert len(input_ids) == len(mask) for i, (ids, msk) in enumerate(zip(input_ids, mask)): ids = set_tensor_device(ids, device) msk = set_tensor_device(msk, device) x = model( \ ids, token_type_ids=None, attention_mask=msk)[0] x = x.cpu().detach() X.append(x) if i == 1: logging.info("Encode documents") logging.info('IDs: {}'.format(ids.shape)) logging.info('Mask: {}'.format(msk.shape)) logging.info('X: {}'.format(x.shape)) logging.info('') pbar.update() pbar.close() logging.info("") logging.info('Document count: {}'.format(len(X))) logging.info("") return X def char2wordpiece(start, end, offsets): start_new = -1 end_new = -1 for index, (start_word, end_word) in enumerate(offsets): if (start_new == -1) and \ (start >= start_word) and \ (start < end_word): start_new = index if (end_new == -1) and \ (end > start_word) and \ (end <= end_word): end_new = index + 1 assert start_new != -1 assert end_new != -1 return (start_new, end_new) def wordpiece2char(start, end, offsets): indices = offsets[start:end] start_new = indices[0][0] end_new = indices[-1][-1] return (start_new, end_new) def demo(): documents = [['patient is reporting fever and cough.', 'chest x re indicates bilateral infile traits'], ['diffuse lung disease', 'reporting position is addr']] tokens = tokenize_documents(documents, max_length=19) embedding = encode_documents(tokens)
true
true
f71ceed56868a5e9294f76930727f0329ca98560
113
py
Python
docs/docs_app/not_found.py
glsdown/dash-loading-spinners
5fdfe9fc439b6c7aa624c23fb72123b785c9de8e
[ "MIT" ]
14
2021-06-21T16:34:20.000Z
2022-02-25T21:42:30.000Z
docs/docs_app/not_found.py
glsdown/dash-loading-spinners
5fdfe9fc439b6c7aa624c23fb72123b785c9de8e
[ "MIT" ]
null
null
null
docs/docs_app/not_found.py
glsdown/dash-loading-spinners
5fdfe9fc439b6c7aa624c23fb72123b785c9de8e
[ "MIT" ]
null
null
null
import dash_bootstrap_components as dbc layout = dbc.Jumbotron(["404 - Not Found"], className="h4 text-danger")
28.25
71
0.761062
import dash_bootstrap_components as dbc layout = dbc.Jumbotron(["404 - Not Found"], className="h4 text-danger")
true
true
f71ceed8923c4307e3f9f61c40b8ca31ec7f9d14
3,034
py
Python
MxShop/extra_apps/xadmin/views/website.py
youshuad/django-vue-shop
dbede2301b10cb95ef30d0bbbbd594b240071fc1
[ "MIT" ]
null
null
null
MxShop/extra_apps/xadmin/views/website.py
youshuad/django-vue-shop
dbede2301b10cb95ef30d0bbbbd594b240071fc1
[ "MIT" ]
null
null
null
MxShop/extra_apps/xadmin/views/website.py
youshuad/django-vue-shop
dbede2301b10cb95ef30d0bbbbd594b240071fc1
[ "MIT" ]
null
null
null
from __future__ import absolute_import from django.utils.translation import ugettext as _ from django.contrib.auth import REDIRECT_FIELD_NAME from django.views.decorators.cache import never_cache # from django.contrib.auth import login,logout,authenticate from django.contrib.auth.views import LoginView as login from django.contrib.auth.views import LogoutView as logout from django.contrib.auth.forms import UserCreationForm from django.http import HttpResponse from .base import BaseAdminView, filter_hook from .dashboard import Dashboard from xadmin.forms import AdminAuthenticationForm from xadmin.models import UserSettings from xadmin.layout import FormHelper class IndexView(Dashboard): title = _("Main Dashboard") icon = "fa fa-dashboard" def get_page_id(self): return 'home' class UserSettingView(BaseAdminView): @never_cache def post(self, request): key = request.POST['key'] val = request.POST['value'] us, created = UserSettings.objects.get_or_create( user=self.user, key=key) us.value = val us.save() return HttpResponse('') class LoginView(BaseAdminView): title = _("Please Login") login_form = None login_template = None @filter_hook def update_params(self, defaults): pass @never_cache def get(self, request, *args, **kwargs): context = self.get_context() helper = FormHelper() helper.form_tag = False helper.include_media = False context.update({ 'title': self.title, 'helper': helper, 'app_path': request.get_full_path(), REDIRECT_FIELD_NAME: request.get_full_path(), }) defaults = { 'extra_context': context, # 'current_app': self.admin_site.name, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'xadmin/views/login.html', } self.update_params(defaults) # return login(request, **defaults) return login.as_view(**defaults)(request) @never_cache def post(self, request, *args, **kwargs): return self.get(request) class LogoutView(BaseAdminView): logout_template = None need_site_permission = False @filter_hook def 
update_params(self, defaults): pass @never_cache def get(self, request, *args, **kwargs): context = self.get_context() defaults = { 'extra_context': context, # 'current_app': self.admin_site.name, 'template_name': self.logout_template or 'xadmin/views/logged_out.html', } if self.logout_template is not None: defaults['template_name'] = self.logout_template self.update_params(defaults) # return logout(request, **defaults) return logout.as_view(**defaults)(request) @never_cache def post(self, request, *args, **kwargs): return self.get(request)
29.456311
84
0.66381
from __future__ import absolute_import from django.utils.translation import ugettext as _ from django.contrib.auth import REDIRECT_FIELD_NAME from django.views.decorators.cache import never_cache from django.contrib.auth.views import LoginView as login from django.contrib.auth.views import LogoutView as logout from django.contrib.auth.forms import UserCreationForm from django.http import HttpResponse from .base import BaseAdminView, filter_hook from .dashboard import Dashboard from xadmin.forms import AdminAuthenticationForm from xadmin.models import UserSettings from xadmin.layout import FormHelper class IndexView(Dashboard): title = _("Main Dashboard") icon = "fa fa-dashboard" def get_page_id(self): return 'home' class UserSettingView(BaseAdminView): @never_cache def post(self, request): key = request.POST['key'] val = request.POST['value'] us, created = UserSettings.objects.get_or_create( user=self.user, key=key) us.value = val us.save() return HttpResponse('') class LoginView(BaseAdminView): title = _("Please Login") login_form = None login_template = None @filter_hook def update_params(self, defaults): pass @never_cache def get(self, request, *args, **kwargs): context = self.get_context() helper = FormHelper() helper.form_tag = False helper.include_media = False context.update({ 'title': self.title, 'helper': helper, 'app_path': request.get_full_path(), REDIRECT_FIELD_NAME: request.get_full_path(), }) defaults = { 'extra_context': context, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'xadmin/views/login.html', } self.update_params(defaults) return login.as_view(**defaults)(request) @never_cache def post(self, request, *args, **kwargs): return self.get(request) class LogoutView(BaseAdminView): logout_template = None need_site_permission = False @filter_hook def update_params(self, defaults): pass @never_cache def get(self, request, *args, **kwargs): context = self.get_context() defaults = { 
'extra_context': context, 'template_name': self.logout_template or 'xadmin/views/logged_out.html', } if self.logout_template is not None: defaults['template_name'] = self.logout_template self.update_params(defaults) return logout.as_view(**defaults)(request) @never_cache def post(self, request, *args, **kwargs): return self.get(request)
true
true
f71ceefdeb70227f2eeec78efabe0c05cba65a6e
5,151
py
Python
docs-src/source/conf.py
daxnet/apworks-core
29347f7fe93fa547c3cdfdbf5eec31fc1a4dcb32
[ "Apache-2.0" ]
212
2017-02-25T07:56:35.000Z
2022-03-29T01:58:48.000Z
docs-src/source/conf.py
dahaoniuniu/apworks-core
580492f0d03633a4a1099b44dd967a06d8fb8308
[ "Apache-2.0" ]
7
2017-04-08T14:06:49.000Z
2021-03-30T12:58:06.000Z
docs-src/source/conf.py
dahaoniuniu/apworks-core
580492f0d03633a4a1099b44dd967a06d8fb8308
[ "Apache-2.0" ]
59
2017-04-03T06:47:10.000Z
2021-08-18T05:40:47.000Z
# -*- coding: utf-8 -*- # # Apworks documentation build configuration file, created by # sphinx-quickstart on Sat Mar 25 15:41:49 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Apworks' copyright = u'2017, Sunny Chen' author = u'Sunny Chen' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'1.0' # The full version, including alpha/beta/rc tags. release = u'1.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. 
# Usually you set "language" from the command line for these cases. language = 'en,zh_CN' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'Apworksdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). 
latex_documents = [ (master_doc, 'Apworks.tex', u'Apworks Documentation', u'Sunny Chen', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'apworks', u'Apworks Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Apworks', u'Apworks Documentation', author, 'Apworks', 'One line description of project.', 'Miscellaneous'), ] # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html']
28.938202
79
0.677732
extensions = [] templates_path = ['_templates'] source_suffix = '.rst' master_doc = 'index' project = u'Apworks' copyright = u'2017, Sunny Chen' author = u'Sunny Chen' # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'1.0' # The full version, including alpha/beta/rc tags. release = u'1.0.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = 'en,zh_CN' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. 
htmlhelp_basename = 'Apworksdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'Apworks.tex', u'Apworks Documentation', u'Sunny Chen', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'apworks', u'Apworks Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'Apworks', u'Apworks Documentation', author, 'Apworks', 'One line description of project.', 'Miscellaneous'), ] # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = project epub_author = author epub_publisher = author epub_copyright = copyright # The unique identifier of the text. This can be a ISBN number # or the project homepage. # # epub_identifier = '' # A unique identification for the text. # # epub_uid = '' # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html']
true
true
f71cf026b7d351eb49d776725321c404f4254c2b
1,032
py
Python
proxy_client.py
AcidCannon/CMPUT404.W2021.LAB2
cb680f98bc625415a4fb25bc091e802ba05df238
[ "Apache-2.0" ]
null
null
null
proxy_client.py
AcidCannon/CMPUT404.W2021.LAB2
cb680f98bc625415a4fb25bc091e802ba05df238
[ "Apache-2.0" ]
null
null
null
proxy_client.py
AcidCannon/CMPUT404.W2021.LAB2
cb680f98bc625415a4fb25bc091e802ba05df238
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 import socket # CONSTANTS OUTBOUND_HOST = "127.0.0.1" OUTBOUND_PORT = 8001 OUTBOUND_BUFFER_SIZE = 1024 PAYLOAD_URL = "www.google.com" PAYLOAD = f"GET / HTTP/1.0\r\nHost: {PAYLOAD_URL}\r\n\r\n" def main(): # Create a socket object with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: # Connect to the proxy server s.connect((OUTBOUND_HOST, OUTBOUND_PORT)) # Send the payload to the proxy server s.sendall(PAYLOAD.encode()) # Get IP and port of peer peer_addr = s.getpeername() # No longer write/send s.shutdown(socket.SHUT_WR) # Reading data until no more left data = b"" while True: fetched_data = s.recv(OUTBOUND_BUFFER_SIZE) if not fetched_data: break data += fetched_data print("Received From:", str(peer_addr[0]) + ":" + str(peer_addr[1]), "Content:", data) if __name__ == "__main__": main()
27.157895
95
0.591085
import socket OUTBOUND_HOST = "127.0.0.1" OUTBOUND_PORT = 8001 OUTBOUND_BUFFER_SIZE = 1024 PAYLOAD_URL = "www.google.com" PAYLOAD = f"GET / HTTP/1.0\r\nHost: {PAYLOAD_URL}\r\n\r\n" def main(): with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.connect((OUTBOUND_HOST, OUTBOUND_PORT)) s.sendall(PAYLOAD.encode()) peer_addr = s.getpeername() s.shutdown(socket.SHUT_WR) data = b"" while True: fetched_data = s.recv(OUTBOUND_BUFFER_SIZE) if not fetched_data: break data += fetched_data print("Received From:", str(peer_addr[0]) + ":" + str(peer_addr[1]), "Content:", data) if __name__ == "__main__": main()
true
true
f71cf03658518edaba3cbd0aee36d8141f9b1311
11,097
py
Python
src/olympia/users/forms.py
Rob--W/addons-server
cc104705e17ddeeb57254403ed292acb904a9a41
[ "BSD-3-Clause" ]
1
2020-04-07T07:21:25.000Z
2020-04-07T07:21:25.000Z
src/olympia/users/forms.py
Rob--W/addons-server
cc104705e17ddeeb57254403ed292acb904a9a41
[ "BSD-3-Clause" ]
null
null
null
src/olympia/users/forms.py
Rob--W/addons-server
cc104705e17ddeeb57254403ed292acb904a9a41
[ "BSD-3-Clause" ]
2
2018-03-04T00:11:22.000Z
2019-12-14T09:45:55.000Z
import os import re from django import forms from django.conf import settings from django.core.files.storage import default_storage as storage from django.utils.translation import ugettext as _, ugettext_lazy as _lazy import commonware.log from olympia import amo from olympia.accounts.views import fxa_error_message from olympia.amo.fields import HttpHttpsOnlyURLField from olympia.users import notifications from olympia.amo.utils import clean_nl, has_links, slug_validator from olympia.lib import happyforms from olympia.translations import LOCALES from . import tasks from .models import ( UserProfile, UserNotification, BlacklistedName) from .widgets import ( NotificationsSelectMultiple, RequiredCheckboxInput, RequiredEmailInput, RequiredTextarea) log = commonware.log.getLogger('z.users') admin_re = re.compile('(?=.*\d)(?=.*[a-zA-Z])') class UserDeleteForm(forms.Form): email = forms.CharField(max_length=255, required=True, widget=RequiredEmailInput) confirm = forms.BooleanField(required=True, widget=RequiredCheckboxInput) def __init__(self, *args, **kwargs): self.request = kwargs.pop('request', None) super(UserDeleteForm, self).__init__(*args, **kwargs) self.fields['email'].widget.attrs['placeholder'] = ( self.request.user.email) def clean_email(self): user_email = self.request.user.email if not user_email == self.cleaned_data['email']: raise forms.ValidationError(_('Email must be {email}.').format( email=user_email)) def clean(self): amouser = self.request.user if amouser.is_developer: # This is tampering because the form isn't shown on the page if the # user is a developer log.warning(u'[Tampering] Attempt to delete developer account (%s)' % self.request.user) raise forms.ValidationError("") class UserEditForm(happyforms.ModelForm): username = forms.CharField(max_length=50, required=False) display_name = forms.CharField(label=_lazy(u'Display Name'), max_length=50, required=False) location = forms.CharField(label=_lazy(u'Location'), max_length=100, required=False) 
occupation = forms.CharField(label=_lazy(u'Occupation'), max_length=100, required=False) homepage = HttpHttpsOnlyURLField(label=_lazy(u'Homepage'), required=False) email = forms.EmailField( required=False, help_text=fxa_error_message( _(u'Firefox Accounts users cannot currently change their email ' u'address.')), widget=forms.EmailInput(attrs={'readonly': 'readonly'})) photo = forms.FileField(label=_lazy(u'Profile Photo'), required=False) notifications = forms.MultipleChoiceField( choices=[], widget=NotificationsSelectMultiple, initial=notifications.NOTIFICATIONS_DEFAULT, required=False) lang = forms.TypedChoiceField(label=_lazy(u'Default locale'), choices=LOCALES) def __init__(self, *args, **kwargs): self.request = kwargs.pop('request', None) instance = kwargs.get('instance') if instance and instance.has_anonymous_username(): kwargs.setdefault('initial', {}) kwargs['initial']['username'] = '' super(UserEditForm, self).__init__(*args, **kwargs) errors = {'invalid': _('This URL has an invalid format. ' 'Valid URLs look like ' 'http://example.com/my_page.')} self.fields['homepage'].error_messages = errors if not self.instance.lang and self.request: self.initial['lang'] = self.request.LANG if self.instance: default = dict((i, n.default_checked) for i, n in notifications.NOTIFICATIONS_BY_ID.items()) user = dict((n.notification_id, n.enabled) for n in self.instance.notifications.all()) default.update(user) # Add choices to Notification. choices = notifications.NOTIFICATIONS_CHOICES if not self.instance.is_developer: choices = notifications.NOTIFICATIONS_CHOICES_NOT_DEV # Append a "NEW" message to new notification options. 
saved = self.instance.notifications.values_list('notification_id', flat=True) self.choices_status = {} for idx, label in choices: self.choices_status[idx] = idx not in saved self.fields['notifications'].choices = choices self.fields['notifications'].initial = [i for i, v in default.items() if v] self.fields['notifications'].widget.form_instance = self class Meta: model = UserProfile fields = ( 'username', 'email', 'display_name', 'location', 'occupation', 'homepage', 'photo', 'lang', 'bio', 'display_collections', 'display_collections_fav', 'notifications', ) def clean_username(self): name = self.cleaned_data['username'] if not name: if self.instance.has_anonymous_username(): name = self.instance.username else: name = self.instance.anonymize_username() # All-digits usernames are disallowed since they can be # confused for user IDs in URLs. (See bug 862121.) if name.isdigit(): raise forms.ValidationError( _('Usernames cannot contain only digits.')) slug_validator( name, lower=False, message=_('Enter a valid username consisting of letters, numbers, ' 'underscores or hyphens.')) if BlacklistedName.blocked(name): raise forms.ValidationError(_('This username cannot be used.')) # FIXME: Bug 858452. Remove this check when collation of the username # column is changed to case insensitive. if (UserProfile.objects.exclude(id=self.instance.id) .filter(username__iexact=name).exists()): raise forms.ValidationError(_('This username is already in use.')) return name def clean_display_name(self): name = self.cleaned_data['display_name'] if BlacklistedName.blocked(name): raise forms.ValidationError(_('This display name cannot be used.')) return name def clean_email(self): # TODO(django 1.9): Change the field to disabled=True and remove this. 
return self.instance.email def clean_photo(self): photo = self.cleaned_data['photo'] if not photo: return if photo.content_type not in ('image/png', 'image/jpeg'): raise forms.ValidationError( _('Images must be either PNG or JPG.')) if photo.size > settings.MAX_PHOTO_UPLOAD_SIZE: raise forms.ValidationError( _('Please use images smaller than %dMB.' % (settings.MAX_PHOTO_UPLOAD_SIZE / 1024 / 1024 - 1))) return photo def clean_bio(self): bio = self.cleaned_data['bio'] normalized = clean_nl(unicode(bio)) if has_links(normalized): # There's some links, we don't want them. raise forms.ValidationError(_('No links are allowed.')) return bio def save(self, log_for_developer=True): u = super(UserEditForm, self).save(commit=False) data = self.cleaned_data photo = data['photo'] if photo: u.picture_type = 'image/png' tmp_destination = u.picture_path + '__unconverted' with storage.open(tmp_destination, 'wb') as fh: for chunk in photo.chunks(): fh.write(chunk) tasks.resize_photo.delay(tmp_destination, u.picture_path, set_modified_on=[u]) for (i, n) in notifications.NOTIFICATIONS_BY_ID.items(): enabled = n.mandatory or (str(i) in data['notifications']) UserNotification.update_or_create( user=u, notification_id=i, update={'enabled': enabled}) log.debug(u'User (%s) updated their profile' % u) u.save() return u class AdminUserEditForm(UserEditForm): """This is the form used by admins to edit users' info.""" email = forms.EmailField(widget=RequiredEmailInput) admin_log = forms.CharField(required=True, label='Reason for change', widget=RequiredTextarea(attrs={'rows': 4})) notes = forms.CharField(required=False, label='Notes', widget=forms.Textarea(attrs={'rows': 4})) anonymize = forms.BooleanField(required=False) def changed_fields(self): """Returns changed_data ignoring these fields.""" return (set(self.changed_data) - set(['admin_log', 'notifications', 'photo'])) def changes(self): """A dictionary of changed fields, old, new.""" details = dict([(k, (self.initial[k], 
self.cleaned_data[k])) for k in self.changed_fields()]) return details def clean_anonymize(self): if (self.cleaned_data['anonymize'] and self.changed_fields() != set(['anonymize'])): raise forms.ValidationError(_('To anonymize, enter a reason for' ' the change but do not change any' ' other field.')) return self.cleaned_data['anonymize'] def clean_email(self): return self.cleaned_data['email'] def save(self, *args, **kw): profile = super(AdminUserEditForm, self).save(log_for_developer=False) if self.cleaned_data['anonymize']: amo.log(amo.LOG.ADMIN_USER_ANONYMIZED, self.instance, self.cleaned_data['admin_log']) profile.anonymize() # This also logs else: amo.log(amo.LOG.ADMIN_USER_EDITED, self.instance, self.cleaned_data['admin_log'], details=self.changes()) log.info('Admin edit user: %s changed fields: %s' % (self.instance, self.changed_fields())) return profile class BlacklistedNameAddForm(forms.Form): """Form for adding blacklisted names in bulk fashion.""" names = forms.CharField(widget=forms.Textarea( attrs={'cols': 40, 'rows': 16})) def clean_names(self): names = self.cleaned_data['names'].strip() if not names: raise forms.ValidationError( _('Please enter at least one name to blacklist.')) names = os.linesep.join( [s.strip() for s in names.splitlines() if s.strip()]) return names
39.212014
79
0.605119
import os import re from django import forms from django.conf import settings from django.core.files.storage import default_storage as storage from django.utils.translation import ugettext as _, ugettext_lazy as _lazy import commonware.log from olympia import amo from olympia.accounts.views import fxa_error_message from olympia.amo.fields import HttpHttpsOnlyURLField from olympia.users import notifications from olympia.amo.utils import clean_nl, has_links, slug_validator from olympia.lib import happyforms from olympia.translations import LOCALES from . import tasks from .models import ( UserProfile, UserNotification, BlacklistedName) from .widgets import ( NotificationsSelectMultiple, RequiredCheckboxInput, RequiredEmailInput, RequiredTextarea) log = commonware.log.getLogger('z.users') admin_re = re.compile('(?=.*\d)(?=.*[a-zA-Z])') class UserDeleteForm(forms.Form): email = forms.CharField(max_length=255, required=True, widget=RequiredEmailInput) confirm = forms.BooleanField(required=True, widget=RequiredCheckboxInput) def __init__(self, *args, **kwargs): self.request = kwargs.pop('request', None) super(UserDeleteForm, self).__init__(*args, **kwargs) self.fields['email'].widget.attrs['placeholder'] = ( self.request.user.email) def clean_email(self): user_email = self.request.user.email if not user_email == self.cleaned_data['email']: raise forms.ValidationError(_('Email must be {email}.').format( email=user_email)) def clean(self): amouser = self.request.user if amouser.is_developer: # user is a developer log.warning(u'[Tampering] Attempt to delete developer account (%s)' % self.request.user) raise forms.ValidationError("") class UserEditForm(happyforms.ModelForm): username = forms.CharField(max_length=50, required=False) display_name = forms.CharField(label=_lazy(u'Display Name'), max_length=50, required=False) location = forms.CharField(label=_lazy(u'Location'), max_length=100, required=False) occupation = forms.CharField(label=_lazy(u'Occupation'), 
max_length=100, required=False) homepage = HttpHttpsOnlyURLField(label=_lazy(u'Homepage'), required=False) email = forms.EmailField( required=False, help_text=fxa_error_message( _(u'Firefox Accounts users cannot currently change their email ' u'address.')), widget=forms.EmailInput(attrs={'readonly': 'readonly'})) photo = forms.FileField(label=_lazy(u'Profile Photo'), required=False) notifications = forms.MultipleChoiceField( choices=[], widget=NotificationsSelectMultiple, initial=notifications.NOTIFICATIONS_DEFAULT, required=False) lang = forms.TypedChoiceField(label=_lazy(u'Default locale'), choices=LOCALES) def __init__(self, *args, **kwargs): self.request = kwargs.pop('request', None) instance = kwargs.get('instance') if instance and instance.has_anonymous_username(): kwargs.setdefault('initial', {}) kwargs['initial']['username'] = '' super(UserEditForm, self).__init__(*args, **kwargs) errors = {'invalid': _('This URL has an invalid format. ' 'Valid URLs look like ' 'http://example.com/my_page.')} self.fields['homepage'].error_messages = errors if not self.instance.lang and self.request: self.initial['lang'] = self.request.LANG if self.instance: default = dict((i, n.default_checked) for i, n in notifications.NOTIFICATIONS_BY_ID.items()) user = dict((n.notification_id, n.enabled) for n in self.instance.notifications.all()) default.update(user) # Add choices to Notification. choices = notifications.NOTIFICATIONS_CHOICES if not self.instance.is_developer: choices = notifications.NOTIFICATIONS_CHOICES_NOT_DEV # Append a "NEW" message to new notification options. 
saved = self.instance.notifications.values_list('notification_id', flat=True) self.choices_status = {} for idx, label in choices: self.choices_status[idx] = idx not in saved self.fields['notifications'].choices = choices self.fields['notifications'].initial = [i for i, v in default.items() if v] self.fields['notifications'].widget.form_instance = self class Meta: model = UserProfile fields = ( 'username', 'email', 'display_name', 'location', 'occupation', 'homepage', 'photo', 'lang', 'bio', 'display_collections', 'display_collections_fav', 'notifications', ) def clean_username(self): name = self.cleaned_data['username'] if not name: if self.instance.has_anonymous_username(): name = self.instance.username else: name = self.instance.anonymize_username() # All-digits usernames are disallowed since they can be # confused for user IDs in URLs. (See bug 862121.) if name.isdigit(): raise forms.ValidationError( _('Usernames cannot contain only digits.')) slug_validator( name, lower=False, message=_('Enter a valid username consisting of letters, numbers, ' 'underscores or hyphens.')) if BlacklistedName.blocked(name): raise forms.ValidationError(_('This username cannot be used.')) # FIXME: Bug 858452. Remove this check when collation of the username # column is changed to case insensitive. if (UserProfile.objects.exclude(id=self.instance.id) .filter(username__iexact=name).exists()): raise forms.ValidationError(_('This username is already in use.')) return name def clean_display_name(self): name = self.cleaned_data['display_name'] if BlacklistedName.blocked(name): raise forms.ValidationError(_('This display name cannot be used.')) return name def clean_email(self): # TODO(django 1.9): Change the field to disabled=True and remove this. 
return self.instance.email def clean_photo(self): photo = self.cleaned_data['photo'] if not photo: return if photo.content_type not in ('image/png', 'image/jpeg'): raise forms.ValidationError( _('Images must be either PNG or JPG.')) if photo.size > settings.MAX_PHOTO_UPLOAD_SIZE: raise forms.ValidationError( _('Please use images smaller than %dMB.' % (settings.MAX_PHOTO_UPLOAD_SIZE / 1024 / 1024 - 1))) return photo def clean_bio(self): bio = self.cleaned_data['bio'] normalized = clean_nl(unicode(bio)) if has_links(normalized): # There's some links, we don't want them. raise forms.ValidationError(_('No links are allowed.')) return bio def save(self, log_for_developer=True): u = super(UserEditForm, self).save(commit=False) data = self.cleaned_data photo = data['photo'] if photo: u.picture_type = 'image/png' tmp_destination = u.picture_path + '__unconverted' with storage.open(tmp_destination, 'wb') as fh: for chunk in photo.chunks(): fh.write(chunk) tasks.resize_photo.delay(tmp_destination, u.picture_path, set_modified_on=[u]) for (i, n) in notifications.NOTIFICATIONS_BY_ID.items(): enabled = n.mandatory or (str(i) in data['notifications']) UserNotification.update_or_create( user=u, notification_id=i, update={'enabled': enabled}) log.debug(u'User (%s) updated their profile' % u) u.save() return u class AdminUserEditForm(UserEditForm): email = forms.EmailField(widget=RequiredEmailInput) admin_log = forms.CharField(required=True, label='Reason for change', widget=RequiredTextarea(attrs={'rows': 4})) notes = forms.CharField(required=False, label='Notes', widget=forms.Textarea(attrs={'rows': 4})) anonymize = forms.BooleanField(required=False) def changed_fields(self): return (set(self.changed_data) - set(['admin_log', 'notifications', 'photo'])) def changes(self): details = dict([(k, (self.initial[k], self.cleaned_data[k])) for k in self.changed_fields()]) return details def clean_anonymize(self): if (self.cleaned_data['anonymize'] and self.changed_fields() != 
set(['anonymize'])): raise forms.ValidationError(_('To anonymize, enter a reason for' ' the change but do not change any' ' other field.')) return self.cleaned_data['anonymize'] def clean_email(self): return self.cleaned_data['email'] def save(self, *args, **kw): profile = super(AdminUserEditForm, self).save(log_for_developer=False) if self.cleaned_data['anonymize']: amo.log(amo.LOG.ADMIN_USER_ANONYMIZED, self.instance, self.cleaned_data['admin_log']) profile.anonymize() # This also logs else: amo.log(amo.LOG.ADMIN_USER_EDITED, self.instance, self.cleaned_data['admin_log'], details=self.changes()) log.info('Admin edit user: %s changed fields: %s' % (self.instance, self.changed_fields())) return profile class BlacklistedNameAddForm(forms.Form): names = forms.CharField(widget=forms.Textarea( attrs={'cols': 40, 'rows': 16})) def clean_names(self): names = self.cleaned_data['names'].strip() if not names: raise forms.ValidationError( _('Please enter at least one name to blacklist.')) names = os.linesep.join( [s.strip() for s in names.splitlines() if s.strip()]) return names
true
true
f71cf4a9fd2cce5b7e524e3da45d2e4d49508fe0
2,256
py
Python
Data/MarvinData.py
PatrickKutch/FUDD
faf36e24b7da99b75764f411586a823a172e4d01
[ "Apache-2.0" ]
null
null
null
Data/MarvinData.py
PatrickKutch/FUDD
faf36e24b7da99b75764f411586a823a172e4d01
[ "Apache-2.0" ]
null
null
null
Data/MarvinData.py
PatrickKutch/FUDD
faf36e24b7da99b75764f411586a823a172e4d01
[ "Apache-2.0" ]
null
null
null
############################################################################## # Copyright (c) 2016 Intel Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ############################################################################## # File Abstract: # Wrapper class for a piece of data, could be from file or from network # ############################################################################## from Util import Time class MarvinData(object): def __init__(self,Namespace,ID,Value,ElapsedTime,FormatVersion,isLive=True): #from Helpers import Configuration self.FormatVersion=FormatVersion self.Value = Value if True == isLive: self.ArrivalTime = Time.GetCurrMS() else: self.ArrivalTime = ElapsedTime self.Namespace = Namespace #self.Namespace = Configuration.get().HandleBITWNamespace(Namespace) # if Bump in the Wire, change NS self.ID = ID self.Live = isLive def ToXML(self,destIsFile=False): startCDATA="<![CDATA[" endCDATA="]]>" if False == destIsFile: buffer = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" else: buffer = "" buffer = buffer + "<Oscar Type=\"Data\">" buffer = buffer + "<Version>1</Version>" buffer = buffer + "<Namespace>"+self.Namespace+"</Namespace>" buffer = buffer + "<ID>"+self.ID+"</ID>" if False == destIsFile: buffer = buffer + "<Value LiveData=\""+str(self.Live)+"\">"+startCDATA+self.Value+endCDATA+"</Value>" else: buffer = buffer + "<Value>"+self.Value+"</Value>" buffer = buffer + "</Oscar>" return buffer
36.983607
113
0.557624
true
true
f71cf7333084978808cab1d46f6e030e28cd6846
19,083
py
Python
Lib/test/test_site.py
inging44/python3
fcd8d9d2ee54b46b757ecf34f284b4e60a43097a
[ "bzip2-1.0.6" ]
1,872
2015-01-02T18:56:47.000Z
2022-03-31T07:34:39.000Z
Lib/test/test_site.py
inging44/python3
fcd8d9d2ee54b46b757ecf34f284b4e60a43097a
[ "bzip2-1.0.6" ]
675
2015-02-27T09:01:01.000Z
2022-03-31T14:03:25.000Z
Lib/test/test_site.py
inging44/python3
fcd8d9d2ee54b46b757ecf34f284b4e60a43097a
[ "bzip2-1.0.6" ]
278
2015-01-02T03:48:20.000Z
2022-03-29T20:40:44.000Z
"""Tests for 'site'. Tests assume the initial paths in sys.path once the interpreter has begun executing have not been removed. """ import unittest import test.support from test.support import captured_stderr, TESTFN, EnvironmentVarGuard import builtins import os import sys import re import encodings import urllib.request import urllib.error import subprocess import sysconfig from copy import copy # These tests are not particularly useful if Python was invoked with -S. # If you add tests that are useful under -S, this skip should be moved # to the class level. if sys.flags.no_site: raise unittest.SkipTest("Python was invoked with -S") import site if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE): # need to add user site directory for tests os.makedirs(site.USER_SITE) site.addsitedir(site.USER_SITE) class HelperFunctionsTests(unittest.TestCase): """Tests for helper functions. """ def setUp(self): """Save a copy of sys.path""" self.sys_path = sys.path[:] self.old_base = site.USER_BASE self.old_site = site.USER_SITE self.old_prefixes = site.PREFIXES self.original_vars = sysconfig._CONFIG_VARS self.old_vars = copy(sysconfig._CONFIG_VARS) def tearDown(self): """Restore sys.path""" sys.path[:] = self.sys_path site.USER_BASE = self.old_base site.USER_SITE = self.old_site site.PREFIXES = self.old_prefixes sysconfig._CONFIG_VARS = self.original_vars sysconfig._CONFIG_VARS.clear() sysconfig._CONFIG_VARS.update(self.old_vars) def test_makepath(self): # Test makepath() have an absolute path for its first return value # and a case-normalized version of the absolute path for its # second value. 
path_parts = ("Beginning", "End") original_dir = os.path.join(*path_parts) abs_dir, norm_dir = site.makepath(*path_parts) self.assertEqual(os.path.abspath(original_dir), abs_dir) if original_dir == os.path.normcase(original_dir): self.assertEqual(abs_dir, norm_dir) else: self.assertEqual(os.path.normcase(abs_dir), norm_dir) def test_init_pathinfo(self): dir_set = site._init_pathinfo() for entry in [site.makepath(path)[1] for path in sys.path if path and os.path.isdir(path)]: self.assertIn(entry, dir_set, "%s from sys.path not found in set returned " "by _init_pathinfo(): %s" % (entry, dir_set)) def pth_file_tests(self, pth_file): """Contain common code for testing results of reading a .pth file""" self.assertIn(pth_file.imported, sys.modules, "%s not in sys.modules" % pth_file.imported) self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path) self.assertFalse(os.path.exists(pth_file.bad_dir_path)) def test_addpackage(self): # Make sure addpackage() imports if the line starts with 'import', # adds directories to sys.path for any line in the file that is not a # comment or import that is a valid directory name for where the .pth # file resides; invalid directories are not added pth_file = PthFile() pth_file.cleanup(prep=True) # to make sure that nothing is # pre-existing that shouldn't be try: pth_file.create() site.addpackage(pth_file.base_dir, pth_file.filename, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() def make_pth(self, contents, pth_dir='.', pth_name=TESTFN): # Create a .pth file and return its (abspath, basename). 
pth_dir = os.path.abspath(pth_dir) pth_basename = pth_name + '.pth' pth_fn = os.path.join(pth_dir, pth_basename) pth_file = open(pth_fn, 'w', encoding='utf-8') self.addCleanup(lambda: os.remove(pth_fn)) pth_file.write(contents) pth_file.close() return pth_dir, pth_basename def test_addpackage_import_bad_syntax(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("import bad)syntax\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 1") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: the previous two should be independent checks so that the # order doesn't matter. The next three could be a single check # but my regex foo isn't good enough to write it. self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), r'import bad\)syntax') self.assertRegex(err_out.getvalue(), 'SyntaxError') def test_addpackage_import_bad_exec(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 2") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), 'ImportError') @unittest.skipIf(sys.platform == "win32", "Windows does not raise an " "error for file paths containing null characters") def test_addpackage_import_bad_pth_file(self): # Issue 5258 pth_dir, pth_fn = self.make_pth("abc\x00def\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 1") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. 
self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), 'TypeError') def test_addsitedir(self): # Same tests for test_addpackage since addsitedir() essentially just # calls addpackage() for every .pth file in the directory pth_file = PthFile() pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing # that is tested for try: pth_file.create() site.addsitedir(pth_file.base_dir, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() @unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 " "user-site (site.ENABLE_USER_SITE)") def test_s_option(self): usersite = site.USER_SITE self.assertIn(usersite, sys.path) env = os.environ.copy() rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) self.assertEqual(rc, 1) env = os.environ.copy() rc = subprocess.call([sys.executable, '-s', '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) if usersite == site.getsitepackages()[0]: self.assertEqual(rc, 1) else: self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONNOUSERSITE"] = "1" rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) if usersite == site.getsitepackages()[0]: self.assertEqual(rc, 1) else: self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONUSERBASE"] = "/tmp" rc = subprocess.call([sys.executable, '-c', 'import sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'], env=env) self.assertEqual(rc, 1) def test_getuserbase(self): site.USER_BASE = None user_base = site.getuserbase() # the call sets site.USER_BASE self.assertEqual(site.USER_BASE, user_base) # let's set PYTHONUSERBASE and see if it uses it site.USER_BASE = None import sysconfig sysconfig._CONFIG_VARS = None with EnvironmentVarGuard() as environ: environ['PYTHONUSERBASE'] = 'xoxo' self.assertTrue(site.getuserbase().startswith('xoxo'), site.getuserbase()) def test_getusersitepackages(self): site.USER_SITE = 
None site.USER_BASE = None user_site = site.getusersitepackages() # the call sets USER_BASE *and* USER_SITE self.assertEqual(site.USER_SITE, user_site) self.assertTrue(user_site.startswith(site.USER_BASE), user_site) def test_getsitepackages(self): site.PREFIXES = ['xoxo'] dirs = site.getsitepackages() if (sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK")): # OS X framework builds site.PREFIXES = ['Python.framework'] dirs = site.getsitepackages() self.assertEqual(len(dirs), 3) wanted = os.path.join('/Library', sysconfig.get_config_var("PYTHONFRAMEWORK"), sys.version[:3], 'site-packages') self.assertEqual(dirs[2], wanted) elif os.sep == '/': # OS X non-framwework builds, Linux, FreeBSD, etc self.assertEqual(len(dirs), 2) wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], 'site-packages') self.assertEqual(dirs[0], wanted) wanted = os.path.join('xoxo', 'lib', 'site-python') self.assertEqual(dirs[1], wanted) else: # other platforms self.assertEqual(len(dirs), 2) self.assertEqual(dirs[0], 'xoxo') wanted = os.path.join('xoxo', 'lib', 'site-packages') self.assertEqual(dirs[1], wanted) class PthFile(object): """Helper class for handling testing of .pth files""" def __init__(self, filename_base=TESTFN, imported="time", good_dirname="__testdir__", bad_dirname="__bad"): """Initialize instance variables""" self.filename = filename_base + ".pth" self.base_dir = os.path.abspath('') self.file_path = os.path.join(self.base_dir, self.filename) self.imported = imported self.good_dirname = good_dirname self.bad_dirname = bad_dirname self.good_dir_path = os.path.join(self.base_dir, self.good_dirname) self.bad_dir_path = os.path.join(self.base_dir, self.bad_dirname) def create(self): """Create a .pth file with a comment, blank lines, an ``import <self.imported>``, a line with self.good_dirname, and a line with self.bad_dirname. Creation of the directory for self.good_dir_path (based off of self.good_dirname) is also performed. 
Make sure to call self.cleanup() to undo anything done by this method. """ FILE = open(self.file_path, 'w') try: print("#import @bad module name", file=FILE) print("\n", file=FILE) print("import %s" % self.imported, file=FILE) print(self.good_dirname, file=FILE) print(self.bad_dirname, file=FILE) finally: FILE.close() os.mkdir(self.good_dir_path) def cleanup(self, prep=False): """Make sure that the .pth file is deleted, self.imported is not in sys.modules, and that both self.good_dirname and self.bad_dirname are not existing directories.""" if os.path.exists(self.file_path): os.remove(self.file_path) if prep: self.imported_module = sys.modules.get(self.imported) if self.imported_module: del sys.modules[self.imported] else: if self.imported_module: sys.modules[self.imported] = self.imported_module if os.path.exists(self.good_dir_path): os.rmdir(self.good_dir_path) if os.path.exists(self.bad_dir_path): os.rmdir(self.bad_dir_path) class ImportSideEffectTests(unittest.TestCase): """Test side-effects from importing 'site'.""" def setUp(self): """Make a copy of sys.path""" self.sys_path = sys.path[:] def tearDown(self): """Restore sys.path""" sys.path[:] = self.sys_path def test_abs_paths(self): # Make sure all imported modules have their __file__ and __cached__ # attributes as absolute paths. Arranging to put the Lib directory on # PYTHONPATH would cause the os module to have a relative path for # __file__ if abs_paths() does not get run. sys and builtins (the # only other modules imported before site.py runs) do not have # __file__ or __cached__ because they are built-in. 
parent = os.path.relpath(os.path.dirname(os.__file__)) env = os.environ.copy() env['PYTHONPATH'] = parent code = ('import os, sys', # use ASCII to avoid locale issues with non-ASCII directories 'os_file = os.__file__.encode("ascii", "backslashreplace")', r'sys.stdout.buffer.write(os_file + b"\n")', 'os_cached = os.__cached__.encode("ascii", "backslashreplace")', r'sys.stdout.buffer.write(os_cached + b"\n")') command = '\n'.join(code) # First, prove that with -S (no 'import site'), the paths are # relative. proc = subprocess.Popen([sys.executable, '-S', '-c', command], env=env, stdout=subprocess.PIPE) stdout, stderr = proc.communicate() self.assertEqual(proc.returncode, 0) os__file__, os__cached__ = stdout.splitlines()[:2] self.assertFalse(os.path.isabs(os__file__)) self.assertFalse(os.path.isabs(os__cached__)) # Now, with 'import site', it works. proc = subprocess.Popen([sys.executable, '-c', command], env=env, stdout=subprocess.PIPE) stdout, stderr = proc.communicate() self.assertEqual(proc.returncode, 0) os__file__, os__cached__ = stdout.splitlines()[:2] self.assertTrue(os.path.isabs(os__file__)) self.assertTrue(os.path.isabs(os__cached__)) def test_no_duplicate_paths(self): # No duplicate paths should exist in sys.path # Handled by removeduppaths() site.removeduppaths() seen_paths = set() for path in sys.path: self.assertNotIn(path, seen_paths) seen_paths.add(path) @unittest.skip('test not implemented') def test_add_build_dir(self): # Test that the build directory's Modules directory is used when it # should be. 
# XXX: implement pass def test_setting_quit(self): # 'quit' and 'exit' should be injected into builtins self.assertTrue(hasattr(builtins, "quit")) self.assertTrue(hasattr(builtins, "exit")) def test_setting_copyright(self): # 'copyright', 'credits', and 'license' should be in builtins self.assertTrue(hasattr(builtins, "copyright")) self.assertTrue(hasattr(builtins, "credits")) self.assertTrue(hasattr(builtins, "license")) def test_setting_help(self): # 'help' should be set in builtins self.assertTrue(hasattr(builtins, "help")) def test_aliasing_mbcs(self): if sys.platform == "win32": import locale if locale.getdefaultlocale()[1].startswith('cp'): for value in encodings.aliases.aliases.values(): if value == "mbcs": break else: self.fail("did not alias mbcs") def test_sitecustomize_executed(self): # If sitecustomize is available, it should have been imported. if "sitecustomize" not in sys.modules: try: import sitecustomize except ImportError: pass else: self.fail("sitecustomize not imported automatically") @test.support.requires_resource('network') @test.support.system_must_validate_cert @unittest.skipUnless(sys.version_info[3] == 'final', 'only for released versions') @unittest.skipUnless(hasattr(urllib.request, "HTTPSHandler"), 'need SSL support to download license') def test_license_exists_at_url(self): # This test is a bit fragile since it depends on the format of the # string displayed by license in the absence of a LICENSE file. url = license._Printer__data.split()[1] req = urllib.request.Request(url, method='HEAD') try: with test.support.transient_internet(url): with urllib.request.urlopen(req) as data: code = data.getcode() except urllib.error.HTTPError as e: code = e.code self.assertEqual(code, 200, msg="Can't find " + url) class StartupImportTests(unittest.TestCase): def test_startup_imports(self): # This tests checks which modules are loaded by Python when it # initially starts upon startup. 
popen = subprocess.Popen([sys.executable, '-I', '-v', '-c', 'import sys; print(set(sys.modules))'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = popen.communicate() stdout = stdout.decode('utf-8') stderr = stderr.decode('utf-8') modules = eval(stdout) self.assertIn('site', modules) # http://bugs.python.org/issue19205 re_mods = {'re', '_sre', 'sre_compile', 'sre_constants', 'sre_parse'} # _osx_support uses the re module in many placs if sys.platform != 'darwin': self.assertFalse(modules.intersection(re_mods), stderr) # http://bugs.python.org/issue9548 self.assertNotIn('locale', modules, stderr) if sys.platform != 'darwin': # http://bugs.python.org/issue19209 self.assertNotIn('copyreg', modules, stderr) # http://bugs.python.org/issue19218> collection_mods = {'_collections', 'collections', 'functools', 'heapq', 'itertools', 'keyword', 'operator', 'reprlib', 'types', 'weakref' }.difference(sys.builtin_module_names) self.assertFalse(modules.intersection(collection_mods), stderr) if __name__ == "__main__": unittest.main()
40.602128
78
0.606613
import unittest import test.support from test.support import captured_stderr, TESTFN, EnvironmentVarGuard import builtins import os import sys import re import encodings import urllib.request import urllib.error import subprocess import sysconfig from copy import copy if sys.flags.no_site: raise unittest.SkipTest("Python was invoked with -S") import site if site.ENABLE_USER_SITE and not os.path.isdir(site.USER_SITE): os.makedirs(site.USER_SITE) site.addsitedir(site.USER_SITE) class HelperFunctionsTests(unittest.TestCase): def setUp(self): self.sys_path = sys.path[:] self.old_base = site.USER_BASE self.old_site = site.USER_SITE self.old_prefixes = site.PREFIXES self.original_vars = sysconfig._CONFIG_VARS self.old_vars = copy(sysconfig._CONFIG_VARS) def tearDown(self): sys.path[:] = self.sys_path site.USER_BASE = self.old_base site.USER_SITE = self.old_site site.PREFIXES = self.old_prefixes sysconfig._CONFIG_VARS = self.original_vars sysconfig._CONFIG_VARS.clear() sysconfig._CONFIG_VARS.update(self.old_vars) def test_makepath(self): path_parts = ("Beginning", "End") original_dir = os.path.join(*path_parts) abs_dir, norm_dir = site.makepath(*path_parts) self.assertEqual(os.path.abspath(original_dir), abs_dir) if original_dir == os.path.normcase(original_dir): self.assertEqual(abs_dir, norm_dir) else: self.assertEqual(os.path.normcase(abs_dir), norm_dir) def test_init_pathinfo(self): dir_set = site._init_pathinfo() for entry in [site.makepath(path)[1] for path in sys.path if path and os.path.isdir(path)]: self.assertIn(entry, dir_set, "%s from sys.path not found in set returned " "by _init_pathinfo(): %s" % (entry, dir_set)) def pth_file_tests(self, pth_file): self.assertIn(pth_file.imported, sys.modules, "%s not in sys.modules" % pth_file.imported) self.assertIn(site.makepath(pth_file.good_dir_path)[0], sys.path) self.assertFalse(os.path.exists(pth_file.bad_dir_path)) def test_addpackage(self): pth_file = PthFile() pth_file.cleanup(prep=True) try: pth_file.create() 
site.addpackage(pth_file.base_dir, pth_file.filename, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() def make_pth(self, contents, pth_dir='.', pth_name=TESTFN): # Create a .pth file and return its (abspath, basename). pth_dir = os.path.abspath(pth_dir) pth_basename = pth_name + '.pth' pth_fn = os.path.join(pth_dir, pth_basename) pth_file = open(pth_fn, 'w', encoding='utf-8') self.addCleanup(lambda: os.remove(pth_fn)) pth_file.write(contents) pth_file.close() return pth_dir, pth_basename def test_addpackage_import_bad_syntax(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("import bad)syntax\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 1") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: the previous two should be independent checks so that the # order doesn't matter. The next three could be a single check self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), r'import bad\)syntax') self.assertRegex(err_out.getvalue(), 'SyntaxError') def test_addpackage_import_bad_exec(self): # Issue 10642 pth_dir, pth_fn = self.make_pth("randompath\nimport nosuchmodule\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 2") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. 
self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), 'ImportError') @unittest.skipIf(sys.platform == "win32", "Windows does not raise an " "error for file paths containing null characters") def test_addpackage_import_bad_pth_file(self): # Issue 5258 pth_dir, pth_fn = self.make_pth("abc\x00def\n") with captured_stderr() as err_out: site.addpackage(pth_dir, pth_fn, set()) self.assertRegex(err_out.getvalue(), "line 1") self.assertRegex(err_out.getvalue(), re.escape(os.path.join(pth_dir, pth_fn))) # XXX: ditto previous XXX comment. self.assertRegex(err_out.getvalue(), 'Traceback') self.assertRegex(err_out.getvalue(), 'TypeError') def test_addsitedir(self): # Same tests for test_addpackage since addsitedir() essentially just # calls addpackage() for every .pth file in the directory pth_file = PthFile() pth_file.cleanup(prep=True) # Make sure that nothing is pre-existing # that is tested for try: pth_file.create() site.addsitedir(pth_file.base_dir, set()) self.pth_file_tests(pth_file) finally: pth_file.cleanup() @unittest.skipUnless(site.ENABLE_USER_SITE, "requires access to PEP 370 " "user-site (site.ENABLE_USER_SITE)") def test_s_option(self): usersite = site.USER_SITE self.assertIn(usersite, sys.path) env = os.environ.copy() rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) self.assertEqual(rc, 1) env = os.environ.copy() rc = subprocess.call([sys.executable, '-s', '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) if usersite == site.getsitepackages()[0]: self.assertEqual(rc, 1) else: self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONNOUSERSITE"] = "1" rc = subprocess.call([sys.executable, '-c', 'import sys; sys.exit(%r in sys.path)' % usersite], env=env) if usersite == site.getsitepackages()[0]: self.assertEqual(rc, 1) else: self.assertEqual(rc, 0) env = os.environ.copy() env["PYTHONUSERBASE"] = "/tmp" rc = subprocess.call([sys.executable, '-c', 'import 
sys, site; sys.exit(site.USER_BASE.startswith("/tmp"))'], env=env) self.assertEqual(rc, 1) def test_getuserbase(self): site.USER_BASE = None user_base = site.getuserbase() # the call sets site.USER_BASE self.assertEqual(site.USER_BASE, user_base) # let's set PYTHONUSERBASE and see if it uses it site.USER_BASE = None import sysconfig sysconfig._CONFIG_VARS = None with EnvironmentVarGuard() as environ: environ['PYTHONUSERBASE'] = 'xoxo' self.assertTrue(site.getuserbase().startswith('xoxo'), site.getuserbase()) def test_getusersitepackages(self): site.USER_SITE = None site.USER_BASE = None user_site = site.getusersitepackages() self.assertEqual(site.USER_SITE, user_site) self.assertTrue(user_site.startswith(site.USER_BASE), user_site) def test_getsitepackages(self): site.PREFIXES = ['xoxo'] dirs = site.getsitepackages() if (sys.platform == "darwin" and sysconfig.get_config_var("PYTHONFRAMEWORK")): site.PREFIXES = ['Python.framework'] dirs = site.getsitepackages() self.assertEqual(len(dirs), 3) wanted = os.path.join('/Library', sysconfig.get_config_var("PYTHONFRAMEWORK"), sys.version[:3], 'site-packages') self.assertEqual(dirs[2], wanted) elif os.sep == '/': self.assertEqual(len(dirs), 2) wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3], 'site-packages') self.assertEqual(dirs[0], wanted) wanted = os.path.join('xoxo', 'lib', 'site-python') self.assertEqual(dirs[1], wanted) else: self.assertEqual(len(dirs), 2) self.assertEqual(dirs[0], 'xoxo') wanted = os.path.join('xoxo', 'lib', 'site-packages') self.assertEqual(dirs[1], wanted) class PthFile(object): def __init__(self, filename_base=TESTFN, imported="time", good_dirname="__testdir__", bad_dirname="__bad"): self.filename = filename_base + ".pth" self.base_dir = os.path.abspath('') self.file_path = os.path.join(self.base_dir, self.filename) self.imported = imported self.good_dirname = good_dirname self.bad_dirname = bad_dirname self.good_dir_path = os.path.join(self.base_dir, self.good_dirname) 
self.bad_dir_path = os.path.join(self.base_dir, self.bad_dirname) def create(self): FILE = open(self.file_path, 'w') try: print("#import @bad module name", file=FILE) print("\n", file=FILE) print("import %s" % self.imported, file=FILE) print(self.good_dirname, file=FILE) print(self.bad_dirname, file=FILE) finally: FILE.close() os.mkdir(self.good_dir_path) def cleanup(self, prep=False): if os.path.exists(self.file_path): os.remove(self.file_path) if prep: self.imported_module = sys.modules.get(self.imported) if self.imported_module: del sys.modules[self.imported] else: if self.imported_module: sys.modules[self.imported] = self.imported_module if os.path.exists(self.good_dir_path): os.rmdir(self.good_dir_path) if os.path.exists(self.bad_dir_path): os.rmdir(self.bad_dir_path) class ImportSideEffectTests(unittest.TestCase): def setUp(self): self.sys_path = sys.path[:] def tearDown(self): sys.path[:] = self.sys_path def test_abs_paths(self): parent = os.path.relpath(os.path.dirname(os.__file__)) env = os.environ.copy() env['PYTHONPATH'] = parent code = ('import os, sys', 'os_file = os.__file__.encode("ascii", "backslashreplace")', r'sys.stdout.buffer.write(os_file + b"\n")', 'os_cached = os.__cached__.encode("ascii", "backslashreplace")', r'sys.stdout.buffer.write(os_cached + b"\n")') command = '\n'.join(code) proc = subprocess.Popen([sys.executable, '-S', '-c', command], env=env, stdout=subprocess.PIPE) stdout, stderr = proc.communicate() self.assertEqual(proc.returncode, 0) os__file__, os__cached__ = stdout.splitlines()[:2] self.assertFalse(os.path.isabs(os__file__)) self.assertFalse(os.path.isabs(os__cached__)) proc = subprocess.Popen([sys.executable, '-c', command], env=env, stdout=subprocess.PIPE) stdout, stderr = proc.communicate() self.assertEqual(proc.returncode, 0) os__file__, os__cached__ = stdout.splitlines()[:2] self.assertTrue(os.path.isabs(os__file__)) self.assertTrue(os.path.isabs(os__cached__)) def test_no_duplicate_paths(self): site.removeduppaths() 
seen_paths = set() for path in sys.path: self.assertNotIn(path, seen_paths) seen_paths.add(path) @unittest.skip('test not implemented') def test_add_build_dir(self): # should be. # XXX: implement pass def test_setting_quit(self): # 'quit' and 'exit' should be injected into builtins self.assertTrue(hasattr(builtins, "quit")) self.assertTrue(hasattr(builtins, "exit")) def test_setting_copyright(self): # 'copyright', 'credits', and 'license' should be in builtins self.assertTrue(hasattr(builtins, "copyright")) self.assertTrue(hasattr(builtins, "credits")) self.assertTrue(hasattr(builtins, "license")) def test_setting_help(self): # 'help' should be set in builtins self.assertTrue(hasattr(builtins, "help")) def test_aliasing_mbcs(self): if sys.platform == "win32": import locale if locale.getdefaultlocale()[1].startswith('cp'): for value in encodings.aliases.aliases.values(): if value == "mbcs": break else: self.fail("did not alias mbcs") def test_sitecustomize_executed(self): # If sitecustomize is available, it should have been imported. if "sitecustomize" not in sys.modules: try: import sitecustomize except ImportError: pass else: self.fail("sitecustomize not imported automatically") @test.support.requires_resource('network') @test.support.system_must_validate_cert @unittest.skipUnless(sys.version_info[3] == 'final', 'only for released versions') @unittest.skipUnless(hasattr(urllib.request, "HTTPSHandler"), 'need SSL support to download license') def test_license_exists_at_url(self): # This test is a bit fragile since it depends on the format of the # string displayed by license in the absence of a LICENSE file. 
url = license._Printer__data.split()[1] req = urllib.request.Request(url, method='HEAD') try: with test.support.transient_internet(url): with urllib.request.urlopen(req) as data: code = data.getcode() except urllib.error.HTTPError as e: code = e.code self.assertEqual(code, 200, msg="Can't find " + url) class StartupImportTests(unittest.TestCase): def test_startup_imports(self): popen = subprocess.Popen([sys.executable, '-I', '-v', '-c', 'import sys; print(set(sys.modules))'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = popen.communicate() stdout = stdout.decode('utf-8') stderr = stderr.decode('utf-8') modules = eval(stdout) self.assertIn('site', modules) re_mods = {'re', '_sre', 'sre_compile', 'sre_constants', 'sre_parse'} if sys.platform != 'darwin': self.assertFalse(modules.intersection(re_mods), stderr) self.assertNotIn('locale', modules, stderr) if sys.platform != 'darwin': self.assertNotIn('copyreg', modules, stderr) collection_mods = {'_collections', 'collections', 'functools', 'heapq', 'itertools', 'keyword', 'operator', 'reprlib', 'types', 'weakref' }.difference(sys.builtin_module_names) self.assertFalse(modules.intersection(collection_mods), stderr) if __name__ == "__main__": unittest.main()
true
true
f71cf74301c83443bdf85dae4b32b0b7c4ddf129
518
py
Python
jax_cfd/ml/optimizer_modules.py
ngam/jax-cfd
8eff9c47bdc7fb19b6453db94ca65f6be64d91f6
[ "Apache-2.0" ]
244
2021-05-18T18:49:14.000Z
2022-03-30T18:27:21.000Z
jax_cfd/ml/optimizer_modules.py
ngam/jax-cfd
8eff9c47bdc7fb19b6453db94ca65f6be64d91f6
[ "Apache-2.0" ]
14
2021-06-24T22:15:44.000Z
2022-03-30T06:22:52.000Z
jax_cfd/ml/optimizer_modules.py
ngam/jax-cfd
8eff9c47bdc7fb19b6453db94ca65f6be64d91f6
[ "Apache-2.0" ]
36
2021-05-29T09:30:44.000Z
2022-03-28T12:33:40.000Z
"""Configurable optimizers from JAX.""" import gin from jax.example_libraries import optimizers @gin.configurable def optimizer(value): return value gin.external_configurable(optimizers.adam) gin.external_configurable(optimizers.momentum) gin.external_configurable(optimizers.nesterov) gin.external_configurable(optimizers.exponential_decay) gin.external_configurable(optimizers.inverse_time_decay) gin.external_configurable(optimizers.polynomial_decay) gin.external_configurable(optimizers.piecewise_constant)
27.263158
56
0.864865
import gin from jax.example_libraries import optimizers @gin.configurable def optimizer(value): return value gin.external_configurable(optimizers.adam) gin.external_configurable(optimizers.momentum) gin.external_configurable(optimizers.nesterov) gin.external_configurable(optimizers.exponential_decay) gin.external_configurable(optimizers.inverse_time_decay) gin.external_configurable(optimizers.polynomial_decay) gin.external_configurable(optimizers.piecewise_constant)
true
true
f71cf7ceb04c02f72c61db2c923addcb66daac1d
96,917
py
Python
anima/env/mayaEnv/toolbox.py
Khosiyat/anima
f631c08400547f49ac5f1feeb730f22c255eb771
[ "MIT" ]
1
2021-07-03T19:03:41.000Z
2021-07-03T19:03:41.000Z
anima/env/mayaEnv/toolbox.py
Khosiyat/anima
f631c08400547f49ac5f1feeb730f22c255eb771
[ "MIT" ]
null
null
null
anima/env/mayaEnv/toolbox.py
Khosiyat/anima
f631c08400547f49ac5f1feeb730f22c255eb771
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- import functools import os from anima.env.mayaEnv.animation import Animation from anima.env.mayaEnv.general import General from anima.env.mayaEnv.modeling import Modeling from anima.env.mayaEnv.previs import Previs from anima.env.mayaEnv.reference import Reference from anima.env.mayaEnv.render import Render from anima.env.mayaEnv.rigging import Rigging import pymel.core as pm import maya.mel as mel from anima.env.mayaEnv import auxiliary, camera_tools __last_commands__ = [] # list of dictionaries __last_tab__ = 'ANIMA_TOOLBOX_LAST_TAB_INDEX' __commands__ = [] def repeater(index): """repeats the last command with the given index """ global __last_commands__ try: call_data = __last_commands__[index] return call_data[0](*call_data[1], **call_data[2]) except IndexError: return None def repeat_last(call_data): """own implementation of pm.repeatLast """ global __last_commands__ index = len(__last_commands__) callable_ = call_data[0] args = call_data[1] kwargs = call_data[2] command = \ 'print \\"\\";python(\\\"from anima.env.mayaEnv.toolbox import ' \ 'repeater; repeater(%s);\\\");' % index repeat_last_command = 'repeatLast -ac "%(command)s" -acl "%(label)s";' % { 'command': command, 'label': callable_.__name__ } print(repeat_last_command) pm.mel.eval(repeat_last_command) __last_commands__.append(call_data) # also call the callable callable_(*args, **kwargs) def repeated_callback(callable_, *args, **kwargs): """Adds the given callable to the last commands list and adds a caller to the pm.repeatLast """ return pm.Callback( repeat_last, [callable_, args, kwargs] ) class Color(object): """a simple color class """ colors = [ (1.000, 0.500, 0.666), (1.000, 0.833, 0.500), (0.666, 1.000, 0.500), (0.500, 1.000, 0.833), (0.500, 0.666, 1.000), (0.833, 0.500, 1.000) ] def __init__(self, index=0): self.index = index self.max_colors = len(self.colors) def change(self): """updates the index to the next one """ self.index = int((self.index + 1) % 
self.max_colors) def reset(self): """resets the color index """ self.index = 0 @property def color(self): """returns the current color values """ return self.colors[self.index] def filter_tools(search_text): """filters toolbox :param str search_text: The search_text """ for command in __commands__: uitype = command.type() if uitype == 'button': label = command.getLabel() if search_text.lower() not in label.lower(): command.setVisible(False) else: command.setVisible(True) elif uitype == 'rowLayout': # get the children children = command.children() matched_children = False for c in children: c_uitype = c.type() if c_uitype in ['button', 'staticText'] and \ search_text in c.getLabel().lower(): matched_children = True break if not matched_children: command.setVisible(False) else: command.setVisible(True) def UI(): # window setup width = 260 height = 650 row_spacing = 3 color = Color() # init the __commands LUT global __commands__ __commands__ = [] if pm.dockControl("toolbox_dockControl", q=True, ex=True): pm.deleteUI("toolbox_dockControl") window_name = "toolbox_window" if pm.window(window_name, q=True, ex=True): pm.deleteUI(window_name, wnd=True) toolbox_window = pm.window( window_name, wh=(width, height), title="Anima ToolBox" ) # the layout that holds the tabs main_form_layout = pm.formLayout( 'main_form_layout', nd=100, parent=toolbox_window ) search_field = pm.textField( 'search_text_field', tcc=filter_tools, placeholderText='Search...', parent=main_form_layout ) main_tab_layout = pm.tabLayout( 'main_tab_layout', scr=True, cr=True, parent=main_form_layout ) # attach the main_tab_layout to main_form_layout pm.formLayout( main_form_layout, edit=True, attachForm=[ (search_field, "top", 0), (search_field, "left", 0), (search_field, "right", 0), # (main_tab_layout, "top", 0), (main_tab_layout, "bottom", 0), (main_tab_layout, "left", 0), (main_tab_layout, "right", 0) ], attachNone=[ (search_field, "bottom") ], attachControl=[ (main_tab_layout, "top", 0, search_field) ] 
) with main_tab_layout: # ----- GENERAL ------ general_column_layout = pm.columnLayout( 'general_column_layout', adj=True, cal="center", rs=row_spacing ) with general_column_layout: color.change() pm.button( 'open_version_button', l="Open Version", c=repeated_callback(General.version_dialog, mode=1), ann="Open Version", bgc=color.color ) pm.button( 'save_as_version_button', l="Save As Version", c=repeated_callback(General.version_dialog, mode=0), ann="Save As Version", bgc=color.color ) color.change() pm.button( 'selectionManager_button', l="Selection Manager", c=repeated_callback(General.selection_manager), ann="Selection Manager", bgc=color.color ) color.change() pm.button( 'publishChecker_button', l="Publish Checker", c=repeated_callback(General.publish_checker), ann="Publish Checker", bgc=color.color ) color.change() pm.button( 'rename_unique_button', l='Rename Unique', c=repeated_callback(General.rename_unique), ann=General.rename_unique.__doc__, bgc=color.color ) pm.button( 'removeColonFromNames_button', l="remove colon(:) from node names", c=repeated_callback(General.remove_colon_from_names), ann="removes the colon (:) character from all " "selected object names", bgc=color.color ) pm.button( 'removePastedFromNames_button', l="remove \"pasted_\" from node names", c=repeated_callback(General.remove_pasted), ann="removes the \"passed__\" from all selected " "object names", bgc=color.color ) color.change() pm.button( 'togglePolyMeshes_button', l="toggle polymesh visibility", c=repeated_callback(General.toggle_poly_meshes), ann="toggles the polymesh display in the active model " "panel", bgc=color.color ) color.change() pm.button( 'selectSetMembers_button', l="select set members", c=repeated_callback(General.select_set_members), ann="selects the selected set members in correct " "order", bgc=color.color ) color.change() pm.button( 'delete_unused_intermediate_shapes_button', l='Delete Unused Intermediate Shape Nodes', 
c=repeated_callback(General.delete_unused_intermediate_shapes), ann='Deletes unused (no connection) intermediate shape nodes', bgc=color.color ) color.change() pm.button( 'export_transform_info_button', l='Export Transform Info', c=repeated_callback(General.export_transform_info), ann='exports transform info', bgc=color.color ) pm.button( 'import_transform_info_button', l='Import Transform Info', c=repeated_callback(General.import_transform_info), ann='imports transform info', bgc=color.color ) color.change() pm.button( 'export_global_transform_info_button', l='Export Global Transform Info', c=repeated_callback(General.export_transform_info, True), ann='exports global transform info', bgc=color.color ) pm.button( 'import_global_transform_info_button', l='Import Global Transform Info', c=repeated_callback(General.import_transform_info, True), ann='imports global transform info', bgc=color.color ) color.change() pm.button( 'export_component_transform_info_button', l='Export Component Transform Info', c=repeated_callback(General.export_component_transform_info), ann='exports component transform info', bgc=color.color ) pm.button( 'import_component_transform_info_button', l='Import Component Transform Info', c=repeated_callback(General.import_component_transform_info), ann='imports component transform info', bgc=color.color ) color.change() pm.button( 'import_rsproxy_data_from_houdini_button', l='Import RSProxy Data From Houdini', c=repeated_callback(General.rsproxy_data_importer), ann=General.rsproxy_data_importer.__doc__, bgc=color.color ) color.change() pm.button( 'generate_thumbnail_button', l='Generate Thumbnail', c=repeated_callback(General.generate_thumbnail), ann='Generates thumbnail for current scene', bgc=color.color ) color.change() pm.button( 'cleanup_light_cameras_button', l='Cleanup Light Cameras', c=repeated_callback(General.cleanup_light_cameras), ann=General.cleanup_light_cameras.__doc__, bgc=color.color ) color.change() from anima.env.mayaEnv.general 
import unknown_plugin_cleaner_ui pm.button( 'cleanup_plugins_button', l='Cleanup Unknown Plugins', c=repeated_callback(unknown_plugin_cleaner_ui), ann=unknown_plugin_cleaner_ui.__doc__, bgc=color.color ) color.change() pm.button( 'unshape_parent_node_button', l='Unshape Parent Nodes', c=repeated_callback(General.unshape_parent_nodes), ann=General.unshape_parent_nodes.__doc__, bgc=color.color ) # store commands __commands__.extend(general_column_layout.children()) # ----- REFERENCE ------ reference_columnLayout = pm.columnLayout( 'reference_columnLayout', adj=True, cal="center", rs=row_spacing) with reference_columnLayout: color.reset() pm.text(l='===== Reference Tools =====') pm.button( 'nsDelete_button', l="nsDelete", c=repeated_callback(General.namespace_deleter), ann=General.namespace_deleter.__doc__, bgc=color.color ) color.change() pm.button( 'duplicate_selected_reference_button', l='Duplicate Selected Reference', c=repeated_callback(Reference.duplicate_selected_reference), ann='Duplicates the selected reference', bgc=color.color ) color.change() pm.button( 'select_reference_in_reference_editor_button', l='Select Reference In Reference Editor', c=repeated_callback( Reference.select_reference_in_reference_editor ), ann=Reference.select_reference_in_reference_editor.__doc__, bgc=color.color ) color.change() pm.button( 'get_selected_reference_path_button', l='Get Selected Reference Path', c=repeated_callback(Reference.get_selected_reference_path), ann='Prints the selected reference full path', bgc=color.color ) pm.button( 'open_selected_reference_button', l='Open Selected Reference in New Maya', c=repeated_callback(Reference.open_reference_in_new_maya), ann='Opens the selected reference in new Maya ' 'instance', bgc=color.color ) color.change() pm.button( 'publish_model_as_look_dev_button', l='Model -> LookDev', c=repeated_callback(Reference.publish_model_as_look_dev), ann='References the current Model scene to the LookDev scene ' 'of the same task, creates the 
LookDev scene if ' 'necessary, also reopens the current model scene.', bgc=color.color ) color.change() pm.button( 'fix_reference_namespace_button', l='Fix Reference Namespace', c=repeated_callback(Reference.fix_reference_namespace), ann='Fixes old style reference namespaces with new one, ' 'creates new versions if necessary.', bgc=color.color ) color.change() pm.button( 'fix_reference_paths_button', l='Fix Reference Paths', c=repeated_callback(Reference.fix_reference_paths), ann='Fixes reference paths deeply, so they will use' '$REPO env var.', bgc=color.color ) pm.button( 'fix_student_license_on_references_button', l='Fix Student License Error On References', c=repeated_callback( Reference.fix_student_license_on_references ), ann=Reference.fix_student_license.__doc__, bgc=color.color ) pm.button( 'fix_student_license_on_files_button', l='Fix Student License Error On Selected Files', c=repeated_callback( Reference.fix_student_license_on_selected_file ), ann=Reference.fix_student_license.__doc__, bgc=color.color ) color.change() pm.button( 'archive_button', l='Archive Current Scene', c=repeated_callback(Reference.archive_current_scene), ann='Creates a ZIP file containing the current scene and its' 'references in a flat Maya default project folder ' 'structure', bgc=color.color ) pm.button( 'bind_to_original_button', l='Bind To Original', c=repeated_callback(Reference.bind_to_original), ann='Binds the current local references to the ones on the ' 'repository', bgc=color.color ) pm.button( 'unload_selected_references_button', l='Unload Selected References', c=repeated_callback(Reference.unload_selected_references), ann='Unloads the highest references that is related with the selected objects', bgc=color.color ) pm.button( 'unload_unselected_references_button', l='Unload UnSelected References', c=repeated_callback(Reference.unload_unselected_references), ann='Unloads any references that is not related with the ' 'selected objects', bgc=color.color ) color.change() 
pm.button( 'remove_selected_references_button', l='Remove Selected References', c=repeated_callback(Reference.remove_selected_references), ann='Removes the highest references that is related with the selected objects', bgc=color.color ) color.change() pm.text(l='===== Representation Tools =====') with pm.rowLayout(nc=2, adj=1): pm.checkBoxGrp( 'generate_repr_types_checkbox_grp', l='Reprs', numberOfCheckBoxes=3, labelArray3=['GPU', 'ASS', 'RS'], cl4=['left', 'left', 'left', 'left'], cw4=[51, 50, 50, 50], valueArray3=[1, 1, 1] ) pm.checkBox( 'generate_repr_skip_existing_checkBox', l='Skip existing Reprs.', value=0 ) pm.button( 'generate_repr_of_all_references_button', l='Deep Generate Repr Of All References', c=repeated_callback( Reference.generate_repr_of_all_references_caller ), ann='Deeply generates desired Representations of all ' 'references of this scene', bgc=color.color ) pm.button( 'generate_repr_of_scene_button', l='Generate Repr Of This Scene', c=repeated_callback(Reference.generate_repr_of_scene_caller), ann='Generates desired Representations of this scene', bgc=color.color ) color.change() with pm.rowLayout(nc=2, adj=1): pm.radioButtonGrp( 'repr_apply_to_radio_button_grp', l='Apply To', # ad3=1, labelArray2=['Selected', 'All References'], numberOfRadioButtons=2, cl3=['left', 'left', 'left'], cw3=[50, 65, 65], sl=1 ) pm.button( 'to_base_button', l='To Base', c=repeated_callback(Reference.to_base), ann='Convert selected to Base representation', bgc=color.color ) pm.button( 'to_gpu_button', l='To GPU', c=repeated_callback(Reference.to_gpu), ann='Convert selected to GPU representation', bgc=color.color ) pm.button( 'to_ass_button', l='To ASS', c=repeated_callback(Reference.to_ass), ann='Convert selected to ASS representation', bgc=color.color ) pm.button( 'to_rs_button', l='To RS', c=repeated_callback(Reference.to_rs), ann='Convert selected to RS representation', bgc=color.color ) color.change() pm.button( 'update_alembic_references_button', l='Update 
Alembic References', c=repeated_callback(auxiliary.update_alembic_references), ann=auxiliary.update_alembic_references.__doc__, bgc=color.color ) # store commands __commands__.extend(reference_columnLayout.children()) # ----- MODELING ------ modeling_column_layout = pm.columnLayout( 'modeling_column_layout', adj=True, cal="center", rs=row_spacing) with modeling_column_layout: color.reset() pm.button('toggleFaceNormalDisplay_button', l="toggle face normal display", c=repeated_callback( pm.runtime.ToggleFaceNormalDisplay), ann="toggles face normal display", bgc=color.color) pm.button('reverseNormals_button', l="reverse normals", c=repeated_callback(Modeling.reverse_normals), ann="reverse normals", bgc=color.color) pm.button('fixNormals_button', l="fix normals", c=repeated_callback(Modeling.fix_normals), ann="applies setToFace then conform and then " "soften edge to all selected objects", bgc=color.color) color.change() pm.button( 'oyHierarchyInstancer_button', l="hierarchy_instancer on selected", c=repeated_callback(Modeling.hierarchy_instancer), ann="hierarchy_instancer on selected", bgc=color.color ) color.change() pm.button( 'relax_verts_button', l="Relax Vertices", c=repeated_callback(Modeling.relax_vertices), ann="opens relax_vertices", bgc=color.color ) with pm.rowLayout(nc=4, adj=1): def smooth_edges_callback(): iteration = pm.intSliderGrp( "smooth_edges_iteration_intField", q=1, v=1 ) Modeling.smooth_edges(iteration=iteration) pm.button( 'smooth_edges_button', l="Smooth Edges", c=repeated_callback(smooth_edges_callback), ann=Modeling.smooth_edges.__doc__, bgc=color.color ) pm.intSliderGrp( 'smooth_edges_iteration_intField', v=100, min=0, max=100 ) color.change() pm.button( 'create_curve_from_mesh_edges_button', l="Curve From Mesh Edges", c=repeated_callback(Modeling.create_curve_from_mesh_edges), ann="Creates a curve from selected mesh edges", bgc=color.color ) color.change() pm.button( 'vertex_aligned_locator_button', l="Vertex Aligned Locator", 
c=repeated_callback(Modeling.vertex_aligned_locator), ann="Creates an aligned locator from selected vertices", bgc=color.color ) color.change() with pm.rowLayout(nc=8, rat=(1, "both", 0), adj=1): pm.text('set_pivot_text', l='Set Pivot', bgc=color.color) pm.button( 'center_button', l="C", c=repeated_callback( Modeling.set_pivot, 0 ), bgc=(0.8, 0.8, 0.8) ) pm.button( 'minus_X_button', l="-X", c=repeated_callback( Modeling.set_pivot, 1 ), bgc=(1.000, 0.500, 0.666) ) pm.button( 'plus_X_button', l="+X", c=repeated_callback( Modeling.set_pivot, 2 ), bgc=(1.000, 0.500, 0.666) ) pm.button( 'minus_Y_button', l="-Y", c=repeated_callback( Modeling.set_pivot, 3 ), bgc=(0.666, 1.000, 0.500) ) pm.button( 'plus_Y_button', l="+Y", c=repeated_callback( Modeling.set_pivot, 4 ), bgc=(0.666, 1.000, 0.500) ) pm.button( 'minus_Z_button', l="-X", c=repeated_callback( Modeling.set_pivot, 5 ), bgc=(0.500, 0.666, 1.000) ) pm.button( 'plus_Z_button', l="+X", c=repeated_callback( Modeling.set_pivot, 6 ), bgc=(0.500, 0.666, 1.000) ) color.change() with pm.rowLayout(nc=7, rat=(1, "both", 0), adj=1): pm.text(l='Text. 
Res', bgc=color.color) pm.button( l="128", c=repeated_callback( Modeling.set_texture_res, 128 ), bgc=Color.colors[0] ) pm.button( l="256", c=repeated_callback( Modeling.set_texture_res, 256 ), bgc=Color.colors[1] ) pm.button( l="512", c=repeated_callback( Modeling.set_texture_res, 512 ), bgc=Color.colors[2] ) pm.button( l="1024", c=repeated_callback( Modeling.set_texture_res, 1024 ), bgc=Color.colors[3] ) pm.button( l='2048', c=repeated_callback( Modeling.set_texture_res, 2048 ), bgc=Color.colors[4] ) pm.button( l='4096', c=repeated_callback( Modeling.set_texture_res, 4096 ), bgc=Color.colors[5] ) pm.text(l='========== UV Tools =============') color.change() pm.button( 'fix_uvsets_button', l="Fix UVSets (DiffuseUV -> map1)", c=repeated_callback(Modeling.fix_uvsets), ann=Modeling.fix_uvsets, bgc=color.color ) color.change() pm.button( 'select_zero_uv_area_faces_button', l="Filter Zero UV Area Faces", c=repeated_callback(Modeling.select_zero_uv_area_faces), ann="Selects faces with zero uv area", bgc=color.color ) color.change() pm.button( 'create_auto_uvmap_button', l='Create Auto UVMap', c=repeated_callback(Modeling.create_auto_uvmap), ann=Modeling.create_auto_uvmap.__doc__, bgc=color.color ) with pm.rowLayout(nc=6, adj=1): def transfer_uvs_button_callback(*args, **kwargs): label_lut = { 'W': 0, 'L': 1, 'UV': 2, 'C': 3, 'T': 4 } sample_space = label_lut[ pm.radioCollection( 'transfer_uvs_radio_collection', q=1, sl=1 ) ] Modeling.transfer_uvs(sample_space=sample_space) pm.button('transfer_uvs_button', l="Transfer UVs", c=repeated_callback(transfer_uvs_button_callback), ann="Transfers UVs from one group to other, use it" "for LookDev -> Alembic", bgc=color.color) pm.radioCollection('transfer_uvs_radio_collection') button_with = 40 pm.radioButton( 'W', w=button_with, al='left', ann='World' ) pm.radioButton( 'L', w=button_with, al='left', ann='Local' ) pm.radioButton( 'UV', w=button_with, al='left', ann='UV' ) pm.radioButton( 'C', w=button_with, al='left', 
ann='Component', sl=1 ) pm.radioButton( 'T', w=button_with, al='left', ann='Topology' ) color.change() pm.text(l='======= Manipulator Tools =======') pm.button('set_to_point_button', l="Set To Point", c=repeated_callback(pm.mel.eval, "manipMoveOrient 1;"), ann="Set manipulator to the point", bgc=color.color) pm.button('set_to_edge_button', l="Set To Edge", c=repeated_callback(pm.mel.eval, "manipMoveOrient 2;"), ann="Set manipulator to the edge", bgc=color.color) pm.button('set_to_face_button', l="Set To Face", c=repeated_callback(pm.mel.eval, "manipMoveOrient 3;"), ann="Set manipulator to the face", bgc=color.color) color.change() pm.button('create_bbox_from_selection_button', l="Create BBOX from selection", c=repeated_callback(Modeling.bbox_from_selection), ann=Modeling.bbox_from_selection.__doc__, bgc=color.color) # store commands __commands__.extend(modeling_column_layout.children()) # ----- RIGGING ------ rigging_columnLayout = pm.columnLayout( 'rigging_columnLayout', adj=True, cal="center", rs=row_spacing ) with rigging_columnLayout: color.reset() pm.button( 'create_joints_on_curve_ui_button', l="Create Joints On Curve UI", c=repeated_callback(Rigging.create_joints_on_curve_ui), ann=Rigging.create_joints_on_curve_ui.__doc__, bgc=color.color ) pm.button( 'mirror_transformation_button', l="Mirror Transformation", c=repeated_callback(Rigging.mirror_transformation), ann=Rigging.mirror_transformation.__doc__, bgc=color.color ) color.change() pm.button( 'IKFKLimbRigger_button', l="IK/FK Limb Rigger", c=repeated_callback(Rigging.ik_fk_limb_rigger), ann=Rigging.ik_fk_limb_rigger.__doc__, bgc=color.color ) with pm.rowLayout(nc=2, adj=1): def ik_fk_limb_rigger_callback(): subdivision = pm.intField('bendy_ik_fk_subdivision_count_field', q=1, v=1) Rigging.bendy_ik_fk_limb_rigger(subdivision=subdivision) pm.button( 'bendy_ik_fk_limb_rigger_button', l='IK/FK Limb Rigger (Bendy)', c=repeated_callback(ik_fk_limb_rigger_callback), ann=Rigging.bendy_ik_fk_limb_rigger.__doc__, 
bgc=color.color ) pm.intField('bendy_ik_fk_subdivision_count_field', min=0, v=2) pm.button( 'ReverseFootRigger_button', l="Reverse Foot Rigger", c=repeated_callback(Rigging.reverse_foot_rigger), ann=Rigging.reverse_foot_rigger.__doc__, bgc=color.color ) pm.button( 'squashStretchBendRigger_button', l="Squash/Stretch/Bend Rigger", c=repeated_callback(Rigging.squash_stretch_bend_rigger), ann=Rigging.squash_stretch_bend_rigger.__doc__, bgc=color.color ) pm.button( 'setupStretchySplineIKCurve_button', l="setup stretchy splineIK curve", c=repeated_callback(Rigging.setup_stretchy_spline_ik_curve), ann="connects necessary nodes to calculate arcLength " "change in percent", bgc=color.color ) pm.button( 'selectJointsDeformingTheObject_button', l="select joints deforming the object", c=repeated_callback(Rigging.select_joints_deforming_object), ann="select joints that deform the object", bgc=color.color ) color.change() pm.button( 'create_axial_correction_group_button', l="Create Axial Correction Groups", c=repeated_callback(Rigging.axial_correction_group), ann=Rigging.axial_correction_group.__doc__, bgc=color.color ) pm.button( 'create_zv_parent_compatible_groups_button', l="Create ZV Parent Compatible Groups", c=repeated_callback(Rigging.create_zv_parent_compatible_groups), ann=Rigging.axial_correction_group.__doc__, bgc=color.color ) color.change() pm.button( 'setClustersToAbsolute_button', l="set selected clusters to absolute", c=repeated_callback(Rigging.set_clusters_relative_state, 0), ann="set Clusters to Absolute", bgc=color.color ) pm.button( 'setClustersToRelative_button', l="set selected clusters to relative", c=repeated_callback( Rigging.set_clusters_relative_state, 1 ), ann="set Clusters to Relative", bgc=color.color ) color.change() pm.button( 'addControllerShape_button', l="add controller shape", c=repeated_callback(Rigging.add_controller_shape), ann="add the shape in the selected joint", bgc=color.color ) pm.button( 'replaceControllerShape_button', l="replace 
controller shape", c=repeated_callback(Rigging.replace_controller_shape), ann="replaces the shape in the selected joint", bgc=color.color ) color.change() def pin_controller_callback(color, *args): """Creates Pin Controller on the selected Vertex """ from anima.env.mayaEnv import rigging vertex = pm.ls(sl=1)[0] pc = rigging.PinController() pc.color = color pc.pin_to_vertex = vertex pc.setup() # TODO: Give the user the ability of selecting custom colors with pm.rowLayout(nc=4, adj=1): pm.text(l="Pin Controller") pm.button('pin_controller_red_button', l="R", c=repeated_callback(pin_controller_callback, [1, 0, 0]), ann=pin_controller_callback.__doc__, bgc=[1, 0, 0]) pm.button('pin_controller_green_button', l="G", c=repeated_callback(pin_controller_callback, [0, 1, 0]), ann=pin_controller_callback.__doc__, bgc=[0, 1, 0]) pm.button('pin_controller_blue_button', l="B", c=repeated_callback(pin_controller_callback, [0, 0, 1]), ann=pin_controller_callback.__doc__, bgc=[0, 0, 1]) pm.button('rivet_button', l="create rivet", c=repeated_callback(mel.eval, 'rivet'), ann="create rivet", bgc=color.color) pm.button('oyAutoRivet_button', l="auto rivet", c=repeated_callback(mel.eval, 'oyAutoRivet'), ann="auto rivet", bgc=color.color) pm.button( 'oyAutoRivetFollicle_button', l="auto rivet (Follicle)", c=repeated_callback(auxiliary.auto_rivet), ann="creates a rivet setup by using hair follicles", bgc=color.color ) pm.button( 'rivet_per_face_button', l="rivet per face (Follicle)", c=repeated_callback(auxiliary.rivet_per_face), ann="creates a rivet setup per selected face by using hair " "follicles", bgc=color.color ) pm.button('create_hair_from_curves_button', l="Create Hair From Curves", c=repeated_callback(auxiliary.hair_from_curves), ann="creates hair from curves", bgc=color.color) color.change() pm.button('artPaintSkinWeightsTool_button', l="paint weights tool", c=repeated_callback(mel.eval, 'ArtPaintSkinWeightsTool'), ann="paint weights tool", bgc=color.color) def 
skin_tools_ui_caller(*args): from anima.env.mayaEnv.rigging import SkinToolsUI st = SkinToolsUI() st.ui() pm.button('skin_tools_button', l="Skin Tools", c=skin_tools_ui_caller, ann="skin tools", bgc=color.color) pm.button('oyFixBoundJoint_button', l="fix_bound_joint", c=repeated_callback(Rigging.fix_bound_joint), ann="fix_bound_joint", bgc=color.color) pm.button('toggle_local_rotation_axes_button', l="Toggle Local Rotation Axes", c=repeated_callback(General.toggle_attributes, "displayLocalAxis"), ann="Toggle Local Rotation Axes", bgc=color.color) pm.button('toggle_display_rotate_pivot_button', l="Toggle Display Rotate Pivot", c=repeated_callback(General.toggle_attributes, "displayRotatePivot"), ann="Toggle Display Rotate Pivot", bgc=color.color) pm.button('seroBlendController_button', l="seroBlendController", c=repeated_callback(mel.eval, 'seroBlendController'), ann="seroBlendController", bgc=color.color) pm.button('align_to_pole_vector_button', l="Align To Pole Vector", c=repeated_callback(auxiliary.align_to_pole_vector), ann="align to pole vector", bgc=color.color) color.change() pm.button('oyResetCharSet_button', l="oyResetCharSet", c=repeated_callback(mel.eval, 'oyResetCharSet'), ann="reset char set", bgc=color.color) pm.button('export_blend_connections_button', l="Export blend connections", c=repeated_callback(auxiliary.export_blend_connections), ann="export blend connections", bgc=color.color) color.change() pm.button('createFollicles_button', l="create follicles", c=repeated_callback(Rigging.create_follicles), ann="create follicles", bgc=color.color) color.change() pm.button('oyResetTweaks_button', l="reset tweaks", c=repeated_callback(Rigging.reset_tweaks), ann="reset tweaks", bgc=color.color) color.change() def add_cacheable_attribute_callback(): """add <b>cacheable</b> attribute to the selected nodes """ for node in pm.selected(): Rigging.add_cacheable_attribute(node) pm.button('add_cacheable_attr_button', l="add `cacheable` attribute", 
c=repeated_callback(add_cacheable_attribute_callback), ann=add_cacheable_attribute_callback.__doc__, bgc=color.color) # store commands __commands__.extend(rigging_columnLayout.children()) # ----- RENDER ------ render_columnLayout = pm.columnLayout( 'render_columnLayout', adj=True, cal="center", rs=row_spacing ) with render_columnLayout: color.reset() color.change() pm.button( 'update_render_settings_button', l="Update Render Settings", c=repeated_callback(Render.update_render_settings), ann=Render.update_render_settings.__doc__, bgc=color.color ) color.change() pm.button( 'delete_render_layers_button', l="Delete Render Layers", c=repeated_callback(Render.delete_render_layers), ann=Render.delete_render_layers.__doc__, bgc=color.color ) pm.button( 'delete_display_layers_button', l="Delete Display Layers", c=repeated_callback(Render.delete_display_layers), ann=Render.delete_display_layers.__doc__, bgc=color.color ) pm.button( 'delete_render_and_display_layers_button', l="Delete Render and Display Layers", c=repeated_callback(Render.delete_render_and_display_layers), ann=Render.delete_render_and_display_layers.__doc__, bgc=color.color ) color.change() pm.button( 'delete_unused_shading_nodes_button', l="Delete Unused Shading Nodes", c=repeated_callback(Render.delete_unused_shading_nodes), ann=Render.delete_unused_shading_nodes.__doc__, bgc=color.color ) color.change() pm.button( 'duplicate_input_graph_button', l="Duplicate Input Graph", c=repeated_callback(Render.duplicate_input_graph), ann=Render.duplicate_input_graph.__doc__, bgc=color.color ) pm.button( 'duplicate_with_connections_button', l="Duplicate With Connections To Network", c=repeated_callback(Render.duplicate_with_connections), ann=Render.duplicate_with_connections.__doc__, bgc=color.color ) color.change() pm.text(l='=========== RedShift Tools ===========') pm.button( 'generate_rs_from_selection_button', l='Generate RSProxy From Selection', c=repeated_callback(Render.generate_rsproxy_from_selection), 
ann=Render.generate_rsproxy_from_selection.__doc__, bgc=color.color ) pm.button( 'generate_rs_from_selection_per_selection_button', l='Generate RSProxy From Selection (Per Selection)', c=repeated_callback(Render.generate_rsproxy_from_selection, True), ann=Render.generate_rsproxy_from_selection.__doc__, bgc=color.color ) pm.button( 'set_rsproxy_to_bbox_button', l='RSProxy -> Bounding Box', c=repeated_callback(Render.rsproxy_to_bounding_box), ann=Render.rsproxy_to_bounding_box.__doc__, bgc=color.color ) pm.button( 'set_rsproxy_to_preview_mesh_button', l='RSProxy -> Preview Mesh', c=repeated_callback(Render.rsproxy_to_preview_mesh), ann=Render.rsproxy_to_preview_mesh.__doc__, bgc=color.color ) color.change() pm.text(l='===== RedShift IC + IPC Bake =====') pm.button( 'redshift_ic_ipc_bake_button', l="Do Bake", c=repeated_callback(Render.redshift_ic_ipc_bake), ann=Render.redshift_ic_ipc_bake.__doc__, bgc=color.color ) pm.button( 'redshift_ic_ipc_bake_restore_button', l="Restore Settings", c=repeated_callback(Render.redshift_ic_ipc_bake_restore), ann=Render.redshift_ic_ipc_bake_restore.__doc__, bgc=color.color ) pm.text(l='======================================') color.change() pm.button( 'submit_afanasy_button', l="Afanasy Job Submitter", c=repeated_callback(Render.afanasy_job_submitter), ann=Render.afanasy_job_submitter.__doc__, bgc=color.color ) color.change() pm.button( 'open_node_in_browser_button', l="Open node in browser", c=repeated_callback(Render.open_node_in_browser), ann="Open node in browser", bgc=color.color ) color.change() pm.button('auto_convert_to_redshift_button', l="Auto Convert Scene To RedShift (BETA)", c=repeated_callback(Render.auto_convert_to_redshift), ann="Automatically converts the scene from Arnold to " "Redshift, including materials and lights", bgc=color.color) pm.button('convert_nodes_to_redshift_button', l="Convert Selected To RedShift (BETA)", c=repeated_callback(Render.convert_nodes_to_redshift), ann="Automatically converts the selected 
node from " "Arnold to Redshift", bgc=color.color) def set_shape_attribute_wrapper(attr_name, value): """a wrapper function for set_shape_attribute """ apply_to_hierarchy = pm.checkBox( apply_to_hierarchy_checkBox, q=True, v=True ) disable_undo = pm.checkBox( disable_undo_queue_check_box, q=True, v=True ) Render.set_shape_attribute( attr_name, value, apply_to_hierarchy, disable_undo ) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('renderThumbnailUpdate_text', l="renderThumbnailUpdate", bgc=color.color) pm.button('set_renderThumbnailUpdate_ON_button', l="ON", c=repeated_callback(pm.renderThumbnailUpdate, 1), bgc=(0, 1, 0)) pm.button('set_renderThumbnailUpdate_OFF_button', l="OFF", c=repeated_callback(pm.renderThumbnailUpdate, 0), bgc=(1, 0, 0)) color.change() pm.button('replaceShadersWithLast_button', l="replace shaders with last", c=repeated_callback(Render.replace_shaders_with_last), ann="replace shaders with last", bgc=color.color) color.change() pm.button('createTextureRefObject_button', l="create texture ref. object", c=repeated_callback(Render.create_texture_ref_object), ann="create texture ref. 
object", bgc=color.color) pm.text(l='========== Texture Tools =============') color.change() pm.button('assign_substance_textures_button', l="Assign Substance Textures", c=repeated_callback(Render.assign_substance_textures), ann=Render.assign_substance_textures.__doc__, bgc=color.color) color.change() pm.button('normalize_texture_paths_button', l="Normalize Texture Paths (remove $)", c=repeated_callback(Render.normalize_texture_paths), ann=Render.normalize_texture_paths.__doc__, bgc=color.color) pm.button('unnormalize_texture_paths_button', l="Unnormalize Texture Paths (add $)", c=repeated_callback(Render.unnormalize_texture_paths), ann=Render.unnormalize_texture_paths.__doc__, bgc=color.color) color.change() pm.button('assign_random_material_color_button', l="Assign Material with Random Color", c=repeated_callback(Render.assign_random_material_color), ann=Render.assign_random_material_color.__doc__, bgc=color.color) pm.button('randomize_material_color_button', l="Randomize Material Color", c=repeated_callback(Render.randomize_material_color), ann=Render.randomize_material_color.__doc__, bgc=color.color) color.change() pm.button('import_image_as_plane_button', l="Import Image as Plane", c=repeated_callback(Render.import_image_as_plane), ann=Render.import_image_as_plane.__doc__, bgc=color.color) pm.text(l='============ Camera Tools ============') color.change() pm.button( 'CameraFilmOffsetTool_button', l="Camera Film Offset Tool", c=repeated_callback( camera_tools.camera_film_offset_tool ), ann="Camera Film Offset Tool", bgc=color.color ) def camera_focus_plane_tool_callback(): """callback for the camera_focus_plane_tool """ camera = pm.ls(sl=1)[0] camera_tools.camera_focus_plane_tool(camera) pm.button( 'CameraFocusPlaneTool_button', l="Camera Focus Plane Tool", c=repeated_callback(camera_focus_plane_tool_callback), ann="Camera Film Offset Tool", bgc=color.color ) pm.button( 'lock_tracked_camera_channels_button', l="Lock Tracked Camera Channels", 
c=repeated_callback(camera_tools.lock_tracked_camera_channels), ann=camera_tools.lock_tracked_camera_channels.__doc__, bgc=color.color ) color.change() pm.text(l='===== Vertigo =====') pm.button('vertigo_setup_look_at_button', l="Setup -> Look At", c=repeated_callback(Render.vertigo_setup_look_at), ann="Setup Look At", bgc=color.color) pm.button('vertigo_setup_vertigo_button', l="Setup -> Vertigo", c=repeated_callback(Render.vertigo_setup_vertigo), ann="Setup Vertigo", bgc=color.color) pm.button('vertigo_delete_button', l="Delete", c=repeated_callback(Render.vertigo_delete), ann="Delete", bgc=color.color) pm.text(l='===================') pm.button('oyTracker2Null_button', l="oyTracker2Null", c=repeated_callback(mel.eval, 'oyTracker2Null'), ann="Tracker2Null", bgc=color.color) with pm.rowLayout(nc=3, adj=1): def import_3dequalizer_points_callback(): """callback for Import 3DEqualizer points """ cam_width = pm.intField('import_3DEqualizer_points_width_int_field', q=1, v=1) cam_height = pm.intField('import_3DEqualizer_points_height_int_field', q=1, v=1) camera_tools.import_3dequalizer_points(cam_width, cam_height) pm.button( 'import_3DEqualizer_points_button', l="Import 3DEqualizer Points", c=repeated_callback(import_3dequalizer_points_callback), ann=camera_tools.import_3dequalizer_points.__doc__, bgc=color.color ) pm.intField('import_3DEqualizer_points_width_int_field', min=1, v=1920) pm.intField('import_3DEqualizer_points_height_int_field', min=1, v=1080) pm.text(l='===================') color.change() pm.button('reloadFileTextures_button', l="reload file textures", c=repeated_callback(Render.reload_file_textures), ann="reload file textures", bgc=color.color) color.change() pm.button('transfer_shaders_button', l="Transfer Shaders", c=repeated_callback(Render.transfer_shaders), ann="Transfers shaders from one group to other, use it" "for LookDev -> Alembic", bgc=color.color) color.change() pm.button('fitPlacementToUV_button', l="fit placement to UV", 
c=repeated_callback(Render.fit_placement_to_UV), ann="fit placement to UV", bgc=color.color) pm.button( 'connect_placement2d_to_file_texture_button', l='Connect Placement2D to File Texture', c=repeated_callback(Render.connect_placement2d_to_file), ann=Render.connect_placement2d_to_file.__doc__, bgc=color.color ) color.change() with pm.rowLayout(nc=2, adj=1): def enable_subdiv_callback(): max_tess = pm.intField('enable_subdiv_int_field', q=1, v=1) Render.enable_subdiv_on_selected( max_subdiv=max_tess, fixed_tes=False ) pm.button( 'enable_subdiv_on_selected_objects_button', l='Enable Subdiv (Adaptive)', c=repeated_callback(enable_subdiv_callback), ann='Enables Arnold/RedShift Subdiv (catclark) on ' 'selected objects', bgc=color.color ) pm.intField('enable_subdiv_int_field', min=0, v=3) with pm.rowLayout(nc=2, adj=1): def fixed_tess_callback(): max_tess = pm.intField('fixed_tess_int_field', q=1, v=1) Render.enable_subdiv_on_selected( fixed_tes=True, max_subdiv=max_tess ) pm.button( 'enable_fixed_subdiv_on_selected_objects_button', l='Enable Subdiv (Fixed Tes.)', c=repeated_callback(fixed_tess_callback), ann='Enables Arnold/RedShift Subdiv (catclark) on selected ' 'objects with fixed tessellation', bgc=color.color ) pm.intField('fixed_tess_int_field', min=0, v=1) pm.button( 'disable_subdiv_on_selected_objects_button', l='Disable Subdiv', c=repeated_callback(Render.disable_subdiv_on_selected), ann=Render.disable_subdiv.__doc__, bgc=color.color ) color.change() pm.button( 'export_shader_data_button', l='Export Shader Attributes', c=repeated_callback(Render.export_shader_attributes), ann=Render.export_shader_attributes.__doc__, bgc=color.color ) pm.button( 'import_shader_data_button', l='Import Shader Attributes', c=repeated_callback(Render.import_shader_attributes), ann=Render.import_shader_attributes.__doc__, bgc=color.color ) color.change() pm.button( 'export_shader_to_houdini_button', l='Export Shader Assignments To Houdini', 
c=repeated_callback(Render.export_shader_assignments_to_houdini), ann=Render.export_shader_assignments_to_houdini.__doc__, bgc=color.color ) color.change() pm.button( 'create_eye_shader_and_controls_button', l='Create Eye Shader and Controls', c=repeated_callback(Render.create_eye_shader_and_controls), ann='Creates eye shaders and controls for the selected eyes', bgc=color.color ) pm.button( 'setup_outer_eye_render_attributes_button', l='Setup Outer Eye Render Attributes', c=repeated_callback(Render.setup_outer_eye_render_attributes), ann=Render.setup_outer_eye_render_attributes.__doc__, bgc=color.color ) pm.button( 'setup_window_glass_render_attributes_button', l='Setup **Window Glass** Render Attributes', c=repeated_callback(Render.setup_window_glass_render_attributes), ann=Render.setup_window_glass_render_attributes.__doc__, bgc=color.color ) pm.button( 'setup_dummy_window_light_button', l='Setup/Update **Dummy Window** Light Plane', c=repeated_callback(Render.dummy_window_light_plane), ann=Render.dummy_window_light_plane.__doc__, bgc=color.color ) color.change() pm.button( 'create_generic_tooth_shader_button', l='Create Generic TOOTH Shader', c=repeated_callback(Render.create_generic_tooth_shader), ann=Render.create_generic_gum_shader.__doc__, bgc=color.color ) pm.button( 'create_generic_gum_shader_button', l='Create Generic GUM Shader', c=repeated_callback(Render.create_generic_gum_shader), ann=Render.create_generic_gum_shader.__doc__, bgc=color.color ) pm.button( 'create_generic_tongue_shader_button', l='Create Generic TONGUE Shader', c=repeated_callback(Render.create_generic_tongue_shader), ann=Render.create_generic_tongue_shader.__doc__, bgc=color.color ) color.change() pm.button('convert_to_ai_image_button', l="To aiImage", c=repeated_callback( Render.convert_file_node_to_ai_image_node), ann="Converts the selected File (file texture) nodes to " "aiImage nodes, also connects the place2dTexture " "node if necessary", bgc=color.color) color.change() 
pm.button('to_bbox_button', l="aiStandIn To BBox", c=repeated_callback(Render.standin_to_bbox), ann="Convert selected stand ins to bbox", bgc=color.color) pm.button('to_polywire_button', l="aiStandIn To Polywire", c=repeated_callback(Render.standin_to_polywire), ann="Convert selected stand ins to polywire", bgc=color.color) color.change() with pm.rowLayout(nc=3, adj=3, bgc=color.color): min_range_field = pm.floatField( minValue=1000, maxValue=50000, step=1, pre=0, value=3500, w=50, bgc=color.color, ann='Min Value' ) max_range_field = pm.floatField( minValue=1000, maxValue=50000, step=1, pre=0, value=6500, w=50, bgc=color.color, ann='Max Value' ) pm.button( ann="Randomize Color Temperature", l="Randomize Color Temp.", w=70, c=repeated_callback( Render.randomize_light_color_temp, min_range_field, max_range_field ), bgc=color.color ) with pm.rowLayout(nc=3, adj=3, bgc=color.color): min_range_field = pm.floatField( minValue=0, maxValue=200, step=0.1, pre=1, value=10, w=50, bgc=color.color, ann='Min Value' ) max_range_field = pm.floatField( minValue=0, maxValue=200, step=0.1, pre=1, value=20, w=50, bgc=color.color, ann='Max Value' ) pm.button( ann="Randomize Exposure", l="Randomize Exposure", w=70, c=repeated_callback( Render.randomize_light_intensity, min_range_field, max_range_field ), bgc=color.color ) color.change() pm.button( ann="Create Reflection Curve", l="Reflection Curve", c=repeated_callback( Render.generate_reflection_curve ), bgc=color.color ) color.change() pm.button( ann="Import GPU Content", l="Import GPU Content", c=repeated_callback( Render.import_gpu_content ), bgc=color.color ) color.change() with pm.rowLayout(nc=3, adj=3, bgc=color.color): source_driver_field = pm.textField( text='S:', w=50, bgc=color.color, ann='Source Driver' ) target_driver_field = pm.textField( text='L:', w=50, bgc=color.color, ann='Target Driver' ) pm.button( ann="Move Cache Files to Another Location", l="Move Cache Files", w=70, c=repeated_callback( 
Render.move_cache_files_wrapper, source_driver_field, target_driver_field ), bgc=color.color ) # store commands __commands__.extend(render_columnLayout.children()) # ----- PREVIS ------ previs_columnLayout = pm.columnLayout( 'previs_columnLayout', adj=True, cal="center", rs=row_spacing ) with previs_columnLayout: color.reset() pm.button('split_camera_button', l="Split Camera", c=repeated_callback(Previs.split_camera), ann=Previs.split_camera.__doc__, bgc=color.color) color.change() pm.button('shots_from_camera_button', l="Shots From Camera", c=repeated_callback(Previs.shots_from_cams), ann=Previs.shots_from_cams.__doc__, bgc=color.color) color.change() pm.button('auto_rename_shots_button', l="Auto Rename Shots", c=repeated_callback(Previs.auto_rename_shots), ann=Previs.auto_rename_shots.__doc__, bgc=color.color) color.change() pm.button('save_previs_to_shots_button', l="Save Previs To Shots", c=repeated_callback(Previs.save_previs_to_shots), ann=Previs.save_previs_to_shots.__doc__, bgc=color.color) color.change() pm.button('very_nice_camera_rig_button', l="Create a Very Nice Camera Rig", c=repeated_callback(camera_tools.very_nice_camera_rig), ann=camera_tools.very_nice_camera_rig.__doc__, bgc=color.color) # store commands __commands__.extend(previs_columnLayout.children()) # ----- ANIMATION ------ animation_columnLayout = pm.columnLayout( 'animation_columnLayout', adj=True, cal="center", rs=row_spacing ) with animation_columnLayout: color.reset() color.change() from anima.env.mayaEnv import picker pm.text(l='===== Object Picker =====') pm.button('picker_setParent_button', l="Set Parent", c=repeated_callback(picker.set_parent), ann="Set Parent", bgc=color.color) pm.button('picker_releaseObject_button', l="Release", c=repeated_callback(picker.release_object), ann="Release Object", bgc=color.color) pm.button('picker_editKeyframes_button', l="Edit Keyframes", c=repeated_callback(picker.edit_keyframes), ann="Edit Keyframes", bgc=color.color) 
pm.button('picker_fixJump_button', l="Fix Jump", c=repeated_callback(picker.fix_jump), ann="Fix Jump", bgc=color.color) pm.button('picker_explodeSetup_button', l="Explode", c=repeated_callback(picker.explode_setup), ann="Explode Setup", bgc=color.color) color.change() from anima.env.mayaEnv import pivot_switcher pm.text(l='===== Pivot Switcher =====') pm.button('oyPivotSwitcher_setupPivot_button', l="Setup", c=repeated_callback(pivot_switcher.setup_pivot), ann="Setup Pivot", bgc=color.color) pm.button('oyPivotSwitcher_switchPivot_button', l="Switch", c=repeated_callback(pivot_switcher.switch_pivot), ann="Switch Pivot", bgc=color.color) pm.button('oyPivotSwitcher_togglePivot_button', l="Toggle", c=repeated_callback(pivot_switcher.toggle_pivot), ann="Toggle Pivot", bgc=color.color) color.change() pm.text(l='===== Alembic Tools =====') pm.button('bake_all_constraints_button', l="Bake All Constraints", c=repeated_callback(Animation.bake_all_constraints), ann=Animation.bake_all_constraints.__doc__, bgc=color.color) pm.button('bake_alembic_animations_button', l="Bake Alembic Animations", c=repeated_callback(Animation.bake_alembic_animations), ann=Animation.bake_alembic_animations.__doc__, bgc=color.color) rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color) with rowLayout: pm.button( 'abc_from_selected_button', l='From Selected', c=repeated_callback(Animation.create_alembic_command), ann='Creates Alembic Cache from selected nodes', bgc=color.color ) from_top_node_checkBox = pm.checkBox( 'from_top_node_checkBox', l="Top Node", value=True, bgc=color.color ) # pm.button( # 'abc_from_source_to_target_button', # l='Source -> Target', # c=repeated_callback(Animation.copy_alembic_data), # ann='Copy Alembic Data from Source to Target by the matching ' # 'node names', # bgc=color.color # ) # rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color) pm.text(l='===== EXPORT =====') with pm.rowLayout(nc=3, adj=3): pm.checkBoxGrp( 'export_alembic_of_nodes_checkbox_grp', l='Alembic 
Options', numberOfCheckBoxes=2, labelArray2=['Isolate', 'Unload Refs'], cl3=['left', 'left', 'left'], cw3=[100, 60, 60], valueArray2=[1, 1] ) pm.intFieldGrp( 'export_alembic_of_nodes_handles_int_slider_grp', l='Handles', el='frames', nf=1, adj=2, cw3=[65, 1, 20], v1=1, ) def export_alembic_callback_with_options(func): """calls the function with the parameters from the ui :param func: :return: """ isolate, unload_refs = pm.checkBoxGrp( 'export_alembic_of_nodes_checkbox_grp', q=1, valueArray2=1 ) handles = pm.intFieldGrp('export_alembic_of_nodes_handles_int_slider_grp', q=1, v1=1) func(isolate=isolate, unload_refs=unload_refs, handles=handles) pm.button( 'export_alembic_of_selected_cacheable_nodes_button', l='Selected Cacheable Nodes', c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_selected_cacheable_nodes), ann=auxiliary.export_alembic_of_selected_cacheable_nodes.__doc__.split('\n')[0], bgc=color.color ) pm.button( 'export_alembic_of_all_cacheable_nodes_button', l='ALL Cacheable Nodes', c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_all_cacheable_nodes), ann=auxiliary.export_alembic_of_all_cacheable_nodes.__doc__.split('\n')[0], bgc=color.color ) pm.button( 'export_alembic_on_farm_button', l='Export Alembic On Farm', c=repeated_callback(Animation.export_alembics_on_farm), ann=Animation.export_alembics_on_farm.__doc__.split('\n')[0], bgc=color.color ) pm.text(l='===== Playblast Tools =====') color.change() pm.button( 'playblast_on_farm_button', l='PLayblast On Farm', c=repeated_callback(Animation.playblast_on_farm), ann=Animation.playblast_on_farm.__doc__.split('\n')[0], bgc=color.color ) pm.text(l='===== Exporters =====') color.change() rowLayout = pm.rowLayout(nc=3, adj=3, bgc=color.color) with rowLayout: start = int(pm.playbackOptions(q=1, minTime=1)) end = int(pm.playbackOptions(q=1, maxTime=1)) startButtonField = pm.textField( text=start, w=50, bgc=color.color, ann='start frame' ) 
endButtonField = pm.textField( text=end, w=50, bgc=color.color, ann='end frame' ) pm.button(ann="Exports maya camera to nuke", l="cam2chan", w=70, c=repeated_callback( Animation.cam_2_chan, startButtonField, endButtonField ), bgc=color.color) pm.text(l='===== Component Animation =====') color.change() smooth_selected_keyframes_text_fbg = pm.textFieldButtonGrp( 'smooth_selected_keyframes_text_fbg_button', bl="Smooth Selected Keyframes", adj=2, tx=1, cw=(1, 40), ann="select keyframes in graph editor to smooth", bgc=color.color ) def smooth_selected_keyframes_text_fbg_callback(): iteration = int( pm.textFieldButtonGrp( "smooth_selected_keyframes_text_fbg_button", q=1, tx=1 ) ) Animation.smooth_selected_keyframes(iteration) pm.textFieldButtonGrp( smooth_selected_keyframes_text_fbg, e=1, bc=repeated_callback( smooth_selected_keyframes_text_fbg_callback ) ) smooth_component_anim = pm.textFieldButtonGrp( 'oySmoothComponentAnimation_button', bl="Smooth Component Animation", adj=2, tx=1, cw=(1, 40), ann="select components to smooth", bgc=color.color ) pm.textFieldButtonGrp( smooth_component_anim, e=1, bc=repeated_callback( Animation.smooth_component_animation, smooth_component_anim ) ) color.change() pm.button( 'bake_component_animation_button', l='Bake component animation to Locator', c=repeated_callback(Animation.bake_component_animation), ann='Creates a locator at the center of selected components ' 'and moves it with the components along the current ' 'frame range', bgc=color.color ) pm.button( 'create_follicle_button', l='Attach Follicle', c=repeated_callback(Animation.attach_follicle), ann='Attaches a follicle in the selected components', bgc=color.color ) pm.button( 'equalize_node_speed_button', l='Equalize Node Speed', c=repeated_callback(Animation.equalize_node_speed), ann=Animation.equalize_node_speed.__doc__, bgc=color.color ) pm.text(l='===== Generic Tools =====') color.change() pm.button( 'set_range_from_shot_node_button', l='Range From Shot', 
c=repeated_callback(Animation.set_range_from_shot), ann='Sets the playback range from the shot node in the scene', bgc=color.color ) color.change() pm.button( 'delete_base_anim_layer_button', l='Delete Base Anim Layer', c=repeated_callback(Animation.delete_base_anim_layer), ann=Animation.delete_base_anim_layer.__doc__, bgc=color.color ) # store commands __commands__.extend(animation_columnLayout.children()) # Obsolete obsolete_columnLayout = pm.columnLayout( 'obsolete_columnLayout', adj=True, cal="center", ann="Obsolete", rs=row_spacing ) with obsolete_columnLayout: color.reset() pm.button('addMiLabel_button', l="add miLabel to selected", c=repeated_callback(Render.add_miLabel), ann="add miLabel to selected", bgc=color.color) color.change() pm.button('connectFacingRatioToVCoord_button', l="connect facingRatio to vCoord", c=repeated_callback( Render.connect_facingRatio_to_vCoord), ann="connect facingRatio to vCoord", bgc=color.color) color.change() with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherCast_text', l="miFinalGatherCast", bgc=color.color) pm.button('set_miFinalGatherCast_ON_button', l="ON", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherCast", 1 ), bgc=(0, 1, 0)) pm.button('set_miFinalGatherCast_OFF_button', l="OFF", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherCast", 0 ), bgc=(1, 0, 0)) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherReceive_text', l="miFinalGatherReceive", bgc=color.color) pm.button('set_miFinalGatherReceive_ON_button', l="ON", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherReceive", 1 ), bgc=(0, 1, 0)) pm.button('set_miFinalGatherReceive_OFF_button', l="OFF", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherReceive", 0 ), bgc=(1, 0, 0)) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherHide_text', l="miFinalGatherHide", bgc=color.color) pm.button('set_miFinalGatherHide_ON_button', l="ON", 
c=repeated_callback(Render.set_finalGatherHide, 1), bgc=(0, 1, 0)) pm.button('set_miFinalGatherHide_OFF_button', l="OFF", c=repeated_callback(Render.set_finalGatherHide, 0), bgc=(1, 0, 0)) color.change() pm.button('convertToMRTexture_button', l="use mib_texture_filter_lookup", c=repeated_callback( Render.use_mib_texture_filter_lookup), ann=( "adds an mib_texture_filter_lookup node in \n" + "between the file nodes and their outputs, to \n" + "get a sharper look output from the texture file"), bgc=color.color) pm.button('convertToLinear_button', l="convert to Linear texture", c=repeated_callback(Render.convert_to_linear), ann="convert to Linear texture", bgc=color.color) pm.button('useImageSequence_button', l="use image sequence for \nmentalrayTexture", c=repeated_callback(Render.use_image_sequence), ann="use image sequence for \nmentalrayTexture", bgc=color.color) color.change() pm.button('oyAddToSelectedContainer_button', l="add to selected container", c=repeated_callback(Render.add_to_selected_container), ann="add to selected container", bgc=color.color) pm.button('oyRemoveFromContainer_button', l="remove from selected container", c=repeated_callback(Render.remove_from_container), ann="remove from selected container", bgc=color.color) color.change() pm.button('oySmedgeRenderSlicer_button', l="oySmedgeRenderSlicer", c=repeated_callback(mel.eval, 'oySmedgeRenderSlicer'), ann="SmedgeRenderSlicer", bgc=color.color) color.change() pm.button( 'exponentialSmooth_button', l="exponential smooth", c=repeated_callback(Modeling.polySmoothFace, 0), ann="applies exponential smooth to selected objects", bgc=color.color ) pm.button( 'linearSmooth_button', l="linear smooth", c=repeated_callback(Modeling.polySmoothFace, 1), ann="applies linear smooth to selected objects", bgc=color.color ) pm.button( 'deActivateSmooth_button', l="deActivate smooth", c=repeated_callback(Modeling.activate_deActivate_smooth, 1), ann="deActivates all polySmoothFace nodes in the " "scene", 
bgc=color.color ) pm.button( 'activateSmooth_button', l="activate smooth", c=repeated_callback(Modeling.activate_deActivate_smooth, 0), ann="activates all deActivated polySmoothFace nodes " "in the scene", bgc=color.color ) pm.button( 'deleteSmooth_button', l="delete smooth", c=repeated_callback(Modeling.delete_smooth), ann="deletes all the polySmoothFace nodes from the " "scene", bgc=color.color ) pm.button( 'deleteSmoothOnSelected_button', l="delete smooth on selected", c=repeated_callback(Modeling.delete_smooth_on_selected), ann="deletes selected polySmoothFace nodes from scene", bgc=color.color ) color.change() pm.button( 'deleteAllSound_button', l="delete all sound", c=repeated_callback(General.delete_all_sound), ann="delete all sound", bgc=color.color ) pm.button( 'displayHandlesOfSelectedObjects_button', l="toggle handles of selected objects", c=repeated_callback( General.toggle_attributes, "displayHandle" ), ann="select objects to toggle handle", bgc=color.color ) color.change() pm.button( 'referenceSelectedObjects_button', l="reference selected objects", c=repeated_callback( General.reference_selected_objects ), ann="sets objects display override to reference", bgc=color.color ) pm.button( 'dereferenceSelectedObjects_button', l="de-reference selected objects", c=repeated_callback( General.dereference_selected_objects ), ann="sets objects display override to reference", bgc=color.color ) color.change() pm.button( 'oyDeReferencer_button', l="dereferencer", c=repeated_callback(General.dereferencer), ann="sets all objects display override to normal", bgc=color.color ) color.change() enable_matte_row_layout = pm.rowLayout(nc=6, adj=1) with enable_matte_row_layout: pm.text( l='Enable Arnold Matte', ) pm.button( l='Default', c=repeated_callback(Render.enable_matte, 0), ann='Enables Arnold Matte on selected objects with <b>No Color</b>', bgc=color.color ) pm.button( l='R', c=repeated_callback(Render.enable_matte, 1), ann='Enables Arnold Matte on selected objects 
with <b>Red</b>', bgc=[1, 0, 0] ) pm.button( l='G', c=repeated_callback(Render.enable_matte, 2), ann='Enables Arnold Matte on selected objects with <b>Green</b>', bgc=[0, 1, 0] ) pm.button( l='B', c=repeated_callback(Render.enable_matte, 3), ann='Enables Arnold Matte on selected objects with <b>Blue</b>', bgc=[0, 0, 1] ) pm.button( l='A', c=repeated_callback(Render.enable_matte, 4), ann='Enables Arnold Matte on selected objects with <b>Alpha</b>', bgc=[0.5, 0.5, 0.5] ) color.change() pm.button( 'fix_render_layer_out_adjustment_errors_button', l="fixRenderLayerOutAdjustmentErrors", c='pm.mel.eval("fixRenderLayerOutAdjustmentErrors();")', ann="fixRenderLayerOutAdjustmentErrors", bgc=color.color ) pm.separator() color.change() with pm.rowLayout(nc=2, adj=2): apply_to_hierarchy_checkBox = pm.checkBox( 'apply_to_hierarchy_checkBox', l="Apply to Hierarchy", value=True, bgc=color.color ) disable_undo_queue_check_box = pm.checkBox( 'disable_undo_queue_checkBox', l="Disable Undo", value=False, bgc=color.color ) attr_names = [ 'castsShadows', 'receiveShadows', 'motionBlur', 'primaryVisibility', 'visibleInReflections', 'visibleInRefractions', 'aiSelfShadows', 'aiOpaque', 'aiVisibleInDiffuse', 'aiVisibleInGlossy', 'aiMatte', 'overrideShaders' ] for attr_name in attr_names: with pm.rowLayout(nc=4, rat=(1, "both", 0), adj=1): pm.text('%s_text' % attr_name, l=attr_name, bgc=color.color) pm.button( 'set_%s_ON_button' % attr_name, l="ON", c=repeated_callback( set_shape_attribute_wrapper, attr_name, 1, ), bgc=(0, 1, 0) ) pm.button( 'set_%s_OFF_button' % attr_name, l="OFF", c=repeated_callback( set_shape_attribute_wrapper, attr_name, 0 ), bgc=(1, 0, 0) ) pm.button( 'set_%s_REMOVE_button' % attr_name, l="REM", ann='Remove Override', c=repeated_callback( set_shape_attribute_wrapper, attr_name, -1 ), bgc=(0, 0.5, 1) ) pm.separator() color.change() pm.button( l='Setup Z-Layer', c=repeated_callback(Render.create_z_layer), ann=Render.create_z_layer.__doc__, bgc=color.color ) pm.button( 
l='Setup EA Matte', c=repeated_callback(Render.create_ea_matte), ann=Render.create_ea_matte.__doc__, bgc=color.color ) color.change() pm.text(l='===== BarnDoor Simulator =====') pm.button( 'barn_door_simulator_setup_button', l='Setup', c=repeated_callback(Render.barndoor_simulator_setup), ann='Creates a arnold barn door simulator to the selected ' 'light', bgc=color.color ) pm.button( 'barn_door_simulator_unsetup_button', l='Un-Setup', c=repeated_callback(Render.barndoor_simulator_unsetup), ann='Removes the barn door simulator nodes from the selected ' 'light', bgc=color.color ) pm.button( 'fix_barndoors_button', l='Fix BarnDoors', c=repeated_callback(Render.fix_barndoors), ann=Render.fix_barndoors.__doc__, bgc=color.color ) color.change() pm.button( 'ai_skin_sss_to_ai_skin_button', l='aiSkinSSS --> aiSkin', c=repeated_callback(Render.convert_aiSkinSSS_to_aiSkin), ann=Render.convert_aiSkinSSS_to_aiSkin.__doc__, bgc=color.color ) pm.button( 'normalize_sss_weights_button', l='Normalize SSS Weights', c=repeated_callback(Render.normalize_sss_weights), ann=Render.normalize_sss_weights.__doc__, bgc=color.color ) # store commands __commands__.extend(obsolete_columnLayout.children()) pm.tabLayout( main_tab_layout, edit=True, tabLabel=[ (general_column_layout, "Gen"), (reference_columnLayout, "Ref"), (modeling_column_layout, "Mod"), (rigging_columnLayout, "Rig"), (render_columnLayout, "Ren"), (previs_columnLayout, "Prev"), (animation_columnLayout, "Ani"), (obsolete_columnLayout, "Obs") ], cc=functools.partial(store_tab_index, main_tab_layout) ) dock_control = pm.dockControl( "toolbox_dockControl", l='toolbox', content=toolbox_window, area="left", allowedArea=["left", "right"], width=width ) # switch to last tab last_tab_index = get_last_tab_index() if last_tab_index: pm.tabLayout( main_tab_layout, e=1, sti=last_tab_index ) def store_tab_index(tab_layout): val = pm.tabLayout(tab_layout, q=1, sti=1) os.environ[__last_tab__] = str(val) def get_last_tab_index(): """returns the 
last tab index from settings """ return int(os.environ.get(__last_tab__, 0))
37.608459
128
0.48752
import functools import os from anima.env.mayaEnv.animation import Animation from anima.env.mayaEnv.general import General from anima.env.mayaEnv.modeling import Modeling from anima.env.mayaEnv.previs import Previs from anima.env.mayaEnv.reference import Reference from anima.env.mayaEnv.render import Render from anima.env.mayaEnv.rigging import Rigging import pymel.core as pm import maya.mel as mel from anima.env.mayaEnv import auxiliary, camera_tools __last_commands__ = [] __last_tab__ = 'ANIMA_TOOLBOX_LAST_TAB_INDEX' __commands__ = [] def repeater(index): global __last_commands__ try: call_data = __last_commands__[index] return call_data[0](*call_data[1], **call_data[2]) except IndexError: return None def repeat_last(call_data): global __last_commands__ index = len(__last_commands__) callable_ = call_data[0] args = call_data[1] kwargs = call_data[2] command = \ 'print \\"\\";python(\\\"from anima.env.mayaEnv.toolbox import ' \ 'repeater; repeater(%s);\\\");' % index repeat_last_command = 'repeatLast -ac "%(command)s" -acl "%(label)s";' % { 'command': command, 'label': callable_.__name__ } print(repeat_last_command) pm.mel.eval(repeat_last_command) __last_commands__.append(call_data) callable_(*args, **kwargs) def repeated_callback(callable_, *args, **kwargs): return pm.Callback( repeat_last, [callable_, args, kwargs] ) class Color(object): colors = [ (1.000, 0.500, 0.666), (1.000, 0.833, 0.500), (0.666, 1.000, 0.500), (0.500, 1.000, 0.833), (0.500, 0.666, 1.000), (0.833, 0.500, 1.000) ] def __init__(self, index=0): self.index = index self.max_colors = len(self.colors) def change(self): self.index = int((self.index + 1) % self.max_colors) def reset(self): self.index = 0 @property def color(self): return self.colors[self.index] def filter_tools(search_text): for command in __commands__: uitype = command.type() if uitype == 'button': label = command.getLabel() if search_text.lower() not in label.lower(): command.setVisible(False) else: command.setVisible(True) elif 
uitype == 'rowLayout': children = command.children() matched_children = False for c in children: c_uitype = c.type() if c_uitype in ['button', 'staticText'] and \ search_text in c.getLabel().lower(): matched_children = True break if not matched_children: command.setVisible(False) else: command.setVisible(True) def UI(): width = 260 height = 650 row_spacing = 3 color = Color() global __commands__ __commands__ = [] if pm.dockControl("toolbox_dockControl", q=True, ex=True): pm.deleteUI("toolbox_dockControl") window_name = "toolbox_window" if pm.window(window_name, q=True, ex=True): pm.deleteUI(window_name, wnd=True) toolbox_window = pm.window( window_name, wh=(width, height), title="Anima ToolBox" ) main_form_layout = pm.formLayout( 'main_form_layout', nd=100, parent=toolbox_window ) search_field = pm.textField( 'search_text_field', tcc=filter_tools, placeholderText='Search...', parent=main_form_layout ) main_tab_layout = pm.tabLayout( 'main_tab_layout', scr=True, cr=True, parent=main_form_layout ) pm.formLayout( main_form_layout, edit=True, attachForm=[ (search_field, "top", 0), (search_field, "left", 0), (search_field, "right", 0), (main_tab_layout, "bottom", 0), (main_tab_layout, "left", 0), (main_tab_layout, "right", 0) ], attachNone=[ (search_field, "bottom") ], attachControl=[ (main_tab_layout, "top", 0, search_field) ] ) with main_tab_layout: general_column_layout = pm.columnLayout( 'general_column_layout', adj=True, cal="center", rs=row_spacing ) with general_column_layout: color.change() pm.button( 'open_version_button', l="Open Version", c=repeated_callback(General.version_dialog, mode=1), ann="Open Version", bgc=color.color ) pm.button( 'save_as_version_button', l="Save As Version", c=repeated_callback(General.version_dialog, mode=0), ann="Save As Version", bgc=color.color ) color.change() pm.button( 'selectionManager_button', l="Selection Manager", c=repeated_callback(General.selection_manager), ann="Selection Manager", bgc=color.color ) color.change() 
pm.button( 'publishChecker_button', l="Publish Checker", c=repeated_callback(General.publish_checker), ann="Publish Checker", bgc=color.color ) color.change() pm.button( 'rename_unique_button', l='Rename Unique', c=repeated_callback(General.rename_unique), ann=General.rename_unique.__doc__, bgc=color.color ) pm.button( 'removeColonFromNames_button', l="remove colon(:) from node names", c=repeated_callback(General.remove_colon_from_names), ann="removes the colon (:) character from all " "selected object names", bgc=color.color ) pm.button( 'removePastedFromNames_button', l="remove \"pasted_\" from node names", c=repeated_callback(General.remove_pasted), ann="removes the \"passed__\" from all selected " "object names", bgc=color.color ) color.change() pm.button( 'togglePolyMeshes_button', l="toggle polymesh visibility", c=repeated_callback(General.toggle_poly_meshes), ann="toggles the polymesh display in the active model " "panel", bgc=color.color ) color.change() pm.button( 'selectSetMembers_button', l="select set members", c=repeated_callback(General.select_set_members), ann="selects the selected set members in correct " "order", bgc=color.color ) color.change() pm.button( 'delete_unused_intermediate_shapes_button', l='Delete Unused Intermediate Shape Nodes', c=repeated_callback(General.delete_unused_intermediate_shapes), ann='Deletes unused (no connection) intermediate shape nodes', bgc=color.color ) color.change() pm.button( 'export_transform_info_button', l='Export Transform Info', c=repeated_callback(General.export_transform_info), ann='exports transform info', bgc=color.color ) pm.button( 'import_transform_info_button', l='Import Transform Info', c=repeated_callback(General.import_transform_info), ann='imports transform info', bgc=color.color ) color.change() pm.button( 'export_global_transform_info_button', l='Export Global Transform Info', c=repeated_callback(General.export_transform_info, True), ann='exports global transform info', bgc=color.color ) 
pm.button( 'import_global_transform_info_button', l='Import Global Transform Info', c=repeated_callback(General.import_transform_info, True), ann='imports global transform info', bgc=color.color ) color.change() pm.button( 'export_component_transform_info_button', l='Export Component Transform Info', c=repeated_callback(General.export_component_transform_info), ann='exports component transform info', bgc=color.color ) pm.button( 'import_component_transform_info_button', l='Import Component Transform Info', c=repeated_callback(General.import_component_transform_info), ann='imports component transform info', bgc=color.color ) color.change() pm.button( 'import_rsproxy_data_from_houdini_button', l='Import RSProxy Data From Houdini', c=repeated_callback(General.rsproxy_data_importer), ann=General.rsproxy_data_importer.__doc__, bgc=color.color ) color.change() pm.button( 'generate_thumbnail_button', l='Generate Thumbnail', c=repeated_callback(General.generate_thumbnail), ann='Generates thumbnail for current scene', bgc=color.color ) color.change() pm.button( 'cleanup_light_cameras_button', l='Cleanup Light Cameras', c=repeated_callback(General.cleanup_light_cameras), ann=General.cleanup_light_cameras.__doc__, bgc=color.color ) color.change() from anima.env.mayaEnv.general import unknown_plugin_cleaner_ui pm.button( 'cleanup_plugins_button', l='Cleanup Unknown Plugins', c=repeated_callback(unknown_plugin_cleaner_ui), ann=unknown_plugin_cleaner_ui.__doc__, bgc=color.color ) color.change() pm.button( 'unshape_parent_node_button', l='Unshape Parent Nodes', c=repeated_callback(General.unshape_parent_nodes), ann=General.unshape_parent_nodes.__doc__, bgc=color.color ) __commands__.extend(general_column_layout.children()) reference_columnLayout = pm.columnLayout( 'reference_columnLayout', adj=True, cal="center", rs=row_spacing) with reference_columnLayout: color.reset() pm.text(l='===== Reference Tools =====') pm.button( 'nsDelete_button', l="nsDelete", 
c=repeated_callback(General.namespace_deleter), ann=General.namespace_deleter.__doc__, bgc=color.color ) color.change() pm.button( 'duplicate_selected_reference_button', l='Duplicate Selected Reference', c=repeated_callback(Reference.duplicate_selected_reference), ann='Duplicates the selected reference', bgc=color.color ) color.change() pm.button( 'select_reference_in_reference_editor_button', l='Select Reference In Reference Editor', c=repeated_callback( Reference.select_reference_in_reference_editor ), ann=Reference.select_reference_in_reference_editor.__doc__, bgc=color.color ) color.change() pm.button( 'get_selected_reference_path_button', l='Get Selected Reference Path', c=repeated_callback(Reference.get_selected_reference_path), ann='Prints the selected reference full path', bgc=color.color ) pm.button( 'open_selected_reference_button', l='Open Selected Reference in New Maya', c=repeated_callback(Reference.open_reference_in_new_maya), ann='Opens the selected reference in new Maya ' 'instance', bgc=color.color ) color.change() pm.button( 'publish_model_as_look_dev_button', l='Model -> LookDev', c=repeated_callback(Reference.publish_model_as_look_dev), ann='References the current Model scene to the LookDev scene ' 'of the same task, creates the LookDev scene if ' 'necessary, also reopens the current model scene.', bgc=color.color ) color.change() pm.button( 'fix_reference_namespace_button', l='Fix Reference Namespace', c=repeated_callback(Reference.fix_reference_namespace), ann='Fixes old style reference namespaces with new one, ' 'creates new versions if necessary.', bgc=color.color ) color.change() pm.button( 'fix_reference_paths_button', l='Fix Reference Paths', c=repeated_callback(Reference.fix_reference_paths), ann='Fixes reference paths deeply, so they will use' '$REPO env var.', bgc=color.color ) pm.button( 'fix_student_license_on_references_button', l='Fix Student License Error On References', c=repeated_callback( 
Reference.fix_student_license_on_references ), ann=Reference.fix_student_license.__doc__, bgc=color.color ) pm.button( 'fix_student_license_on_files_button', l='Fix Student License Error On Selected Files', c=repeated_callback( Reference.fix_student_license_on_selected_file ), ann=Reference.fix_student_license.__doc__, bgc=color.color ) color.change() pm.button( 'archive_button', l='Archive Current Scene', c=repeated_callback(Reference.archive_current_scene), ann='Creates a ZIP file containing the current scene and its' 'references in a flat Maya default project folder ' 'structure', bgc=color.color ) pm.button( 'bind_to_original_button', l='Bind To Original', c=repeated_callback(Reference.bind_to_original), ann='Binds the current local references to the ones on the ' 'repository', bgc=color.color ) pm.button( 'unload_selected_references_button', l='Unload Selected References', c=repeated_callback(Reference.unload_selected_references), ann='Unloads the highest references that is related with the selected objects', bgc=color.color ) pm.button( 'unload_unselected_references_button', l='Unload UnSelected References', c=repeated_callback(Reference.unload_unselected_references), ann='Unloads any references that is not related with the ' 'selected objects', bgc=color.color ) color.change() pm.button( 'remove_selected_references_button', l='Remove Selected References', c=repeated_callback(Reference.remove_selected_references), ann='Removes the highest references that is related with the selected objects', bgc=color.color ) color.change() pm.text(l='===== Representation Tools =====') with pm.rowLayout(nc=2, adj=1): pm.checkBoxGrp( 'generate_repr_types_checkbox_grp', l='Reprs', numberOfCheckBoxes=3, labelArray3=['GPU', 'ASS', 'RS'], cl4=['left', 'left', 'left', 'left'], cw4=[51, 50, 50, 50], valueArray3=[1, 1, 1] ) pm.checkBox( 'generate_repr_skip_existing_checkBox', l='Skip existing Reprs.', value=0 ) pm.button( 'generate_repr_of_all_references_button', l='Deep Generate 
Repr Of All References', c=repeated_callback( Reference.generate_repr_of_all_references_caller ), ann='Deeply generates desired Representations of all ' 'references of this scene', bgc=color.color ) pm.button( 'generate_repr_of_scene_button', l='Generate Repr Of This Scene', c=repeated_callback(Reference.generate_repr_of_scene_caller), ann='Generates desired Representations of this scene', bgc=color.color ) color.change() with pm.rowLayout(nc=2, adj=1): pm.radioButtonGrp( 'repr_apply_to_radio_button_grp', l='Apply To', labelArray2=['Selected', 'All References'], numberOfRadioButtons=2, cl3=['left', 'left', 'left'], cw3=[50, 65, 65], sl=1 ) pm.button( 'to_base_button', l='To Base', c=repeated_callback(Reference.to_base), ann='Convert selected to Base representation', bgc=color.color ) pm.button( 'to_gpu_button', l='To GPU', c=repeated_callback(Reference.to_gpu), ann='Convert selected to GPU representation', bgc=color.color ) pm.button( 'to_ass_button', l='To ASS', c=repeated_callback(Reference.to_ass), ann='Convert selected to ASS representation', bgc=color.color ) pm.button( 'to_rs_button', l='To RS', c=repeated_callback(Reference.to_rs), ann='Convert selected to RS representation', bgc=color.color ) color.change() pm.button( 'update_alembic_references_button', l='Update Alembic References', c=repeated_callback(auxiliary.update_alembic_references), ann=auxiliary.update_alembic_references.__doc__, bgc=color.color ) __commands__.extend(reference_columnLayout.children()) modeling_column_layout = pm.columnLayout( 'modeling_column_layout', adj=True, cal="center", rs=row_spacing) with modeling_column_layout: color.reset() pm.button('toggleFaceNormalDisplay_button', l="toggle face normal display", c=repeated_callback( pm.runtime.ToggleFaceNormalDisplay), ann="toggles face normal display", bgc=color.color) pm.button('reverseNormals_button', l="reverse normals", c=repeated_callback(Modeling.reverse_normals), ann="reverse normals", bgc=color.color) 
pm.button('fixNormals_button', l="fix normals", c=repeated_callback(Modeling.fix_normals), ann="applies setToFace then conform and then " "soften edge to all selected objects", bgc=color.color) color.change() pm.button( 'oyHierarchyInstancer_button', l="hierarchy_instancer on selected", c=repeated_callback(Modeling.hierarchy_instancer), ann="hierarchy_instancer on selected", bgc=color.color ) color.change() pm.button( 'relax_verts_button', l="Relax Vertices", c=repeated_callback(Modeling.relax_vertices), ann="opens relax_vertices", bgc=color.color ) with pm.rowLayout(nc=4, adj=1): def smooth_edges_callback(): iteration = pm.intSliderGrp( "smooth_edges_iteration_intField", q=1, v=1 ) Modeling.smooth_edges(iteration=iteration) pm.button( 'smooth_edges_button', l="Smooth Edges", c=repeated_callback(smooth_edges_callback), ann=Modeling.smooth_edges.__doc__, bgc=color.color ) pm.intSliderGrp( 'smooth_edges_iteration_intField', v=100, min=0, max=100 ) color.change() pm.button( 'create_curve_from_mesh_edges_button', l="Curve From Mesh Edges", c=repeated_callback(Modeling.create_curve_from_mesh_edges), ann="Creates a curve from selected mesh edges", bgc=color.color ) color.change() pm.button( 'vertex_aligned_locator_button', l="Vertex Aligned Locator", c=repeated_callback(Modeling.vertex_aligned_locator), ann="Creates an aligned locator from selected vertices", bgc=color.color ) color.change() with pm.rowLayout(nc=8, rat=(1, "both", 0), adj=1): pm.text('set_pivot_text', l='Set Pivot', bgc=color.color) pm.button( 'center_button', l="C", c=repeated_callback( Modeling.set_pivot, 0 ), bgc=(0.8, 0.8, 0.8) ) pm.button( 'minus_X_button', l="-X", c=repeated_callback( Modeling.set_pivot, 1 ), bgc=(1.000, 0.500, 0.666) ) pm.button( 'plus_X_button', l="+X", c=repeated_callback( Modeling.set_pivot, 2 ), bgc=(1.000, 0.500, 0.666) ) pm.button( 'minus_Y_button', l="-Y", c=repeated_callback( Modeling.set_pivot, 3 ), bgc=(0.666, 1.000, 0.500) ) pm.button( 'plus_Y_button', l="+Y", 
c=repeated_callback( Modeling.set_pivot, 4 ), bgc=(0.666, 1.000, 0.500) ) pm.button( 'minus_Z_button', l="-X", c=repeated_callback( Modeling.set_pivot, 5 ), bgc=(0.500, 0.666, 1.000) ) pm.button( 'plus_Z_button', l="+X", c=repeated_callback( Modeling.set_pivot, 6 ), bgc=(0.500, 0.666, 1.000) ) color.change() with pm.rowLayout(nc=7, rat=(1, "both", 0), adj=1): pm.text(l='Text. Res', bgc=color.color) pm.button( l="128", c=repeated_callback( Modeling.set_texture_res, 128 ), bgc=Color.colors[0] ) pm.button( l="256", c=repeated_callback( Modeling.set_texture_res, 256 ), bgc=Color.colors[1] ) pm.button( l="512", c=repeated_callback( Modeling.set_texture_res, 512 ), bgc=Color.colors[2] ) pm.button( l="1024", c=repeated_callback( Modeling.set_texture_res, 1024 ), bgc=Color.colors[3] ) pm.button( l='2048', c=repeated_callback( Modeling.set_texture_res, 2048 ), bgc=Color.colors[4] ) pm.button( l='4096', c=repeated_callback( Modeling.set_texture_res, 4096 ), bgc=Color.colors[5] ) pm.text(l='========== UV Tools =============') color.change() pm.button( 'fix_uvsets_button', l="Fix UVSets (DiffuseUV -> map1)", c=repeated_callback(Modeling.fix_uvsets), ann=Modeling.fix_uvsets, bgc=color.color ) color.change() pm.button( 'select_zero_uv_area_faces_button', l="Filter Zero UV Area Faces", c=repeated_callback(Modeling.select_zero_uv_area_faces), ann="Selects faces with zero uv area", bgc=color.color ) color.change() pm.button( 'create_auto_uvmap_button', l='Create Auto UVMap', c=repeated_callback(Modeling.create_auto_uvmap), ann=Modeling.create_auto_uvmap.__doc__, bgc=color.color ) with pm.rowLayout(nc=6, adj=1): def transfer_uvs_button_callback(*args, **kwargs): label_lut = { 'W': 0, 'L': 1, 'UV': 2, 'C': 3, 'T': 4 } sample_space = label_lut[ pm.radioCollection( 'transfer_uvs_radio_collection', q=1, sl=1 ) ] Modeling.transfer_uvs(sample_space=sample_space) pm.button('transfer_uvs_button', l="Transfer UVs", c=repeated_callback(transfer_uvs_button_callback), ann="Transfers UVs from 
one group to other, use it" "for LookDev -> Alembic", bgc=color.color) pm.radioCollection('transfer_uvs_radio_collection') button_with = 40 pm.radioButton( 'W', w=button_with, al='left', ann='World' ) pm.radioButton( 'L', w=button_with, al='left', ann='Local' ) pm.radioButton( 'UV', w=button_with, al='left', ann='UV' ) pm.radioButton( 'C', w=button_with, al='left', ann='Component', sl=1 ) pm.radioButton( 'T', w=button_with, al='left', ann='Topology' ) color.change() pm.text(l='======= Manipulator Tools =======') pm.button('set_to_point_button', l="Set To Point", c=repeated_callback(pm.mel.eval, "manipMoveOrient 1;"), ann="Set manipulator to the point", bgc=color.color) pm.button('set_to_edge_button', l="Set To Edge", c=repeated_callback(pm.mel.eval, "manipMoveOrient 2;"), ann="Set manipulator to the edge", bgc=color.color) pm.button('set_to_face_button', l="Set To Face", c=repeated_callback(pm.mel.eval, "manipMoveOrient 3;"), ann="Set manipulator to the face", bgc=color.color) color.change() pm.button('create_bbox_from_selection_button', l="Create BBOX from selection", c=repeated_callback(Modeling.bbox_from_selection), ann=Modeling.bbox_from_selection.__doc__, bgc=color.color) __commands__.extend(modeling_column_layout.children()) rigging_columnLayout = pm.columnLayout( 'rigging_columnLayout', adj=True, cal="center", rs=row_spacing ) with rigging_columnLayout: color.reset() pm.button( 'create_joints_on_curve_ui_button', l="Create Joints On Curve UI", c=repeated_callback(Rigging.create_joints_on_curve_ui), ann=Rigging.create_joints_on_curve_ui.__doc__, bgc=color.color ) pm.button( 'mirror_transformation_button', l="Mirror Transformation", c=repeated_callback(Rigging.mirror_transformation), ann=Rigging.mirror_transformation.__doc__, bgc=color.color ) color.change() pm.button( 'IKFKLimbRigger_button', l="IK/FK Limb Rigger", c=repeated_callback(Rigging.ik_fk_limb_rigger), ann=Rigging.ik_fk_limb_rigger.__doc__, bgc=color.color ) with pm.rowLayout(nc=2, adj=1): def 
ik_fk_limb_rigger_callback(): subdivision = pm.intField('bendy_ik_fk_subdivision_count_field', q=1, v=1) Rigging.bendy_ik_fk_limb_rigger(subdivision=subdivision) pm.button( 'bendy_ik_fk_limb_rigger_button', l='IK/FK Limb Rigger (Bendy)', c=repeated_callback(ik_fk_limb_rigger_callback), ann=Rigging.bendy_ik_fk_limb_rigger.__doc__, bgc=color.color ) pm.intField('bendy_ik_fk_subdivision_count_field', min=0, v=2) pm.button( 'ReverseFootRigger_button', l="Reverse Foot Rigger", c=repeated_callback(Rigging.reverse_foot_rigger), ann=Rigging.reverse_foot_rigger.__doc__, bgc=color.color ) pm.button( 'squashStretchBendRigger_button', l="Squash/Stretch/Bend Rigger", c=repeated_callback(Rigging.squash_stretch_bend_rigger), ann=Rigging.squash_stretch_bend_rigger.__doc__, bgc=color.color ) pm.button( 'setupStretchySplineIKCurve_button', l="setup stretchy splineIK curve", c=repeated_callback(Rigging.setup_stretchy_spline_ik_curve), ann="connects necessary nodes to calculate arcLength " "change in percent", bgc=color.color ) pm.button( 'selectJointsDeformingTheObject_button', l="select joints deforming the object", c=repeated_callback(Rigging.select_joints_deforming_object), ann="select joints that deform the object", bgc=color.color ) color.change() pm.button( 'create_axial_correction_group_button', l="Create Axial Correction Groups", c=repeated_callback(Rigging.axial_correction_group), ann=Rigging.axial_correction_group.__doc__, bgc=color.color ) pm.button( 'create_zv_parent_compatible_groups_button', l="Create ZV Parent Compatible Groups", c=repeated_callback(Rigging.create_zv_parent_compatible_groups), ann=Rigging.axial_correction_group.__doc__, bgc=color.color ) color.change() pm.button( 'setClustersToAbsolute_button', l="set selected clusters to absolute", c=repeated_callback(Rigging.set_clusters_relative_state, 0), ann="set Clusters to Absolute", bgc=color.color ) pm.button( 'setClustersToRelative_button', l="set selected clusters to relative", c=repeated_callback( 
Rigging.set_clusters_relative_state, 1 ), ann="set Clusters to Relative", bgc=color.color ) color.change() pm.button( 'addControllerShape_button', l="add controller shape", c=repeated_callback(Rigging.add_controller_shape), ann="add the shape in the selected joint", bgc=color.color ) pm.button( 'replaceControllerShape_button', l="replace controller shape", c=repeated_callback(Rigging.replace_controller_shape), ann="replaces the shape in the selected joint", bgc=color.color ) color.change() def pin_controller_callback(color, *args): from anima.env.mayaEnv import rigging vertex = pm.ls(sl=1)[0] pc = rigging.PinController() pc.color = color pc.pin_to_vertex = vertex pc.setup() with pm.rowLayout(nc=4, adj=1): pm.text(l="Pin Controller") pm.button('pin_controller_red_button', l="R", c=repeated_callback(pin_controller_callback, [1, 0, 0]), ann=pin_controller_callback.__doc__, bgc=[1, 0, 0]) pm.button('pin_controller_green_button', l="G", c=repeated_callback(pin_controller_callback, [0, 1, 0]), ann=pin_controller_callback.__doc__, bgc=[0, 1, 0]) pm.button('pin_controller_blue_button', l="B", c=repeated_callback(pin_controller_callback, [0, 0, 1]), ann=pin_controller_callback.__doc__, bgc=[0, 0, 1]) pm.button('rivet_button', l="create rivet", c=repeated_callback(mel.eval, 'rivet'), ann="create rivet", bgc=color.color) pm.button('oyAutoRivet_button', l="auto rivet", c=repeated_callback(mel.eval, 'oyAutoRivet'), ann="auto rivet", bgc=color.color) pm.button( 'oyAutoRivetFollicle_button', l="auto rivet (Follicle)", c=repeated_callback(auxiliary.auto_rivet), ann="creates a rivet setup by using hair follicles", bgc=color.color ) pm.button( 'rivet_per_face_button', l="rivet per face (Follicle)", c=repeated_callback(auxiliary.rivet_per_face), ann="creates a rivet setup per selected face by using hair " "follicles", bgc=color.color ) pm.button('create_hair_from_curves_button', l="Create Hair From Curves", c=repeated_callback(auxiliary.hair_from_curves), ann="creates hair from 
curves", bgc=color.color) color.change() pm.button('artPaintSkinWeightsTool_button', l="paint weights tool", c=repeated_callback(mel.eval, 'ArtPaintSkinWeightsTool'), ann="paint weights tool", bgc=color.color) def skin_tools_ui_caller(*args): from anima.env.mayaEnv.rigging import SkinToolsUI st = SkinToolsUI() st.ui() pm.button('skin_tools_button', l="Skin Tools", c=skin_tools_ui_caller, ann="skin tools", bgc=color.color) pm.button('oyFixBoundJoint_button', l="fix_bound_joint", c=repeated_callback(Rigging.fix_bound_joint), ann="fix_bound_joint", bgc=color.color) pm.button('toggle_local_rotation_axes_button', l="Toggle Local Rotation Axes", c=repeated_callback(General.toggle_attributes, "displayLocalAxis"), ann="Toggle Local Rotation Axes", bgc=color.color) pm.button('toggle_display_rotate_pivot_button', l="Toggle Display Rotate Pivot", c=repeated_callback(General.toggle_attributes, "displayRotatePivot"), ann="Toggle Display Rotate Pivot", bgc=color.color) pm.button('seroBlendController_button', l="seroBlendController", c=repeated_callback(mel.eval, 'seroBlendController'), ann="seroBlendController", bgc=color.color) pm.button('align_to_pole_vector_button', l="Align To Pole Vector", c=repeated_callback(auxiliary.align_to_pole_vector), ann="align to pole vector", bgc=color.color) color.change() pm.button('oyResetCharSet_button', l="oyResetCharSet", c=repeated_callback(mel.eval, 'oyResetCharSet'), ann="reset char set", bgc=color.color) pm.button('export_blend_connections_button', l="Export blend connections", c=repeated_callback(auxiliary.export_blend_connections), ann="export blend connections", bgc=color.color) color.change() pm.button('createFollicles_button', l="create follicles", c=repeated_callback(Rigging.create_follicles), ann="create follicles", bgc=color.color) color.change() pm.button('oyResetTweaks_button', l="reset tweaks", c=repeated_callback(Rigging.reset_tweaks), ann="reset tweaks", bgc=color.color) color.change() def add_cacheable_attribute_callback(): 
for node in pm.selected(): Rigging.add_cacheable_attribute(node) pm.button('add_cacheable_attr_button', l="add `cacheable` attribute", c=repeated_callback(add_cacheable_attribute_callback), ann=add_cacheable_attribute_callback.__doc__, bgc=color.color) __commands__.extend(rigging_columnLayout.children()) render_columnLayout = pm.columnLayout( 'render_columnLayout', adj=True, cal="center", rs=row_spacing ) with render_columnLayout: color.reset() color.change() pm.button( 'update_render_settings_button', l="Update Render Settings", c=repeated_callback(Render.update_render_settings), ann=Render.update_render_settings.__doc__, bgc=color.color ) color.change() pm.button( 'delete_render_layers_button', l="Delete Render Layers", c=repeated_callback(Render.delete_render_layers), ann=Render.delete_render_layers.__doc__, bgc=color.color ) pm.button( 'delete_display_layers_button', l="Delete Display Layers", c=repeated_callback(Render.delete_display_layers), ann=Render.delete_display_layers.__doc__, bgc=color.color ) pm.button( 'delete_render_and_display_layers_button', l="Delete Render and Display Layers", c=repeated_callback(Render.delete_render_and_display_layers), ann=Render.delete_render_and_display_layers.__doc__, bgc=color.color ) color.change() pm.button( 'delete_unused_shading_nodes_button', l="Delete Unused Shading Nodes", c=repeated_callback(Render.delete_unused_shading_nodes), ann=Render.delete_unused_shading_nodes.__doc__, bgc=color.color ) color.change() pm.button( 'duplicate_input_graph_button', l="Duplicate Input Graph", c=repeated_callback(Render.duplicate_input_graph), ann=Render.duplicate_input_graph.__doc__, bgc=color.color ) pm.button( 'duplicate_with_connections_button', l="Duplicate With Connections To Network", c=repeated_callback(Render.duplicate_with_connections), ann=Render.duplicate_with_connections.__doc__, bgc=color.color ) color.change() pm.text(l='=========== RedShift Tools ===========') pm.button( 'generate_rs_from_selection_button', 
l='Generate RSProxy From Selection', c=repeated_callback(Render.generate_rsproxy_from_selection), ann=Render.generate_rsproxy_from_selection.__doc__, bgc=color.color ) pm.button( 'generate_rs_from_selection_per_selection_button', l='Generate RSProxy From Selection (Per Selection)', c=repeated_callback(Render.generate_rsproxy_from_selection, True), ann=Render.generate_rsproxy_from_selection.__doc__, bgc=color.color ) pm.button( 'set_rsproxy_to_bbox_button', l='RSProxy -> Bounding Box', c=repeated_callback(Render.rsproxy_to_bounding_box), ann=Render.rsproxy_to_bounding_box.__doc__, bgc=color.color ) pm.button( 'set_rsproxy_to_preview_mesh_button', l='RSProxy -> Preview Mesh', c=repeated_callback(Render.rsproxy_to_preview_mesh), ann=Render.rsproxy_to_preview_mesh.__doc__, bgc=color.color ) color.change() pm.text(l='===== RedShift IC + IPC Bake =====') pm.button( 'redshift_ic_ipc_bake_button', l="Do Bake", c=repeated_callback(Render.redshift_ic_ipc_bake), ann=Render.redshift_ic_ipc_bake.__doc__, bgc=color.color ) pm.button( 'redshift_ic_ipc_bake_restore_button', l="Restore Settings", c=repeated_callback(Render.redshift_ic_ipc_bake_restore), ann=Render.redshift_ic_ipc_bake_restore.__doc__, bgc=color.color ) pm.text(l='======================================') color.change() pm.button( 'submit_afanasy_button', l="Afanasy Job Submitter", c=repeated_callback(Render.afanasy_job_submitter), ann=Render.afanasy_job_submitter.__doc__, bgc=color.color ) color.change() pm.button( 'open_node_in_browser_button', l="Open node in browser", c=repeated_callback(Render.open_node_in_browser), ann="Open node in browser", bgc=color.color ) color.change() pm.button('auto_convert_to_redshift_button', l="Auto Convert Scene To RedShift (BETA)", c=repeated_callback(Render.auto_convert_to_redshift), ann="Automatically converts the scene from Arnold to " "Redshift, including materials and lights", bgc=color.color) pm.button('convert_nodes_to_redshift_button', l="Convert Selected To RedShift 
(BETA)", c=repeated_callback(Render.convert_nodes_to_redshift), ann="Automatically converts the selected node from " "Arnold to Redshift", bgc=color.color) def set_shape_attribute_wrapper(attr_name, value): apply_to_hierarchy = pm.checkBox( apply_to_hierarchy_checkBox, q=True, v=True ) disable_undo = pm.checkBox( disable_undo_queue_check_box, q=True, v=True ) Render.set_shape_attribute( attr_name, value, apply_to_hierarchy, disable_undo ) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('renderThumbnailUpdate_text', l="renderThumbnailUpdate", bgc=color.color) pm.button('set_renderThumbnailUpdate_ON_button', l="ON", c=repeated_callback(pm.renderThumbnailUpdate, 1), bgc=(0, 1, 0)) pm.button('set_renderThumbnailUpdate_OFF_button', l="OFF", c=repeated_callback(pm.renderThumbnailUpdate, 0), bgc=(1, 0, 0)) color.change() pm.button('replaceShadersWithLast_button', l="replace shaders with last", c=repeated_callback(Render.replace_shaders_with_last), ann="replace shaders with last", bgc=color.color) color.change() pm.button('createTextureRefObject_button', l="create texture ref. object", c=repeated_callback(Render.create_texture_ref_object), ann="create texture ref. 
object", bgc=color.color) pm.text(l='========== Texture Tools =============') color.change() pm.button('assign_substance_textures_button', l="Assign Substance Textures", c=repeated_callback(Render.assign_substance_textures), ann=Render.assign_substance_textures.__doc__, bgc=color.color) color.change() pm.button('normalize_texture_paths_button', l="Normalize Texture Paths (remove $)", c=repeated_callback(Render.normalize_texture_paths), ann=Render.normalize_texture_paths.__doc__, bgc=color.color) pm.button('unnormalize_texture_paths_button', l="Unnormalize Texture Paths (add $)", c=repeated_callback(Render.unnormalize_texture_paths), ann=Render.unnormalize_texture_paths.__doc__, bgc=color.color) color.change() pm.button('assign_random_material_color_button', l="Assign Material with Random Color", c=repeated_callback(Render.assign_random_material_color), ann=Render.assign_random_material_color.__doc__, bgc=color.color) pm.button('randomize_material_color_button', l="Randomize Material Color", c=repeated_callback(Render.randomize_material_color), ann=Render.randomize_material_color.__doc__, bgc=color.color) color.change() pm.button('import_image_as_plane_button', l="Import Image as Plane", c=repeated_callback(Render.import_image_as_plane), ann=Render.import_image_as_plane.__doc__, bgc=color.color) pm.text(l='============ Camera Tools ============') color.change() pm.button( 'CameraFilmOffsetTool_button', l="Camera Film Offset Tool", c=repeated_callback( camera_tools.camera_film_offset_tool ), ann="Camera Film Offset Tool", bgc=color.color ) def camera_focus_plane_tool_callback(): camera = pm.ls(sl=1)[0] camera_tools.camera_focus_plane_tool(camera) pm.button( 'CameraFocusPlaneTool_button', l="Camera Focus Plane Tool", c=repeated_callback(camera_focus_plane_tool_callback), ann="Camera Film Offset Tool", bgc=color.color ) pm.button( 'lock_tracked_camera_channels_button', l="Lock Tracked Camera Channels", c=repeated_callback(camera_tools.lock_tracked_camera_channels), 
ann=camera_tools.lock_tracked_camera_channels.__doc__, bgc=color.color ) color.change() pm.text(l='===== Vertigo =====') pm.button('vertigo_setup_look_at_button', l="Setup -> Look At", c=repeated_callback(Render.vertigo_setup_look_at), ann="Setup Look At", bgc=color.color) pm.button('vertigo_setup_vertigo_button', l="Setup -> Vertigo", c=repeated_callback(Render.vertigo_setup_vertigo), ann="Setup Vertigo", bgc=color.color) pm.button('vertigo_delete_button', l="Delete", c=repeated_callback(Render.vertigo_delete), ann="Delete", bgc=color.color) pm.text(l='===================') pm.button('oyTracker2Null_button', l="oyTracker2Null", c=repeated_callback(mel.eval, 'oyTracker2Null'), ann="Tracker2Null", bgc=color.color) with pm.rowLayout(nc=3, adj=1): def import_3dequalizer_points_callback(): cam_width = pm.intField('import_3DEqualizer_points_width_int_field', q=1, v=1) cam_height = pm.intField('import_3DEqualizer_points_height_int_field', q=1, v=1) camera_tools.import_3dequalizer_points(cam_width, cam_height) pm.button( 'import_3DEqualizer_points_button', l="Import 3DEqualizer Points", c=repeated_callback(import_3dequalizer_points_callback), ann=camera_tools.import_3dequalizer_points.__doc__, bgc=color.color ) pm.intField('import_3DEqualizer_points_width_int_field', min=1, v=1920) pm.intField('import_3DEqualizer_points_height_int_field', min=1, v=1080) pm.text(l='===================') color.change() pm.button('reloadFileTextures_button', l="reload file textures", c=repeated_callback(Render.reload_file_textures), ann="reload file textures", bgc=color.color) color.change() pm.button('transfer_shaders_button', l="Transfer Shaders", c=repeated_callback(Render.transfer_shaders), ann="Transfers shaders from one group to other, use it" "for LookDev -> Alembic", bgc=color.color) color.change() pm.button('fitPlacementToUV_button', l="fit placement to UV", c=repeated_callback(Render.fit_placement_to_UV), ann="fit placement to UV", bgc=color.color) pm.button( 
'connect_placement2d_to_file_texture_button', l='Connect Placement2D to File Texture', c=repeated_callback(Render.connect_placement2d_to_file), ann=Render.connect_placement2d_to_file.__doc__, bgc=color.color ) color.change() with pm.rowLayout(nc=2, adj=1): def enable_subdiv_callback(): max_tess = pm.intField('enable_subdiv_int_field', q=1, v=1) Render.enable_subdiv_on_selected( max_subdiv=max_tess, fixed_tes=False ) pm.button( 'enable_subdiv_on_selected_objects_button', l='Enable Subdiv (Adaptive)', c=repeated_callback(enable_subdiv_callback), ann='Enables Arnold/RedShift Subdiv (catclark) on ' 'selected objects', bgc=color.color ) pm.intField('enable_subdiv_int_field', min=0, v=3) with pm.rowLayout(nc=2, adj=1): def fixed_tess_callback(): max_tess = pm.intField('fixed_tess_int_field', q=1, v=1) Render.enable_subdiv_on_selected( fixed_tes=True, max_subdiv=max_tess ) pm.button( 'enable_fixed_subdiv_on_selected_objects_button', l='Enable Subdiv (Fixed Tes.)', c=repeated_callback(fixed_tess_callback), ann='Enables Arnold/RedShift Subdiv (catclark) on selected ' 'objects with fixed tessellation', bgc=color.color ) pm.intField('fixed_tess_int_field', min=0, v=1) pm.button( 'disable_subdiv_on_selected_objects_button', l='Disable Subdiv', c=repeated_callback(Render.disable_subdiv_on_selected), ann=Render.disable_subdiv.__doc__, bgc=color.color ) color.change() pm.button( 'export_shader_data_button', l='Export Shader Attributes', c=repeated_callback(Render.export_shader_attributes), ann=Render.export_shader_attributes.__doc__, bgc=color.color ) pm.button( 'import_shader_data_button', l='Import Shader Attributes', c=repeated_callback(Render.import_shader_attributes), ann=Render.import_shader_attributes.__doc__, bgc=color.color ) color.change() pm.button( 'export_shader_to_houdini_button', l='Export Shader Assignments To Houdini', c=repeated_callback(Render.export_shader_assignments_to_houdini), ann=Render.export_shader_assignments_to_houdini.__doc__, bgc=color.color ) 
color.change() pm.button( 'create_eye_shader_and_controls_button', l='Create Eye Shader and Controls', c=repeated_callback(Render.create_eye_shader_and_controls), ann='Creates eye shaders and controls for the selected eyes', bgc=color.color ) pm.button( 'setup_outer_eye_render_attributes_button', l='Setup Outer Eye Render Attributes', c=repeated_callback(Render.setup_outer_eye_render_attributes), ann=Render.setup_outer_eye_render_attributes.__doc__, bgc=color.color ) pm.button( 'setup_window_glass_render_attributes_button', l='Setup **Window Glass** Render Attributes', c=repeated_callback(Render.setup_window_glass_render_attributes), ann=Render.setup_window_glass_render_attributes.__doc__, bgc=color.color ) pm.button( 'setup_dummy_window_light_button', l='Setup/Update **Dummy Window** Light Plane', c=repeated_callback(Render.dummy_window_light_plane), ann=Render.dummy_window_light_plane.__doc__, bgc=color.color ) color.change() pm.button( 'create_generic_tooth_shader_button', l='Create Generic TOOTH Shader', c=repeated_callback(Render.create_generic_tooth_shader), ann=Render.create_generic_gum_shader.__doc__, bgc=color.color ) pm.button( 'create_generic_gum_shader_button', l='Create Generic GUM Shader', c=repeated_callback(Render.create_generic_gum_shader), ann=Render.create_generic_gum_shader.__doc__, bgc=color.color ) pm.button( 'create_generic_tongue_shader_button', l='Create Generic TONGUE Shader', c=repeated_callback(Render.create_generic_tongue_shader), ann=Render.create_generic_tongue_shader.__doc__, bgc=color.color ) color.change() pm.button('convert_to_ai_image_button', l="To aiImage", c=repeated_callback( Render.convert_file_node_to_ai_image_node), ann="Converts the selected File (file texture) nodes to " "aiImage nodes, also connects the place2dTexture " "node if necessary", bgc=color.color) color.change() pm.button('to_bbox_button', l="aiStandIn To BBox", c=repeated_callback(Render.standin_to_bbox), ann="Convert selected stand ins to bbox", 
bgc=color.color) pm.button('to_polywire_button', l="aiStandIn To Polywire", c=repeated_callback(Render.standin_to_polywire), ann="Convert selected stand ins to polywire", bgc=color.color) color.change() with pm.rowLayout(nc=3, adj=3, bgc=color.color): min_range_field = pm.floatField( minValue=1000, maxValue=50000, step=1, pre=0, value=3500, w=50, bgc=color.color, ann='Min Value' ) max_range_field = pm.floatField( minValue=1000, maxValue=50000, step=1, pre=0, value=6500, w=50, bgc=color.color, ann='Max Value' ) pm.button( ann="Randomize Color Temperature", l="Randomize Color Temp.", w=70, c=repeated_callback( Render.randomize_light_color_temp, min_range_field, max_range_field ), bgc=color.color ) with pm.rowLayout(nc=3, adj=3, bgc=color.color): min_range_field = pm.floatField( minValue=0, maxValue=200, step=0.1, pre=1, value=10, w=50, bgc=color.color, ann='Min Value' ) max_range_field = pm.floatField( minValue=0, maxValue=200, step=0.1, pre=1, value=20, w=50, bgc=color.color, ann='Max Value' ) pm.button( ann="Randomize Exposure", l="Randomize Exposure", w=70, c=repeated_callback( Render.randomize_light_intensity, min_range_field, max_range_field ), bgc=color.color ) color.change() pm.button( ann="Create Reflection Curve", l="Reflection Curve", c=repeated_callback( Render.generate_reflection_curve ), bgc=color.color ) color.change() pm.button( ann="Import GPU Content", l="Import GPU Content", c=repeated_callback( Render.import_gpu_content ), bgc=color.color ) color.change() with pm.rowLayout(nc=3, adj=3, bgc=color.color): source_driver_field = pm.textField( text='S:', w=50, bgc=color.color, ann='Source Driver' ) target_driver_field = pm.textField( text='L:', w=50, bgc=color.color, ann='Target Driver' ) pm.button( ann="Move Cache Files to Another Location", l="Move Cache Files", w=70, c=repeated_callback( Render.move_cache_files_wrapper, source_driver_field, target_driver_field ), bgc=color.color ) __commands__.extend(render_columnLayout.children()) 
previs_columnLayout = pm.columnLayout( 'previs_columnLayout', adj=True, cal="center", rs=row_spacing ) with previs_columnLayout: color.reset() pm.button('split_camera_button', l="Split Camera", c=repeated_callback(Previs.split_camera), ann=Previs.split_camera.__doc__, bgc=color.color) color.change() pm.button('shots_from_camera_button', l="Shots From Camera", c=repeated_callback(Previs.shots_from_cams), ann=Previs.shots_from_cams.__doc__, bgc=color.color) color.change() pm.button('auto_rename_shots_button', l="Auto Rename Shots", c=repeated_callback(Previs.auto_rename_shots), ann=Previs.auto_rename_shots.__doc__, bgc=color.color) color.change() pm.button('save_previs_to_shots_button', l="Save Previs To Shots", c=repeated_callback(Previs.save_previs_to_shots), ann=Previs.save_previs_to_shots.__doc__, bgc=color.color) color.change() pm.button('very_nice_camera_rig_button', l="Create a Very Nice Camera Rig", c=repeated_callback(camera_tools.very_nice_camera_rig), ann=camera_tools.very_nice_camera_rig.__doc__, bgc=color.color) __commands__.extend(previs_columnLayout.children()) animation_columnLayout = pm.columnLayout( 'animation_columnLayout', adj=True, cal="center", rs=row_spacing ) with animation_columnLayout: color.reset() color.change() from anima.env.mayaEnv import picker pm.text(l='===== Object Picker =====') pm.button('picker_setParent_button', l="Set Parent", c=repeated_callback(picker.set_parent), ann="Set Parent", bgc=color.color) pm.button('picker_releaseObject_button', l="Release", c=repeated_callback(picker.release_object), ann="Release Object", bgc=color.color) pm.button('picker_editKeyframes_button', l="Edit Keyframes", c=repeated_callback(picker.edit_keyframes), ann="Edit Keyframes", bgc=color.color) pm.button('picker_fixJump_button', l="Fix Jump", c=repeated_callback(picker.fix_jump), ann="Fix Jump", bgc=color.color) pm.button('picker_explodeSetup_button', l="Explode", c=repeated_callback(picker.explode_setup), ann="Explode Setup", bgc=color.color) 
color.change() from anima.env.mayaEnv import pivot_switcher pm.text(l='===== Pivot Switcher =====') pm.button('oyPivotSwitcher_setupPivot_button', l="Setup", c=repeated_callback(pivot_switcher.setup_pivot), ann="Setup Pivot", bgc=color.color) pm.button('oyPivotSwitcher_switchPivot_button', l="Switch", c=repeated_callback(pivot_switcher.switch_pivot), ann="Switch Pivot", bgc=color.color) pm.button('oyPivotSwitcher_togglePivot_button', l="Toggle", c=repeated_callback(pivot_switcher.toggle_pivot), ann="Toggle Pivot", bgc=color.color) color.change() pm.text(l='===== Alembic Tools =====') pm.button('bake_all_constraints_button', l="Bake All Constraints", c=repeated_callback(Animation.bake_all_constraints), ann=Animation.bake_all_constraints.__doc__, bgc=color.color) pm.button('bake_alembic_animations_button', l="Bake Alembic Animations", c=repeated_callback(Animation.bake_alembic_animations), ann=Animation.bake_alembic_animations.__doc__, bgc=color.color) rowLayout = pm.rowLayout(nc=2, adj=1, bgc=color.color) with rowLayout: pm.button( 'abc_from_selected_button', l='From Selected', c=repeated_callback(Animation.create_alembic_command), ann='Creates Alembic Cache from selected nodes', bgc=color.color ) from_top_node_checkBox = pm.checkBox( 'from_top_node_checkBox', l="Top Node", value=True, bgc=color.color ) pm.text(l='===== EXPORT =====') with pm.rowLayout(nc=3, adj=3): pm.checkBoxGrp( 'export_alembic_of_nodes_checkbox_grp', l='Alembic Options', numberOfCheckBoxes=2, labelArray2=['Isolate', 'Unload Refs'], cl3=['left', 'left', 'left'], cw3=[100, 60, 60], valueArray2=[1, 1] ) pm.intFieldGrp( 'export_alembic_of_nodes_handles_int_slider_grp', l='Handles', el='frames', nf=1, adj=2, cw3=[65, 1, 20], v1=1, ) def export_alembic_callback_with_options(func): isolate, unload_refs = pm.checkBoxGrp( 'export_alembic_of_nodes_checkbox_grp', q=1, valueArray2=1 ) handles = pm.intFieldGrp('export_alembic_of_nodes_handles_int_slider_grp', q=1, v1=1) func(isolate=isolate, 
unload_refs=unload_refs, handles=handles) pm.button( 'export_alembic_of_selected_cacheable_nodes_button', l='Selected Cacheable Nodes', c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_selected_cacheable_nodes), ann=auxiliary.export_alembic_of_selected_cacheable_nodes.__doc__.split('\n')[0], bgc=color.color ) pm.button( 'export_alembic_of_all_cacheable_nodes_button', l='ALL Cacheable Nodes', c=repeated_callback(export_alembic_callback_with_options, auxiliary.export_alembic_of_all_cacheable_nodes), ann=auxiliary.export_alembic_of_all_cacheable_nodes.__doc__.split('\n')[0], bgc=color.color ) pm.button( 'export_alembic_on_farm_button', l='Export Alembic On Farm', c=repeated_callback(Animation.export_alembics_on_farm), ann=Animation.export_alembics_on_farm.__doc__.split('\n')[0], bgc=color.color ) pm.text(l='===== Playblast Tools =====') color.change() pm.button( 'playblast_on_farm_button', l='PLayblast On Farm', c=repeated_callback(Animation.playblast_on_farm), ann=Animation.playblast_on_farm.__doc__.split('\n')[0], bgc=color.color ) pm.text(l='===== Exporters =====') color.change() rowLayout = pm.rowLayout(nc=3, adj=3, bgc=color.color) with rowLayout: start = int(pm.playbackOptions(q=1, minTime=1)) end = int(pm.playbackOptions(q=1, maxTime=1)) startButtonField = pm.textField( text=start, w=50, bgc=color.color, ann='start frame' ) endButtonField = pm.textField( text=end, w=50, bgc=color.color, ann='end frame' ) pm.button(ann="Exports maya camera to nuke", l="cam2chan", w=70, c=repeated_callback( Animation.cam_2_chan, startButtonField, endButtonField ), bgc=color.color) pm.text(l='===== Component Animation =====') color.change() smooth_selected_keyframes_text_fbg = pm.textFieldButtonGrp( 'smooth_selected_keyframes_text_fbg_button', bl="Smooth Selected Keyframes", adj=2, tx=1, cw=(1, 40), ann="select keyframes in graph editor to smooth", bgc=color.color ) def smooth_selected_keyframes_text_fbg_callback(): iteration = int( 
pm.textFieldButtonGrp( "smooth_selected_keyframes_text_fbg_button", q=1, tx=1 ) ) Animation.smooth_selected_keyframes(iteration) pm.textFieldButtonGrp( smooth_selected_keyframes_text_fbg, e=1, bc=repeated_callback( smooth_selected_keyframes_text_fbg_callback ) ) smooth_component_anim = pm.textFieldButtonGrp( 'oySmoothComponentAnimation_button', bl="Smooth Component Animation", adj=2, tx=1, cw=(1, 40), ann="select components to smooth", bgc=color.color ) pm.textFieldButtonGrp( smooth_component_anim, e=1, bc=repeated_callback( Animation.smooth_component_animation, smooth_component_anim ) ) color.change() pm.button( 'bake_component_animation_button', l='Bake component animation to Locator', c=repeated_callback(Animation.bake_component_animation), ann='Creates a locator at the center of selected components ' 'and moves it with the components along the current ' 'frame range', bgc=color.color ) pm.button( 'create_follicle_button', l='Attach Follicle', c=repeated_callback(Animation.attach_follicle), ann='Attaches a follicle in the selected components', bgc=color.color ) pm.button( 'equalize_node_speed_button', l='Equalize Node Speed', c=repeated_callback(Animation.equalize_node_speed), ann=Animation.equalize_node_speed.__doc__, bgc=color.color ) pm.text(l='===== Generic Tools =====') color.change() pm.button( 'set_range_from_shot_node_button', l='Range From Shot', c=repeated_callback(Animation.set_range_from_shot), ann='Sets the playback range from the shot node in the scene', bgc=color.color ) color.change() pm.button( 'delete_base_anim_layer_button', l='Delete Base Anim Layer', c=repeated_callback(Animation.delete_base_anim_layer), ann=Animation.delete_base_anim_layer.__doc__, bgc=color.color ) __commands__.extend(animation_columnLayout.children()) obsolete_columnLayout = pm.columnLayout( 'obsolete_columnLayout', adj=True, cal="center", ann="Obsolete", rs=row_spacing ) with obsolete_columnLayout: color.reset() pm.button('addMiLabel_button', l="add miLabel to selected", 
c=repeated_callback(Render.add_miLabel), ann="add miLabel to selected", bgc=color.color) color.change() pm.button('connectFacingRatioToVCoord_button', l="connect facingRatio to vCoord", c=repeated_callback( Render.connect_facingRatio_to_vCoord), ann="connect facingRatio to vCoord", bgc=color.color) color.change() with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherCast_text', l="miFinalGatherCast", bgc=color.color) pm.button('set_miFinalGatherCast_ON_button', l="ON", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherCast", 1 ), bgc=(0, 1, 0)) pm.button('set_miFinalGatherCast_OFF_button', l="OFF", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherCast", 0 ), bgc=(1, 0, 0)) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherReceive_text', l="miFinalGatherReceive", bgc=color.color) pm.button('set_miFinalGatherReceive_ON_button', l="ON", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherReceive", 1 ), bgc=(0, 1, 0)) pm.button('set_miFinalGatherReceive_OFF_button', l="OFF", c=repeated_callback( set_shape_attribute_wrapper, "miFinalGatherReceive", 0 ), bgc=(1, 0, 0)) with pm.rowLayout(nc=3, rat=(1, "both", 0), adj=1): pm.text('miFinalGatherHide_text', l="miFinalGatherHide", bgc=color.color) pm.button('set_miFinalGatherHide_ON_button', l="ON", c=repeated_callback(Render.set_finalGatherHide, 1), bgc=(0, 1, 0)) pm.button('set_miFinalGatherHide_OFF_button', l="OFF", c=repeated_callback(Render.set_finalGatherHide, 0), bgc=(1, 0, 0)) color.change() pm.button('convertToMRTexture_button', l="use mib_texture_filter_lookup", c=repeated_callback( Render.use_mib_texture_filter_lookup), ann=( "adds an mib_texture_filter_lookup node in \n" + "between the file nodes and their outputs, to \n" + "get a sharper look output from the texture file"), bgc=color.color) pm.button('convertToLinear_button', l="convert to Linear texture", c=repeated_callback(Render.convert_to_linear), ann="convert to Linear 
texture", bgc=color.color) pm.button('useImageSequence_button', l="use image sequence for \nmentalrayTexture", c=repeated_callback(Render.use_image_sequence), ann="use image sequence for \nmentalrayTexture", bgc=color.color) color.change() pm.button('oyAddToSelectedContainer_button', l="add to selected container", c=repeated_callback(Render.add_to_selected_container), ann="add to selected container", bgc=color.color) pm.button('oyRemoveFromContainer_button', l="remove from selected container", c=repeated_callback(Render.remove_from_container), ann="remove from selected container", bgc=color.color) color.change() pm.button('oySmedgeRenderSlicer_button', l="oySmedgeRenderSlicer", c=repeated_callback(mel.eval, 'oySmedgeRenderSlicer'), ann="SmedgeRenderSlicer", bgc=color.color) color.change() pm.button( 'exponentialSmooth_button', l="exponential smooth", c=repeated_callback(Modeling.polySmoothFace, 0), ann="applies exponential smooth to selected objects", bgc=color.color ) pm.button( 'linearSmooth_button', l="linear smooth", c=repeated_callback(Modeling.polySmoothFace, 1), ann="applies linear smooth to selected objects", bgc=color.color ) pm.button( 'deActivateSmooth_button', l="deActivate smooth", c=repeated_callback(Modeling.activate_deActivate_smooth, 1), ann="deActivates all polySmoothFace nodes in the " "scene", bgc=color.color ) pm.button( 'activateSmooth_button', l="activate smooth", c=repeated_callback(Modeling.activate_deActivate_smooth, 0), ann="activates all deActivated polySmoothFace nodes " "in the scene", bgc=color.color ) pm.button( 'deleteSmooth_button', l="delete smooth", c=repeated_callback(Modeling.delete_smooth), ann="deletes all the polySmoothFace nodes from the " "scene", bgc=color.color ) pm.button( 'deleteSmoothOnSelected_button', l="delete smooth on selected", c=repeated_callback(Modeling.delete_smooth_on_selected), ann="deletes selected polySmoothFace nodes from scene", bgc=color.color ) color.change() pm.button( 'deleteAllSound_button', 
l="delete all sound", c=repeated_callback(General.delete_all_sound), ann="delete all sound", bgc=color.color ) pm.button( 'displayHandlesOfSelectedObjects_button', l="toggle handles of selected objects", c=repeated_callback( General.toggle_attributes, "displayHandle" ), ann="select objects to toggle handle", bgc=color.color ) color.change() pm.button( 'referenceSelectedObjects_button', l="reference selected objects", c=repeated_callback( General.reference_selected_objects ), ann="sets objects display override to reference", bgc=color.color ) pm.button( 'dereferenceSelectedObjects_button', l="de-reference selected objects", c=repeated_callback( General.dereference_selected_objects ), ann="sets objects display override to reference", bgc=color.color ) color.change() pm.button( 'oyDeReferencer_button', l="dereferencer", c=repeated_callback(General.dereferencer), ann="sets all objects display override to normal", bgc=color.color ) color.change() enable_matte_row_layout = pm.rowLayout(nc=6, adj=1) with enable_matte_row_layout: pm.text( l='Enable Arnold Matte', ) pm.button( l='Default', c=repeated_callback(Render.enable_matte, 0), ann='Enables Arnold Matte on selected objects with <b>No Color</b>', bgc=color.color ) pm.button( l='R', c=repeated_callback(Render.enable_matte, 1), ann='Enables Arnold Matte on selected objects with <b>Red</b>', bgc=[1, 0, 0] ) pm.button( l='G', c=repeated_callback(Render.enable_matte, 2), ann='Enables Arnold Matte on selected objects with <b>Green</b>', bgc=[0, 1, 0] ) pm.button( l='B', c=repeated_callback(Render.enable_matte, 3), ann='Enables Arnold Matte on selected objects with <b>Blue</b>', bgc=[0, 0, 1] ) pm.button( l='A', c=repeated_callback(Render.enable_matte, 4), ann='Enables Arnold Matte on selected objects with <b>Alpha</b>', bgc=[0.5, 0.5, 0.5] ) color.change() pm.button( 'fix_render_layer_out_adjustment_errors_button', l="fixRenderLayerOutAdjustmentErrors", c='pm.mel.eval("fixRenderLayerOutAdjustmentErrors();")', 
ann="fixRenderLayerOutAdjustmentErrors", bgc=color.color ) pm.separator() color.change() with pm.rowLayout(nc=2, adj=2): apply_to_hierarchy_checkBox = pm.checkBox( 'apply_to_hierarchy_checkBox', l="Apply to Hierarchy", value=True, bgc=color.color ) disable_undo_queue_check_box = pm.checkBox( 'disable_undo_queue_checkBox', l="Disable Undo", value=False, bgc=color.color ) attr_names = [ 'castsShadows', 'receiveShadows', 'motionBlur', 'primaryVisibility', 'visibleInReflections', 'visibleInRefractions', 'aiSelfShadows', 'aiOpaque', 'aiVisibleInDiffuse', 'aiVisibleInGlossy', 'aiMatte', 'overrideShaders' ] for attr_name in attr_names: with pm.rowLayout(nc=4, rat=(1, "both", 0), adj=1): pm.text('%s_text' % attr_name, l=attr_name, bgc=color.color) pm.button( 'set_%s_ON_button' % attr_name, l="ON", c=repeated_callback( set_shape_attribute_wrapper, attr_name, 1, ), bgc=(0, 1, 0) ) pm.button( 'set_%s_OFF_button' % attr_name, l="OFF", c=repeated_callback( set_shape_attribute_wrapper, attr_name, 0 ), bgc=(1, 0, 0) ) pm.button( 'set_%s_REMOVE_button' % attr_name, l="REM", ann='Remove Override', c=repeated_callback( set_shape_attribute_wrapper, attr_name, -1 ), bgc=(0, 0.5, 1) ) pm.separator() color.change() pm.button( l='Setup Z-Layer', c=repeated_callback(Render.create_z_layer), ann=Render.create_z_layer.__doc__, bgc=color.color ) pm.button( l='Setup EA Matte', c=repeated_callback(Render.create_ea_matte), ann=Render.create_ea_matte.__doc__, bgc=color.color ) color.change() pm.text(l='===== BarnDoor Simulator =====') pm.button( 'barn_door_simulator_setup_button', l='Setup', c=repeated_callback(Render.barndoor_simulator_setup), ann='Creates a arnold barn door simulator to the selected ' 'light', bgc=color.color ) pm.button( 'barn_door_simulator_unsetup_button', l='Un-Setup', c=repeated_callback(Render.barndoor_simulator_unsetup), ann='Removes the barn door simulator nodes from the selected ' 'light', bgc=color.color ) pm.button( 'fix_barndoors_button', l='Fix BarnDoors', 
c=repeated_callback(Render.fix_barndoors), ann=Render.fix_barndoors.__doc__, bgc=color.color ) color.change() pm.button( 'ai_skin_sss_to_ai_skin_button', l='aiSkinSSS --> aiSkin', c=repeated_callback(Render.convert_aiSkinSSS_to_aiSkin), ann=Render.convert_aiSkinSSS_to_aiSkin.__doc__, bgc=color.color ) pm.button( 'normalize_sss_weights_button', l='Normalize SSS Weights', c=repeated_callback(Render.normalize_sss_weights), ann=Render.normalize_sss_weights.__doc__, bgc=color.color ) __commands__.extend(obsolete_columnLayout.children()) pm.tabLayout( main_tab_layout, edit=True, tabLabel=[ (general_column_layout, "Gen"), (reference_columnLayout, "Ref"), (modeling_column_layout, "Mod"), (rigging_columnLayout, "Rig"), (render_columnLayout, "Ren"), (previs_columnLayout, "Prev"), (animation_columnLayout, "Ani"), (obsolete_columnLayout, "Obs") ], cc=functools.partial(store_tab_index, main_tab_layout) ) dock_control = pm.dockControl( "toolbox_dockControl", l='toolbox', content=toolbox_window, area="left", allowedArea=["left", "right"], width=width ) last_tab_index = get_last_tab_index() if last_tab_index: pm.tabLayout( main_tab_layout, e=1, sti=last_tab_index ) def store_tab_index(tab_layout): val = pm.tabLayout(tab_layout, q=1, sti=1) os.environ[__last_tab__] = str(val) def get_last_tab_index(): return int(os.environ.get(__last_tab__, 0))
true
true
f71cf84b76e986982228c0447aa806b21c91314f
1,429
py
Python
tests/test_distribution/test_von_mises_fisher.py
mdeegen/pb_bss
e8c380e27d82707e8d2b2d83c5c918d47ea5d89f
[ "MIT" ]
171
2018-10-22T09:34:45.000Z
2022-03-19T16:09:20.000Z
tests/test_distribution/test_von_mises_fisher.py
mdeegen/pb_bss
e8c380e27d82707e8d2b2d83c5c918d47ea5d89f
[ "MIT" ]
19
2019-03-14T09:42:58.000Z
2021-09-03T07:13:03.000Z
tests/test_distribution/test_von_mises_fisher.py
mdeegen/pb_bss
e8c380e27d82707e8d2b2d83c5c918d47ea5d89f
[ "MIT" ]
40
2018-10-11T08:01:54.000Z
2022-03-05T13:26:15.000Z
import numpy as np from numpy.testing import assert_allclose, assert_equal import unittest from pb_bss.distribution import VonMisesFisher from pb_bss.distribution import VonMisesFisherTrainer class TestGaussian(unittest.TestCase): def test_shapes(self): samples = 10000 mean = np.ones((3,)) covariance = np.eye(3) x = np.random.multivariate_normal(mean, covariance, size=(samples,)) model = VonMisesFisherTrainer().fit(x) assert_equal(model.mean.shape, mean.shape) assert_equal(model.concentration.shape, ()) def test_shapes_independent_dims(self): samples = 10000 mean = np.ones((3,)) covariance = np.eye(3) x = np.random.multivariate_normal(mean, covariance, size=(13, samples,)) model = VonMisesFisherTrainer().fit(x) assert_equal(model.mean.shape, np.tile(mean, (13, 1)).shape) assert_equal(model.concentration.shape, (13,)) def test_von_mises_fisher(self): samples = 10000 mean = np.ones((3,)) mean /= np.linalg.norm(mean, axis=-1) concentration = 50 # ToDo: Implement VonMisesFisher(...).sample(...) return x = VonMisesFisher(mean, concentration).sample(size=(samples,)) model = VonMisesFisherTrainer().fit(x) assert_allclose(model.mean, mean, atol=0.1) assert_allclose(model.covariance, concentration, atol=0.1)
35.725
80
0.660602
import numpy as np from numpy.testing import assert_allclose, assert_equal import unittest from pb_bss.distribution import VonMisesFisher from pb_bss.distribution import VonMisesFisherTrainer class TestGaussian(unittest.TestCase): def test_shapes(self): samples = 10000 mean = np.ones((3,)) covariance = np.eye(3) x = np.random.multivariate_normal(mean, covariance, size=(samples,)) model = VonMisesFisherTrainer().fit(x) assert_equal(model.mean.shape, mean.shape) assert_equal(model.concentration.shape, ()) def test_shapes_independent_dims(self): samples = 10000 mean = np.ones((3,)) covariance = np.eye(3) x = np.random.multivariate_normal(mean, covariance, size=(13, samples,)) model = VonMisesFisherTrainer().fit(x) assert_equal(model.mean.shape, np.tile(mean, (13, 1)).shape) assert_equal(model.concentration.shape, (13,)) def test_von_mises_fisher(self): samples = 10000 mean = np.ones((3,)) mean /= np.linalg.norm(mean, axis=-1) concentration = 50 return x = VonMisesFisher(mean, concentration).sample(size=(samples,)) model = VonMisesFisherTrainer().fit(x) assert_allclose(model.mean, mean, atol=0.1) assert_allclose(model.covariance, concentration, atol=0.1)
true
true
f71cf8aa46d7092006946a492f21beefc661135b
443
py
Python
src/constants/database_constants.py
davendiy/QWERTY_messenger
6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616
[ "MIT" ]
null
null
null
src/constants/database_constants.py
davendiy/QWERTY_messenger
6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616
[ "MIT" ]
null
null
null
src/constants/database_constants.py
davendiy/QWERTY_messenger
6bfa5a6ceb7b63f3e57d3d7779a1cda26cd55616
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*-encoding: utf-8-*- # created: 25.11.2019 # by David Zashkolny # 3 course, comp math # Taras Shevchenko National University of Kyiv # email: davendiy@gmail.com TEXT = 0 IMAGE = 1 AUDIO = 2 VIDEO = 3 DOCUMENT = 4 MESSAGE_TYPES = { TEXT, IMAGE, AUDIO, VIDEO, DOCUMENT, } CHANNELS = "Channels" CHATS = "Chats" USERS_CHATS = 'UsersChats' USERS_CHANNELS = 'UsersChannels' PRIVATE = 1 PUBLIC = 0
13.84375
46
0.665914
TEXT = 0 IMAGE = 1 AUDIO = 2 VIDEO = 3 DOCUMENT = 4 MESSAGE_TYPES = { TEXT, IMAGE, AUDIO, VIDEO, DOCUMENT, } CHANNELS = "Channels" CHATS = "Chats" USERS_CHATS = 'UsersChats' USERS_CHANNELS = 'UsersChannels' PRIVATE = 1 PUBLIC = 0
true
true
f71cf9824ffb509cab55ce293165655a7f35f31c
1,730
py
Python
code/extras/highway_layer.py
vamships/RelationPrediction
45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b
[ "MIT" ]
376
2017-09-10T14:29:16.000Z
2022-03-17T04:01:53.000Z
code/extras/highway_layer.py
vamships/RelationPrediction
45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b
[ "MIT" ]
15
2018-07-28T23:44:53.000Z
2021-08-21T17:33:55.000Z
code/extras/highway_layer.py
vamships/RelationPrediction
45f48e8d09331e7244a7fe8d2d9d0fefa7e1f76b
[ "MIT" ]
104
2017-11-20T13:50:04.000Z
2022-03-31T14:30:47.000Z
import numpy as np import tensorflow as tf from model import Model from common.shared_functions import glorot_variance, make_tf_variable, make_tf_bias class HighwayLayer(Model): vertex_embedding_function = {'train': None, 'test': None} def __init__(self, shape, next_component=None, next_component_2=None): self.next_component = next_component self.next_component_2 = next_component_2 self.shape = shape def compute_vertex_embeddings(self, mode='train'): if self.vertex_embedding_function[mode] is None: code_1 = self.next_component.get_all_codes(mode=mode)[0] code_2 = self.next_component_2.get_all_codes(mode=mode)[0] gates = self.get_gates(mode=mode) self.vertex_embedding_function[mode] = gates * code_1 + (1-gates) * code_2 return self.vertex_embedding_function[mode] def local_initialize_train(self): variance = glorot_variance(self.shape) self.W = make_tf_variable(0, variance, self.shape) self.b = make_tf_bias(self.shape[1], init=1) def local_get_weights(self): return [self.W, self.b] def get_gates(self, mode='train'): code = self.next_component_2.get_all_codes(mode=mode)[0] hidden = tf.matmul(code, self.W) + self.b return tf.nn.sigmoid(hidden) def get_all_codes(self, mode='train'): collected_messages = self.compute_vertex_embeddings(mode=mode) return collected_messages, None, collected_messages def get_all_subject_codes(self, mode='train'): return self.compute_vertex_embeddings(mode=mode) def get_all_object_codes(self, mode='train'): return self.compute_vertex_embeddings(mode=mode)
34.6
86
0.701734
import numpy as np import tensorflow as tf from model import Model from common.shared_functions import glorot_variance, make_tf_variable, make_tf_bias class HighwayLayer(Model): vertex_embedding_function = {'train': None, 'test': None} def __init__(self, shape, next_component=None, next_component_2=None): self.next_component = next_component self.next_component_2 = next_component_2 self.shape = shape def compute_vertex_embeddings(self, mode='train'): if self.vertex_embedding_function[mode] is None: code_1 = self.next_component.get_all_codes(mode=mode)[0] code_2 = self.next_component_2.get_all_codes(mode=mode)[0] gates = self.get_gates(mode=mode) self.vertex_embedding_function[mode] = gates * code_1 + (1-gates) * code_2 return self.vertex_embedding_function[mode] def local_initialize_train(self): variance = glorot_variance(self.shape) self.W = make_tf_variable(0, variance, self.shape) self.b = make_tf_bias(self.shape[1], init=1) def local_get_weights(self): return [self.W, self.b] def get_gates(self, mode='train'): code = self.next_component_2.get_all_codes(mode=mode)[0] hidden = tf.matmul(code, self.W) + self.b return tf.nn.sigmoid(hidden) def get_all_codes(self, mode='train'): collected_messages = self.compute_vertex_embeddings(mode=mode) return collected_messages, None, collected_messages def get_all_subject_codes(self, mode='train'): return self.compute_vertex_embeddings(mode=mode) def get_all_object_codes(self, mode='train'): return self.compute_vertex_embeddings(mode=mode)
true
true
f71cfa170d9e79942034aff33de0c5092954646d
668
py
Python
etc/check_fonts.py
UO-CIS211/panels
a97e814a44244cb53cbed8165056f3df69a3541e
[ "MIT" ]
1
2019-03-20T18:18:12.000Z
2019-03-20T18:18:12.000Z
etc/check_fonts.py
UO-CIS211/panels
a97e814a44244cb53cbed8165056f3df69a3541e
[ "MIT" ]
null
null
null
etc/check_fonts.py
UO-CIS211/panels
a97e814a44244cb53cbed8165056f3df69a3541e
[ "MIT" ]
null
null
null
# # Utility to check availability and location of fonts # for pygame # import pygame pygame.font.init() # Required or SysFont will break candidates = [ "Helvetica", "helvetica", # "helvetica.ttf", "Avenir Next", "AvenirNext" ] default = pygame.font.get_default_font() print("System default font is '{}'".format(default)) for can in candidates: path = pygame.font.match_font(can) sysfont = pygame.font.SysFont(can, 12) # Breaks print("{} => {}".format(can, path)) print("Sysfont {} => {}".format(can,sysfont)) print("Found fonts:") fonts = pygame.font.get_fonts() for font in fonts: print("-- {}".format(font))
20.242424
53
0.642216
import pygame pygame.font.init() candidates = [ "Helvetica", "helvetica", "Avenir Next", "AvenirNext" ] default = pygame.font.get_default_font() print("System default font is '{}'".format(default)) for can in candidates: path = pygame.font.match_font(can) sysfont = pygame.font.SysFont(can, 12) print("{} => {}".format(can, path)) print("Sysfont {} => {}".format(can,sysfont)) print("Found fonts:") fonts = pygame.font.get_fonts() for font in fonts: print("-- {}".format(font))
true
true
f71cfa2946ae7d25b13601e4e5fd1d6d17827f18
3,823
py
Python
AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py
whywhywhyw/o3de
8e09f66799d4c8f188d45861d821e8656a554cb1
[ "Apache-2.0", "MIT" ]
11
2021-07-08T09:58:26.000Z
2022-03-17T17:59:26.000Z
AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py
RoddieKieley/o3de
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
[ "Apache-2.0", "MIT" ]
29
2021-07-06T19:33:52.000Z
2022-03-22T10:27:49.000Z
AutomatedTesting/Gem/PythonTests/Physics/tests/joints/Joints_Fixed2BodiesConstrained.py
RoddieKieley/o3de
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
[ "Apache-2.0", "MIT" ]
4
2021-07-06T19:24:43.000Z
2022-03-31T12:42:27.000Z
""" Copyright (c) Contributors to the Open 3D Engine Project. For complete copyright and license terms please see the LICENSE at the root of this distribution. SPDX-License-Identifier: Apache-2.0 OR MIT """ # Test case ID : C18243580 # Test Case Title : Check that fixed joint constrains 2 bodies # fmt: off class Tests: enter_game_mode = ("Entered game mode", "Failed to enter game mode") exit_game_mode = ("Exited game mode", "Couldn't exit game mode") lead_found = ("Found lead", "Did not find lead") follower_found = ("Found follower", "Did not find follower") check_lead_position = ("Lead moved in X direction", "Lead did not move in X direction") check_follower_position = ("Follower moved in X direction", "Follower did not move in X direction") # fmt: on def Joints_Fixed2BodiesConstrained(): """ Summary: Check that fixed joint constrains 2 bodies Level Description: lead - Starts above follower entity follower - Starts below lead entity. Constrained to lead entity with fixed joint. Starts with initial velocity of (5, 0, 0) in positive X direction. Expected Behavior: The follower entity moves in the positive X direction and the lead entity is dragged along towards the positive X direction. The x position of the lead entity is incremented from its original. Test Steps: 1) Open Level 2) Enter Game Mode 3) Create and Validate Entities 4) Wait for several seconds 5) Check to see if lead entity and follower entity moved in positive X direction. 6) Exit Game Mode 7) Close Editor Note: - This test file must be called from the Open 3D Engine Editor command terminal - Any passed and failed tests are written to the Editor.log file. Parsing the file or running a log_monitor are required to observe the test results. 
:return: None """ import os import sys from editor_python_test_tools.utils import Report from editor_python_test_tools.utils import TestHelper as helper import azlmbr.legacy.general as general import azlmbr.bus from JointsHelper import JointEntity # Helper Entity class class Entity(JointEntity): def criticalEntityFound(self): # Override function to use local Test dictionary Report.critical_result(Tests.__dict__[self.name + "_found"], self.id.isValid()) # Main Script helper.init_idle() # 1) Open Level helper.open_level("Physics", "Joints_Fixed2BodiesConstrained") # 2) Enter Game Mode helper.enter_game_mode(Tests.enter_game_mode) # 3) Create and Validate Entities lead = Entity("lead") follower = Entity("follower") Report.info_vector3(lead.position, "lead initial position:") Report.info_vector3(follower.position, "follower initial position:") leadInitialPosition = lead.position.x followerInitialPosition = follower.position.x # 4) Wait for several seconds general.idle_wait(1.0) # wait for lead and follower to move # 5) Check to see if lead entity and follower entity moved in positive X direction. Report.info_vector3(lead.position, "lead position after 1 second:") Report.info_vector3(follower.position, "follower position after 1 second:") Report.critical_result(Tests.check_lead_position, lead.position.x > leadInitialPosition) Report.critical_result(Tests.check_follower_position, follower.position.x > followerInitialPosition) # 6) Exit Game Mode helper.exit_game_mode(Tests.exit_game_mode) if __name__ == "__main__": from editor_python_test_tools.utils import Report Report.start_test(Joints_Fixed2BodiesConstrained)
38.616162
152
0.699974
class Tests: enter_game_mode = ("Entered game mode", "Failed to enter game mode") exit_game_mode = ("Exited game mode", "Couldn't exit game mode") lead_found = ("Found lead", "Did not find lead") follower_found = ("Found follower", "Did not find follower") check_lead_position = ("Lead moved in X direction", "Lead did not move in X direction") check_follower_position = ("Follower moved in X direction", "Follower did not move in X direction") # fmt: on def Joints_Fixed2BodiesConstrained(): import os import sys from editor_python_test_tools.utils import Report from editor_python_test_tools.utils import TestHelper as helper import azlmbr.legacy.general as general import azlmbr.bus from JointsHelper import JointEntity # Helper Entity class class Entity(JointEntity): def criticalEntityFound(self): # Override function to use local Test dictionary Report.critical_result(Tests.__dict__[self.name + "_found"], self.id.isValid()) # Main Script helper.init_idle() # 1) Open Level helper.open_level("Physics", "Joints_Fixed2BodiesConstrained") # 2) Enter Game Mode helper.enter_game_mode(Tests.enter_game_mode) # 3) Create and Validate Entities lead = Entity("lead") follower = Entity("follower") Report.info_vector3(lead.position, "lead initial position:") Report.info_vector3(follower.position, "follower initial position:") leadInitialPosition = lead.position.x followerInitialPosition = follower.position.x # 4) Wait for several seconds general.idle_wait(1.0) # wait for lead and follower to move # 5) Check to see if lead entity and follower entity moved in positive X direction. 
Report.info_vector3(lead.position, "lead position after 1 second:") Report.info_vector3(follower.position, "follower position after 1 second:") Report.critical_result(Tests.check_lead_position, lead.position.x > leadInitialPosition) Report.critical_result(Tests.check_follower_position, follower.position.x > followerInitialPosition) # 6) Exit Game Mode helper.exit_game_mode(Tests.exit_game_mode) if __name__ == "__main__": from editor_python_test_tools.utils import Report Report.start_test(Joints_Fixed2BodiesConstrained)
true
true
f71cfa61b2b9cce8bfbbb52298ff209aec5dcc32
126
py
Python
tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py
antoinecarme/sklearn2sql_heroku
d680db10683daa419324461eeea851dd8b103ad5
[ "BSD-3-Clause" ]
1
2019-07-09T14:45:18.000Z
2019-07-09T14:45:18.000Z
tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py
antoinecarme/sklearn2sql_heroku
d680db10683daa419324461eeea851dd8b103ad5
[ "BSD-3-Clause" ]
5
2017-11-13T13:35:37.000Z
2021-11-11T12:57:20.000Z
tests/regression/RandomReg_100/ws_RandomReg_100_Ridge_sqlite_code_gen.py
antoinecarme/sklearn2sql_heroku
d680db10683daa419324461eeea851dd8b103ad5
[ "BSD-3-Clause" ]
1
2021-09-19T15:05:33.000Z
2021-09-19T15:05:33.000Z
from sklearn2sql_heroku.tests.regression import generic as reg_gen reg_gen.test_model("Ridge" , "RandomReg_100" , "sqlite")
25.2
66
0.793651
from sklearn2sql_heroku.tests.regression import generic as reg_gen reg_gen.test_model("Ridge" , "RandomReg_100" , "sqlite")
true
true
f71cfab1efdc6e0bd803f231c2d34fb3ee25c532
14,305
py
Python
tests/providers/hashicorp/secrets/test_vault.py
emilioego/airflow
3457c7847cd24413ff5b622e65c27d8370f94502
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
79
2021-10-15T07:32:27.000Z
2022-03-28T04:10:19.000Z
tests/providers/hashicorp/secrets/test_vault.py
emilioego/airflow
3457c7847cd24413ff5b622e65c27d8370f94502
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
153
2021-10-15T05:23:46.000Z
2022-02-23T06:07:10.000Z
tests/providers/hashicorp/secrets/test_vault.py
emilioego/airflow
3457c7847cd24413ff5b622e65c27d8370f94502
[ "Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause" ]
23
2021-10-15T02:36:37.000Z
2022-03-17T02:59:27.000Z
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from unittest import TestCase, mock from hvac.exceptions import InvalidPath, VaultError from airflow.providers.hashicorp.secrets.vault import VaultBackend class TestVaultSecrets(TestCase): @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': '94011e25-f8dc-ec29-221b-1f9c1d9ad2ae', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'metadata': { 'created_time': '2020-03-16T21:01:43.331126Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) 
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_engine_version_1(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point='airflow', path='connections/test_postgres' ) self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_mount_point": "custom", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) self.assertEqual("custom", test_client.vault_client.auth_mount_point) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") mock_client.secrets.kv.v1.read_secret.assert_called_once_with( 
mount_point='airflow', path='connections/test_postgres' ) self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) @mock.patch.dict( 'os.environ', { 'AIRFLOW_CONN_TEST_MYSQL': 'mysql://airflow:airflow@host:5432/airflow', }, ) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_non_existent_key(self, mock_hvac): """ Test that if the key with connection ID is not present in Vault, _VaultClient.get_connections should return None """ mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client # Response does not contain the requested key mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath() kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_conn_uri(conn_id="test_mysql")) mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point='airflow', path='connections/test_mysql', version=None ) self.assertEqual([], test_client.get_connections(conn_id="test_mysql")) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'value': 'world'}, 'metadata': { 'created_time': '2020-03-28T02:10:54.301784Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) returned_uri = 
test_client.get_variable("hello") self.assertEqual('world', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value_engine_version_1(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'value': 'world'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable("hello") mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point='airflow', path='variables/hello' ) self.assertEqual('world', returned_uri) @mock.patch.dict( 'os.environ', { 'AIRFLOW_VAR_HELLO': 'world', }, ) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value_non_existent_key(self, mock_hvac): """ Test that if the key with connection ID is not present in Vault, _VaultClient.get_connections should return None """ mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client # Response does not contain the requested key mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath() kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_variable("hello")) mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point='airflow', path='variables/hello', version=None ) self.assertIsNone(test_client.get_variable("hello")) 
@mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_auth_failure_raises_error(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.is_authenticated.return_value = False kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "test_wrong_token", } with self.assertRaisesRegex(VaultError, "Vault Authentication Error!"): VaultBackend(**kwargs).get_connections(conn_id='test') def test_auth_type_kubernetes_with_unreadable_jwt_raises_error(self): path = "/var/tmp/this_does_not_exist/334e918ef11987d3ef2f9553458ea09f" kwargs = { "auth_type": "kubernetes", "kubernetes_role": "default", "kubernetes_jwt_path": path, "url": "http://127.0.0.1:8200", } with self.assertRaisesRegex(FileNotFoundError, path): VaultBackend(**kwargs).get_connections(conn_id='test') @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_config_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'value': 'sqlite:////Users/airflow/airflow/airflow.db'}, 'metadata': { 'created_time': '2020-03-28T02:10:54.301784Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "configs_path": "configurations", "mount_point": "secret", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_config("sql_alchemy_conn") self.assertEqual('sqlite:////Users/airflow/airflow/airflow.db', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def 
test_connections_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "connections_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_conn_uri(conn_id="test")) mock_hvac.Client.assert_not_called() @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_variables_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "variables_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_variable("hello")) mock_hvac.Client.assert_not_called() @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_config_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "config_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_config("test")) mock_hvac.Client.assert_not_called()
38.766938
101
0.602517
from unittest import TestCase, mock from hvac.exceptions import InvalidPath, VaultError from airflow.providers.hashicorp.secrets.vault import VaultBackend class TestVaultSecrets(TestCase): @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': '94011e25-f8dc-ec29-221b-1f9c1d9ad2ae', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'metadata': { 'created_time': '2020-03-16T21:01:43.331126Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_engine_version_1(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") 
mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point='airflow', path='connections/test_postgres' ) self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_engine_version_1_custom_auth_mount_point(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'conn_uri': 'postgresql://airflow:airflow@host:5432/airflow'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_mount_point": "custom", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) self.assertEqual("custom", test_client.vault_client.auth_mount_point) returned_uri = test_client.get_conn_uri(conn_id="test_postgres") mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point='airflow', path='connections/test_postgres' ) self.assertEqual('postgresql://airflow:airflow@host:5432/airflow', returned_uri) @mock.patch.dict( 'os.environ', { 'AIRFLOW_CONN_TEST_MYSQL': 'mysql://airflow:airflow@host:5432/airflow', }, ) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_conn_uri_non_existent_key(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath() kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_conn_uri(conn_id="test_mysql")) 
mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point='airflow', path='connections/test_mysql', version=None ) self.assertEqual([], test_client.get_connections(conn_id="test_mysql")) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': '2d48a2ad-6bcb-e5b6-429d-da35fdf31f56', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'value': 'world'}, 'metadata': { 'created_time': '2020-03-28T02:10:54.301784Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable("hello") self.assertEqual('world', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value_engine_version_1(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v1.read_secret.return_value = { 'request_id': '182d0673-618c-9889-4cba-4e1f4cfe4b4b', 'lease_id': '', 'renewable': False, 'lease_duration': 2764800, 'data': {'value': 'world'}, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", "kv_engine_version": 1, } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_variable("hello") mock_client.secrets.kv.v1.read_secret.assert_called_once_with( mount_point='airflow', path='variables/hello' ) self.assertEqual('world', returned_uri) @mock.patch.dict( 
'os.environ', { 'AIRFLOW_VAR_HELLO': 'world', }, ) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_variable_value_non_existent_key(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.side_effect = InvalidPath() kwargs = { "variables_path": "variables", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.7AU0I51yv1Q1lxOIg1F3ZRAS", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_variable("hello")) mock_client.secrets.kv.v2.read_secret_version.assert_called_once_with( mount_point='airflow', path='variables/hello', version=None ) self.assertIsNone(test_client.get_variable("hello")) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_auth_failure_raises_error(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.is_authenticated.return_value = False kwargs = { "connections_path": "connections", "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "test_wrong_token", } with self.assertRaisesRegex(VaultError, "Vault Authentication Error!"): VaultBackend(**kwargs).get_connections(conn_id='test') def test_auth_type_kubernetes_with_unreadable_jwt_raises_error(self): path = "/var/tmp/this_does_not_exist/334e918ef11987d3ef2f9553458ea09f" kwargs = { "auth_type": "kubernetes", "kubernetes_role": "default", "kubernetes_jwt_path": path, "url": "http://127.0.0.1:8200", } with self.assertRaisesRegex(FileNotFoundError, path): VaultBackend(**kwargs).get_connections(conn_id='test') @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_get_config_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client mock_client.secrets.kv.v2.read_secret_version.return_value = { 'request_id': 
'2d48a2ad-6bcb-e5b6-429d-da35fdf31f56', 'lease_id': '', 'renewable': False, 'lease_duration': 0, 'data': { 'data': {'value': 'sqlite:////Users/airflow/airflow/airflow.db'}, 'metadata': { 'created_time': '2020-03-28T02:10:54.301784Z', 'deletion_time': '', 'destroyed': False, 'version': 1, }, }, 'wrap_info': None, 'warnings': None, 'auth': None, } kwargs = { "configs_path": "configurations", "mount_point": "secret", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) returned_uri = test_client.get_config("sql_alchemy_conn") self.assertEqual('sqlite:////Users/airflow/airflow/airflow.db', returned_uri) @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_connections_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "connections_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_conn_uri(conn_id="test")) mock_hvac.Client.assert_not_called() @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_variables_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "variables_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": "s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_variable("hello")) mock_hvac.Client.assert_not_called() @mock.patch("airflow.providers.hashicorp._internal_client.vault_client.hvac") def test_config_path_none_value(self, mock_hvac): mock_client = mock.MagicMock() mock_hvac.Client.return_value = mock_client kwargs = { "config_path": None, "mount_point": "airflow", "auth_type": "token", "url": "http://127.0.0.1:8200", "token": 
"s.FnL7qg0YnHZDpf4zKKuFy0UK", } test_client = VaultBackend(**kwargs) self.assertIsNone(test_client.get_config("test")) mock_hvac.Client.assert_not_called()
true
true
f71cfb79c7c2c9361f5f9e6f721e707abbbcb15a
16,113
py
Python
qa/rpc-tests/p2p-fullblocktest.py
mirzaei-ce/core-alisinabit
9929923df19fc9f03eb02fa056f325c9a284cfcf
[ "MIT" ]
null
null
null
qa/rpc-tests/p2p-fullblocktest.py
mirzaei-ce/core-alisinabit
9929923df19fc9f03eb02fa056f325c9a284cfcf
[ "MIT" ]
null
null
null
qa/rpc-tests/p2p-fullblocktest.py
mirzaei-ce/core-alisinabit
9929923df19fc9f03eb02fa056f325c9a284cfcf
[ "MIT" ]
null
null
null
#!/usr/bin/env python2 # # Distributed under the MIT/X11 software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.comptool import TestManager, TestInstance, RejectResult from test_framework.blocktools import * import time from test_framework.key import CECKey from test_framework.script import CScript, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE class PreviousSpendableOutput(object): def __init__(self, tx = CTransaction(), n = -1): self.tx = tx self.n = n # the output we're spending ''' This reimplements tests from the alisinabitj/FullBlockTestGenerator used by the pull-tester. We use the testing framework in which we expect a particular answer from each test. ''' class FullBlockTest(ComparisonTestFramework): ''' Can either run this test as 1 node with expected answers, or two and compare them. Change the "outcome" variable from each TestInstance object to only do the comparison. ''' def __init__(self): self.num_nodes = 1 self.block_heights = {} self.coinbase_key = CECKey() self.coinbase_key.set_secretbytes(bytes("horsebattery")) self.coinbase_pubkey = self.coinbase_key.get_pubkey() self.block_time = int(time.time())+1 self.tip = None self.blocks = {} def run_test(self): test = TestManager(self, self.options.tmpdir) test.add_all_connections(self.nodes) NetworkThread().start() # Start up network handling in another thread test.run() def add_transactions_to_block(self, block, tx_list): [ tx.rehash() for tx in tx_list ] block.vtx.extend(tx_list) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() return block # Create a block on top of self.tip, and advance self.tip to point to the new block # if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output, # and rest will go to fees. 
def next_block(self, number, spend=None, additional_coinbase_value=0, script=None): if self.tip == None: base_block_hash = self.genesis_hash else: base_block_hash = self.tip.sha256 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) coinbase.vout[0].nValue += additional_coinbase_value if (spend != None): coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees coinbase.rehash() block = create_block(base_block_hash, coinbase, self.block_time) if (spend != None): tx = CTransaction() tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet # This copies the java comparison tool testing behavior: the first # txout has a garbage scriptPubKey, "to make sure we're not # pre-verifying too much" (?) tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255]))) if script == None: tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) else: tx.vout.append(CTxOut(1, script)) # Now sign it if necessary scriptSig = "" scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): # looks like an anyone-can-spend scriptSig = CScript([OP_TRUE]) else: # We have to actually sign it (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL) scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) tx.vin[0].scriptSig = scriptSig # Now add the transaction to the block block = self.add_transactions_to_block(block, [tx]) block.solve() self.tip = block self.block_heights[block.sha256] = height self.block_time += 1 assert number not in self.blocks self.blocks[number] = block return block def get_tests(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] # save the current tip so it can be spent by a later block def save_spendable_output(): 
spendable_outputs.append(self.tip) # get an output that we previous marked as spendable def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) # returns a test case that asserts that the current tip was accepted def accepted(): return TestInstance([[self.tip, True]]) # returns a test case that asserts that the current tip was rejected def rejected(reject = None): if reject is None: return TestInstance([[self.tip, False]]) else: return TestInstance([[self.tip, reject]]) # move the tip back to a previous block def tip(number): self.tip = self.blocks[number] # add transactions to a block produced by next_block def update_block(block_number, new_transactions): block = self.blocks[block_number] old_hash = block.sha256 self.add_transactions_to_block(block, new_transactions) block.solve() # Update the internal state just like in next_block self.tip = block self.block_heights[block.sha256] = self.block_heights[old_hash] del self.block_heights[old_hash] self.blocks[block_number] = block return block # creates a new block and advances the tip to that block block = self.next_block # Create a new block block(0) save_spendable_output() yield accepted() # Now we need that block to mature so we can spend the coinbase. test = TestInstance(sync_every_block=False) for i in range(99): block(1000 + i) test.blocks_and_transactions.append([self.tip, True]) save_spendable_output() yield test # Start by building a couple of blocks on top (which output is spent is # in parentheses): # genesis -> b1 (0) -> b2 (1) out0 = get_spendable_output() block(1, spend=out0) save_spendable_output() yield accepted() out1 = get_spendable_output() b2 = block(2, spend=out1) yield accepted() # so fork like this: # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) # # Nothing should happen at this point. We saw b2 first so it takes priority. 
tip(1) b3 = block(3, spend=out1) txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1) yield rejected() # Now we add another block to make the alternative chain longer. # # genesis -> b1 (0) -> b2 (1) # \-> b3 (1) -> b4 (2) out2 = get_spendable_output() block(4, spend=out2) yield accepted() # ... and back to the first chain. # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b3 (1) -> b4 (2) tip(2) block(5, spend=out2) save_spendable_output() yield rejected() out3 = get_spendable_output() block(6, spend=out3) yield accepted() # Try to create a fork that double-spends # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b7 (2) -> b8 (4) # \-> b3 (1) -> b4 (2) tip(5) block(7, spend=out2) yield rejected() out4 = get_spendable_output() block(8, spend=out4) yield rejected() # Try to create a block that has too much fee # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b9 (4) # \-> b3 (1) -> b4 (2) tip(6) block(9, spend=out4, additional_coinbase_value=1) yield rejected(RejectResult(16, 'bad-cb-amount')) # Create a fork that ends in a block with too much fee (the one that causes the reorg) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b10 (3) -> b11 (4) # \-> b3 (1) -> b4 (2) tip(5) block(10, spend=out3) yield rejected() block(11, spend=out4, additional_coinbase_value=1) yield rejected(RejectResult(16, 'bad-cb-amount')) # Try again, but with a valid fork first # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b14 (5) # (b12 added last) # \-> b3 (1) -> b4 (2) tip(5) b12 = block(12, spend=out3) save_spendable_output() #yield TestInstance([[b12, False]]) b13 = block(13, spend=out4) # Deliver the block header for b12, and the block b13. # b13 should be accepted but the tip won't advance until b12 is delivered. 
yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) save_spendable_output() out5 = get_spendable_output() # b14 is invalid, but the node won't know that until it tries to connect # Tip still can't advance because b12 is missing block(14, spend=out5, additional_coinbase_value=1) yield rejected() yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13. # Add a block with MAX_BLOCK_SIGOPS and one with one more sigop # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6) # \-> b3 (1) -> b4 (2) # Test that a block with a lot of checksigs is okay lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1)) tip(13) block(15, spend=out5, script=lots_of_checksigs) yield accepted() # Test that a block with too many checksigs is rejected out6 = get_spendable_output() too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50)) block(16, spend=out6, script=too_many_checksigs) yield rejected(RejectResult(16, 'bad-blk-sigops')) # Attempt to spend a transaction created on a different fork # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1]) # \-> b3 (1) -> b4 (2) tip(15) block(17, spend=txout_b3) yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent')) # Attempt to spend a transaction created on a different fork (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b18 (b3.vtx[1]) -> b19 (6) # \-> b3 (1) -> b4 (2) tip(13) block(18, spend=txout_b3) yield rejected() block(19, spend=out6) yield rejected() # Attempt to spend a coinbase at depth too low # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7) # \-> b3 (1) -> b4 (2) tip(15) out7 = get_spendable_output() block(20, spend=out7) yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase')) # Attempt to spend a coinbase at depth too low (on a fork this time) # genesis -> b1 (0) -> b2 
(1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b21 (6) -> b22 (5) # \-> b3 (1) -> b4 (2) tip(13) block(21, spend=out6) yield rejected() block(22, spend=out5) yield rejected() # Create a block on either side of MAX_BLOCK_SIZE and make sure its accepted/rejected # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) # \-> b24 (6) -> b25 (7) # \-> b3 (1) -> b4 (2) tip(15) b23 = block(23, spend=out6) old_hash = b23.sha256 tx = CTransaction() script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69 script_output = CScript([chr(0)*script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1))) b23 = update_block(23, [tx]) # Make sure the math above worked out to produce a max-sized block assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE) yield accepted() # Make the next block one byte bigger and check that it fails tip(15) b24 = block(24, spend=out6) script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69 script_output = CScript([chr(0)*(script_length+1)]) tx.vout = [CTxOut(0, script_output)] b24 = update_block(24, [tx]) assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1) yield rejected(RejectResult(16, 'bad-blk-length')) b25 = block(25, spend=out7) yield rejected() # Create blocks with a coinbase input script size out of range # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) # \-> ... (6) -> ... (7) # \-> b3 (1) -> b4 (2) tip(15) b26 = block(26, spend=out6) b26.vtx[0].vin[0].scriptSig = chr(0) b26.vtx[0].rehash() # update_block causes the merkle root to get updated, even with no new # transactions, and updates the required state. 
b26 = update_block(26, []) yield rejected(RejectResult(16, 'bad-cb-length')) # Extend the b26 chain to make sure alisinabitd isn't accepting b26 b27 = block(27, spend=out7) yield rejected() # Now try a too-large-coinbase script tip(15) b28 = block(28, spend=out6) b28.vtx[0].vin[0].scriptSig = chr(0)*101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, 'bad-cb-length')) # Extend the b28 chain to make sure alisinabitd isn't accepted b28 b29 = block(29, spend=out7) # TODO: Should get a reject message back with "bad-prevblk", except # there's a bug that prevents this from being detected. Just note # failure for now, and add the reject result later. yield rejected() # b30 has a max-sized coinbase scriptSig. tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = chr(0)*100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() if __name__ == '__main__': FullBlockTest().main()
40.08209
106
0.536461
from test_framework.test_framework import ComparisonTestFramework from test_framework.util import * from test_framework.comptool import TestManager, TestInstance, RejectResult from test_framework.blocktools import * import time from test_framework.key import CECKey from test_framework.script import CScript, SignatureHash, SIGHASH_ALL, OP_TRUE, OP_FALSE class PreviousSpendableOutput(object): def __init__(self, tx = CTransaction(), n = -1): self.tx = tx self.n = n class FullBlockTest(ComparisonTestFramework): def __init__(self): self.num_nodes = 1 self.block_heights = {} self.coinbase_key = CECKey() self.coinbase_key.set_secretbytes(bytes("horsebattery")) self.coinbase_pubkey = self.coinbase_key.get_pubkey() self.block_time = int(time.time())+1 self.tip = None self.blocks = {} def run_test(self): test = TestManager(self, self.options.tmpdir) test.add_all_connections(self.nodes) NetworkThread().start() # Start up network handling in another thread test.run() def add_transactions_to_block(self, block, tx_list): [ tx.rehash() for tx in tx_list ] block.vtx.extend(tx_list) block.hashMerkleRoot = block.calc_merkle_root() block.rehash() return block # Create a block on top of self.tip, and advance self.tip to point to the new block # if spend is specified, then 1 satoshi will be spent from that to an anyone-can-spend output, # and rest will go to fees. 
def next_block(self, number, spend=None, additional_coinbase_value=0, script=None): if self.tip == None: base_block_hash = self.genesis_hash else: base_block_hash = self.tip.sha256 # First create the coinbase height = self.block_heights[base_block_hash] + 1 coinbase = create_coinbase(height, self.coinbase_pubkey) coinbase.vout[0].nValue += additional_coinbase_value if (spend != None): coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees coinbase.rehash() block = create_block(base_block_hash, coinbase, self.block_time) if (spend != None): tx = CTransaction() tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet # This copies the java comparison tool testing behavior: the first # txout has a garbage scriptPubKey, "to make sure we're not # pre-verifying too much" (?) tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255]))) if script == None: tx.vout.append(CTxOut(1, CScript([OP_TRUE]))) else: tx.vout.append(CTxOut(1, script)) scriptSig = "" scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey) if (scriptPubKey[0] == OP_TRUE): scriptSig = CScript([OP_TRUE]) else: (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL) scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))]) tx.vin[0].scriptSig = scriptSig block = self.add_transactions_to_block(block, [tx]) block.solve() self.tip = block self.block_heights[block.sha256] = height self.block_time += 1 assert number not in self.blocks self.blocks[number] = block return block def get_tests(self): self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16) self.block_heights[self.genesis_hash] = 0 spendable_outputs = [] def save_spendable_output(): spendable_outputs.append(self.tip) def get_spendable_output(): return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0) def accepted(): return TestInstance([[self.tip, True]]) def rejected(reject = None): 
if reject is None: return TestInstance([[self.tip, False]]) else: return TestInstance([[self.tip, reject]]) def tip(number): self.tip = self.blocks[number] def update_block(block_number, new_transactions): block = self.blocks[block_number] old_hash = block.sha256 self.add_transactions_to_block(block, new_transactions) block.solve() self.tip = block self.block_heights[block.sha256] = self.block_heights[old_hash] del self.block_heights[old_hash] self.blocks[block_number] = block return block block = self.next_block block(0) save_spendable_output() yield accepted() test = TestInstance(sync_every_block=False) for i in range(99): block(1000 + i) test.blocks_and_transactions.append([self.tip, True]) save_spendable_output() yield test out0 = get_spendable_output() block(1, spend=out0) save_spendable_output() yield accepted() out1 = get_spendable_output() b2 = block(2, spend=out1) yield accepted() tip(1) b3 = block(3, spend=out1) txout_b3 = PreviousSpendableOutput(b3.vtx[1], 1) yield rejected() out2 = get_spendable_output() block(4, spend=out2) yield accepted() tip(2) block(5, spend=out2) save_spendable_output() yield rejected() out3 = get_spendable_output() block(6, spend=out3) yield accepted() tip(5) block(7, spend=out2) yield rejected() out4 = get_spendable_output() block(8, spend=out4) yield rejected() tip(6) block(9, spend=out4, additional_coinbase_value=1) yield rejected(RejectResult(16, 'bad-cb-amount')) tip(5) block(10, spend=out3) yield rejected() block(11, spend=out4, additional_coinbase_value=1) yield rejected(RejectResult(16, 'bad-cb-amount')) tip(5) b12 = block(12, spend=out3) save_spendable_output() b13 = block(13, spend=out4) yield TestInstance([[CBlockHeader(b12), None], [b13, False]]) save_spendable_output() out5 = get_spendable_output() # b14 is invalid, but the node won't know that until it tries to connect block(14, spend=out5, additional_coinbase_value=1) yield rejected() yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13. 
# Add a block with MAX_BLOCK_SIGOPS and one with one more sigop # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b16 (6) # \-> b3 (1) -> b4 (2) # Test that a block with a lot of checksigs is okay lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1)) tip(13) block(15, spend=out5, script=lots_of_checksigs) yield accepted() # Test that a block with too many checksigs is rejected out6 = get_spendable_output() too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50)) block(16, spend=out6, script=too_many_checksigs) yield rejected(RejectResult(16, 'bad-blk-sigops')) # Attempt to spend a transaction created on a different fork # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b17 (b3.vtx[1]) # \-> b3 (1) -> b4 (2) tip(15) block(17, spend=txout_b3) yield rejected(RejectResult(16, 'bad-txns-inputs-missingorspent')) # Attempt to spend a transaction created on a different fork (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b18 (b3.vtx[1]) -> b19 (6) # \-> b3 (1) -> b4 (2) tip(13) block(18, spend=txout_b3) yield rejected() block(19, spend=out6) yield rejected() # Attempt to spend a coinbase at depth too low # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b20 (7) # \-> b3 (1) -> b4 (2) tip(15) out7 = get_spendable_output() block(20, spend=out7) yield rejected(RejectResult(16, 'bad-txns-premature-spend-of-coinbase')) # Attempt to spend a coinbase at depth too low (on a fork this time) # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) # \-> b21 (6) -> b22 (5) # \-> b3 (1) -> b4 (2) tip(13) block(21, spend=out6) yield rejected() block(22, spend=out5) yield rejected() # Create a block on either side of MAX_BLOCK_SIZE and make sure its accepted/rejected # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) # 
\-> b24 (6) -> b25 (7) # \-> b3 (1) -> b4 (2) tip(15) b23 = block(23, spend=out6) old_hash = b23.sha256 tx = CTransaction() script_length = MAX_BLOCK_SIZE - len(b23.serialize()) - 69 script_output = CScript([chr(0)*script_length]) tx.vout.append(CTxOut(0, script_output)) tx.vin.append(CTxIn(COutPoint(b23.vtx[1].sha256, 1))) b23 = update_block(23, [tx]) # Make sure the math above worked out to produce a max-sized block assert_equal(len(b23.serialize()), MAX_BLOCK_SIZE) yield accepted() # Make the next block one byte bigger and check that it fails tip(15) b24 = block(24, spend=out6) script_length = MAX_BLOCK_SIZE - len(b24.serialize()) - 69 script_output = CScript([chr(0)*(script_length+1)]) tx.vout = [CTxOut(0, script_output)] b24 = update_block(24, [tx]) assert_equal(len(b24.serialize()), MAX_BLOCK_SIZE+1) yield rejected(RejectResult(16, 'bad-blk-length')) b25 = block(25, spend=out7) yield rejected() # Create blocks with a coinbase input script size out of range # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3) # \-> b12 (3) -> b13 (4) -> b15 (5) -> b23 (6) -> b30 (7) # \-> ... (6) -> ... (7) # \-> b3 (1) -> b4 (2) tip(15) b26 = block(26, spend=out6) b26.vtx[0].vin[0].scriptSig = chr(0) b26.vtx[0].rehash() # update_block causes the merkle root to get updated, even with no new # transactions, and updates the required state. b26 = update_block(26, []) yield rejected(RejectResult(16, 'bad-cb-length')) # Extend the b26 chain to make sure alisinabitd isn't accepting b26 b27 = block(27, spend=out7) yield rejected() tip(15) b28 = block(28, spend=out6) b28.vtx[0].vin[0].scriptSig = chr(0)*101 b28.vtx[0].rehash() b28 = update_block(28, []) yield rejected(RejectResult(16, 'bad-cb-length')) b29 = block(29, spend=out7) # TODO: Should get a reject message back with "bad-prevblk", except # there's a bug that prevents this from being detected. 
Just note yield rejected() tip(23) b30 = block(30) b30.vtx[0].vin[0].scriptSig = chr(0)*100 b30.vtx[0].rehash() b30 = update_block(30, []) yield accepted() if __name__ == '__main__': FullBlockTest().main()
true
true
f71cfd0aee39e98c974f9c4ad5bc7792c8b07739
1,316
py
Python
python/oneflow/test/modules/test_consistent_dot.py
L-Net-1992/oneflow
4dc08d65caea36fdd137841ac95551218897e730
[ "Apache-2.0" ]
1
2022-03-14T11:17:56.000Z
2022-03-14T11:17:56.000Z
python/oneflow/test/modules/test_consistent_dot.py
L-Net-1992/oneflow
4dc08d65caea36fdd137841ac95551218897e730
[ "Apache-2.0" ]
null
null
null
python/oneflow/test/modules/test_consistent_dot.py
L-Net-1992/oneflow
4dc08d65caea36fdd137841ac95551218897e730
[ "Apache-2.0" ]
1
2021-12-15T02:14:49.000Z
2021-12-15T02:14:49.000Z
""" Copyright 2020 The OneFlow Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest import oneflow as flow import oneflow.unittest from oneflow.test_utils.automated_test_util import * @autotest(n=1, check_graph=False) def do_test_dot_impl(test_case, placement, sbp): k = random(100, 1000) * 8 x = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp) y = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp) z = torch.dot(x, y) return z class TestDotConsistent(flow.unittest.TestCase): @globaltest def test_dot(test_case): for placement in all_placement(): for sbp in all_sbp(placement, max_dim=1): do_test_dot_impl(test_case, placement, sbp) if __name__ == "__main__": unittest.main()
31.333333
77
0.740122
import unittest import oneflow as flow import oneflow.unittest from oneflow.test_utils.automated_test_util import * @autotest(n=1, check_graph=False) def do_test_dot_impl(test_case, placement, sbp): k = random(100, 1000) * 8 x = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp) y = random_tensor(ndim=1, dim0=k).to_global(placement=placement, sbp=sbp) z = torch.dot(x, y) return z class TestDotConsistent(flow.unittest.TestCase): @globaltest def test_dot(test_case): for placement in all_placement(): for sbp in all_sbp(placement, max_dim=1): do_test_dot_impl(test_case, placement, sbp) if __name__ == "__main__": unittest.main()
true
true
f71cfdceb8c455928ba52221223c74b392f337c7
5,558
py
Python
sympy/concrete/gosper.py
shilpiprd/sympy
556e9c61b31d0d5f101cd56b43e843fbf3bcf121
[ "BSD-3-Clause" ]
8,323
2015-01-02T15:51:43.000Z
2022-03-31T13:13:19.000Z
sympy/concrete/gosper.py
shilpiprd/sympy
556e9c61b31d0d5f101cd56b43e843fbf3bcf121
[ "BSD-3-Clause" ]
15,102
2015-01-01T01:33:17.000Z
2022-03-31T22:53:13.000Z
sympy/concrete/gosper.py
shilpiprd/sympy
556e9c61b31d0d5f101cd56b43e843fbf3bcf121
[ "BSD-3-Clause" ]
4,490
2015-01-01T17:48:07.000Z
2022-03-31T17:24:05.000Z
"""Gosper's algorithm for hypergeometric summation. """ from sympy.core import S, Dummy, symbols from sympy.core.compatibility import is_sequence from sympy.polys import Poly, parallel_poly_from_expr, factor from sympy.solvers import solve from sympy.simplify import hypersimp def gosper_normal(f, g, n, polys=True): r""" Compute the Gosper's normal form of ``f`` and ``g``. Explanation =========== Given relatively prime univariate polynomials ``f`` and ``g``, rewrite their quotient to a normal form defined as follows: .. math:: \frac{f(n)}{g(n)} = Z \cdot \frac{A(n) C(n+1)}{B(n) C(n)} where ``Z`` is an arbitrary constant and ``A``, ``B``, ``C`` are monic polynomials in ``n`` with the following properties: 1. `\gcd(A(n), B(n+h)) = 1 \forall h \in \mathbb{N}` 2. `\gcd(B(n), C(n+1)) = 1` 3. `\gcd(A(n), C(n)) = 1` This normal form, or rational factorization in other words, is a crucial step in Gosper's algorithm and in solving of difference equations. It can be also used to decide if two hypergeometric terms are similar or not. This procedure will return a tuple containing elements of this factorization in the form ``(Z*A, B, C)``. Examples ======== >>> from sympy.concrete.gosper import gosper_normal >>> from sympy.abc import n >>> gosper_normal(4*n+5, 2*(4*n+1)*(2*n+3), n, polys=False) (1/4, n + 3/2, n + 1/4) """ (p, q), opt = parallel_poly_from_expr( (f, g), n, field=True, extension=True) a, A = p.LC(), p.monic() b, B = q.LC(), q.monic() C, Z = A.one, a/b h = Dummy('h') D = Poly(n + h, n, h, domain=opt.domain) R = A.resultant(B.compose(D)) roots = set(R.ground_roots().keys()) for r in set(roots): if not r.is_Integer or r < 0: roots.remove(r) for i in sorted(roots): d = A.gcd(B.shift(+i)) A = A.quo(d) B = B.quo(d.shift(-i)) for j in range(1, i + 1): C *= d.shift(-j) A = A.mul_ground(Z) if not polys: A = A.as_expr() B = B.as_expr() C = C.as_expr() return A, B, C def gosper_term(f, n): r""" Compute Gosper's hypergeometric term for ``f``. 
Explanation =========== Suppose ``f`` is a hypergeometric term such that: .. math:: s_n = \sum_{k=0}^{n-1} f_k and `f_k` doesn't depend on `n`. Returns a hypergeometric term `g_n` such that `g_{n+1} - g_n = f_n`. Examples ======== >>> from sympy.concrete.gosper import gosper_term >>> from sympy.functions import factorial >>> from sympy.abc import n >>> gosper_term((4*n + 1)*factorial(n)/factorial(2*n + 1), n) (-n - 1/2)/(n + 1/4) """ r = hypersimp(f, n) if r is None: return None # 'f' is *not* a hypergeometric term p, q = r.as_numer_denom() A, B, C = gosper_normal(p, q, n) B = B.shift(-1) N = S(A.degree()) M = S(B.degree()) K = S(C.degree()) if (N != M) or (A.LC() != B.LC()): D = {K - max(N, M)} elif not N: D = {K - N + 1, S.Zero} else: D = {K - N + 1, (B.nth(N - 1) - A.nth(N - 1))/A.LC()} for d in set(D): if not d.is_Integer or d < 0: D.remove(d) if not D: return None # 'f(n)' is *not* Gosper-summable d = max(D) coeffs = symbols('c:%s' % (d + 1), cls=Dummy) domain = A.get_domain().inject(*coeffs) x = Poly(coeffs, n, domain=domain) H = A*x.shift(1) - B*x - C solution = solve(H.coeffs(), coeffs) if solution is None: return None # 'f(n)' is *not* Gosper-summable x = x.as_expr().subs(solution) for coeff in coeffs: if coeff not in solution: x = x.subs(coeff, 0) if x.is_zero: return None # 'f(n)' is *not* Gosper-summable else: return B.as_expr()*x/C.as_expr() def gosper_sum(f, k): r""" Gosper's hypergeometric summation algorithm. Explanation =========== Given a hypergeometric term ``f`` such that: .. math :: s_n = \sum_{k=0}^{n-1} f_k and `f(n)` doesn't depend on `n`, returns `g_{n} - g(0)` where `g_{n+1} - g_n = f_n`, or ``None`` if `s_n` cannot be expressed in closed form as a sum of hypergeometric terms. 
Examples ======== >>> from sympy.concrete.gosper import gosper_sum >>> from sympy.functions import factorial >>> from sympy.abc import n, k >>> f = (4*k + 1)*factorial(k)/factorial(2*k + 1) >>> gosper_sum(f, (k, 0, n)) (-factorial(n) + 2*factorial(2*n + 1))/factorial(2*n + 1) >>> _.subs(n, 2) == sum(f.subs(k, i) for i in [0, 1, 2]) True >>> gosper_sum(f, (k, 3, n)) (-60*factorial(n) + factorial(2*n + 1))/(60*factorial(2*n + 1)) >>> _.subs(n, 5) == sum(f.subs(k, i) for i in [3, 4, 5]) True References ========== .. [1] Marko Petkovsek, Herbert S. Wilf, Doron Zeilberger, A = B, AK Peters, Ltd., Wellesley, MA, USA, 1997, pp. 73--100 """ indefinite = False if is_sequence(k): k, a, b = k else: indefinite = True g = gosper_term(f, k) if g is None: return None if indefinite: result = f*g else: result = (f*(g + 1)).subs(k, b) - (f*g).subs(k, a) if result is S.NaN: try: result = (f*(g + 1)).limit(k, b) - (f*g).limit(k, a) except NotImplementedError: result = None return factor(result)
24.377193
69
0.536704
from sympy.core import S, Dummy, symbols from sympy.core.compatibility import is_sequence from sympy.polys import Poly, parallel_poly_from_expr, factor from sympy.solvers import solve from sympy.simplify import hypersimp def gosper_normal(f, g, n, polys=True): (p, q), opt = parallel_poly_from_expr( (f, g), n, field=True, extension=True) a, A = p.LC(), p.monic() b, B = q.LC(), q.monic() C, Z = A.one, a/b h = Dummy('h') D = Poly(n + h, n, h, domain=opt.domain) R = A.resultant(B.compose(D)) roots = set(R.ground_roots().keys()) for r in set(roots): if not r.is_Integer or r < 0: roots.remove(r) for i in sorted(roots): d = A.gcd(B.shift(+i)) A = A.quo(d) B = B.quo(d.shift(-i)) for j in range(1, i + 1): C *= d.shift(-j) A = A.mul_ground(Z) if not polys: A = A.as_expr() B = B.as_expr() C = C.as_expr() return A, B, C def gosper_term(f, n): r = hypersimp(f, n) if r is None: return None p, q = r.as_numer_denom() A, B, C = gosper_normal(p, q, n) B = B.shift(-1) N = S(A.degree()) M = S(B.degree()) K = S(C.degree()) if (N != M) or (A.LC() != B.LC()): D = {K - max(N, M)} elif not N: D = {K - N + 1, S.Zero} else: D = {K - N + 1, (B.nth(N - 1) - A.nth(N - 1))/A.LC()} for d in set(D): if not d.is_Integer or d < 0: D.remove(d) if not D: return None d = max(D) coeffs = symbols('c:%s' % (d + 1), cls=Dummy) domain = A.get_domain().inject(*coeffs) x = Poly(coeffs, n, domain=domain) H = A*x.shift(1) - B*x - C solution = solve(H.coeffs(), coeffs) if solution is None: return None x = x.as_expr().subs(solution) for coeff in coeffs: if coeff not in solution: x = x.subs(coeff, 0) if x.is_zero: return None else: return B.as_expr()*x/C.as_expr() def gosper_sum(f, k): indefinite = False if is_sequence(k): k, a, b = k else: indefinite = True g = gosper_term(f, k) if g is None: return None if indefinite: result = f*g else: result = (f*(g + 1)).subs(k, b) - (f*g).subs(k, a) if result is S.NaN: try: result = (f*(g + 1)).limit(k, b) - (f*g).limit(k, a) except NotImplementedError: result = None return 
factor(result)
true
true
f71cfee5730a3abc93dbfc1a9dac679b29cf53c2
17,212
py
Python
tests/test_authentication.py
crawlersick/mysql-connector-python
9a224b96250ba81f2bcc279496befcff309ae88a
[ "BSD-3-Clause" ]
1
2021-08-04T21:37:23.000Z
2021-08-04T21:37:23.000Z
tests/test_authentication.py
maximmasiutin/mysql-connector-python
9d5e6f532a0342795f380ee1cfeeb93adbe333a0
[ "BSD-3-Clause" ]
null
null
null
tests/test_authentication.py
maximmasiutin/mysql-connector-python
9d5e6f532a0342795f380ee1cfeeb93adbe333a0
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- # Copyright (c) 2014, 2020, Oracle and/or its affiliates. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License, version 2.0, as # published by the Free Software Foundation. # # This program is also distributed with certain software (including # but not limited to OpenSSL) that is licensed under separate terms, # as designated in a particular file or component or in included license # documentation. The authors of MySQL hereby grant you an # additional permission to link the program and your derivative works # with the separately licensed software that they have included with # MySQL. # # Without limiting anything contained in the foregoing, this file, # which is part of MySQL Connector/Python, is also subject to the # Universal FOSS Exception, version 1.0, a copy of which can be found at # http://oss.oracle.com/licenses/universal-foss-exception. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License, version 2.0, for more details. 
# # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA """Test module for authentication """ import inspect import sys import mysql.connector from mysql.connector import authentication from mysql.connector.errors import InterfaceError import tests _STANDARD_PLUGINS = ( 'mysql_native_password', 'mysql_clear_password', 'sha256_password', ) class AuthenticationModuleTests(tests.MySQLConnectorTests): """Tests globals and functions of the authentication module""" def test_get_auth_plugin(self): self.assertRaises(mysql.connector.NotSupportedError, authentication.get_auth_plugin, 'spam') self.assertRaises(mysql.connector.NotSupportedError, authentication.get_auth_plugin, '') # Test using standard plugins plugin_classes = {} for name, obj in inspect.getmembers(authentication): if inspect.isclass(obj) and hasattr(obj, 'plugin_name'): if obj.plugin_name: plugin_classes[obj.plugin_name] = obj for plugin_name in _STANDARD_PLUGINS: self.assertEqual(plugin_classes[plugin_name], authentication.get_auth_plugin(plugin_name), "Failed getting class for {0}".format(plugin_name)) class BaseAuthPluginTests(tests.MySQLConnectorTests): """Tests authentication.BaseAuthPlugin""" def test_class(self): self.assertEqual('', authentication.BaseAuthPlugin.plugin_name) self.assertEqual(False, authentication.BaseAuthPlugin.requires_ssl) def test___init__(self): base = authentication.BaseAuthPlugin('ham') self.assertEqual('ham', base._auth_data) self.assertEqual(None, base._username) self.assertEqual(None, base._password) self.assertEqual(None, base._database) self.assertEqual(False, base._ssl_enabled) base = authentication.BaseAuthPlugin( 'spam', username='ham', password='secret', database='test', ssl_enabled=True) self.assertEqual('spam', base._auth_data) self.assertEqual('ham', base._username) self.assertEqual('secret', base._password) 
self.assertEqual('test', base._database) self.assertEqual(True, base._ssl_enabled) def test_prepare_password(self): base = authentication.BaseAuthPlugin('ham') self.assertRaises(NotImplementedError, base.prepare_password) def test_auth_response(self): base = authentication.BaseAuthPlugin('ham') self.assertRaises(NotImplementedError, base.auth_response) base.requires_ssl = True self.assertRaises(mysql.connector.InterfaceError, base.auth_response) class MySQLNativePasswordAuthPluginTests(tests.MySQLConnectorTests): """Tests authentication.MySQLNativePasswordAuthPlugin""" def setUp(self): self.plugin_class = authentication.MySQLNativePasswordAuthPlugin def test_class(self): self.assertEqual('mysql_native_password', self.plugin_class.plugin_name) self.assertEqual(False, self.plugin_class.requires_ssl) def test_prepare_password(self): auth_plugin = self.plugin_class(None, password='spam') self.assertRaises(mysql.connector.InterfaceError, auth_plugin.prepare_password) auth_plugin = self.plugin_class(123456, password='spam') # too long self.assertRaises(mysql.connector.InterfaceError, auth_plugin.prepare_password) empty = b'' auth_data = ( b'\x2d\x3e\x33\x25\x5b\x7d\x25\x3c\x40\x6b' b'\x7b\x47\x30\x5b\x57\x25\x51\x48\x55\x53' ) auth_response = ( b'\x73\xb8\xf0\x4b\x3a\xa5\x7c\x46\xb9\x84' b'\x90\x50\xab\xc0\x3a\x0f\x8f\xad\x51\xa3' ) auth_plugin = self.plugin_class('\x3f'*20, password=None) self.assertEqual(empty, auth_plugin.prepare_password()) auth_plugin = self.plugin_class(auth_data, password='spam') self.assertEqual(auth_response, auth_plugin.prepare_password()) self.assertEqual(auth_response, auth_plugin.auth_response()) class MySQLClearPasswordAuthPluginTests(tests.MySQLConnectorTests): """Tests authentication.MySQLClearPasswordAuthPlugin""" def setUp(self): self.plugin_class = authentication.MySQLClearPasswordAuthPlugin def test_class(self): self.assertEqual('mysql_clear_password', self.plugin_class.plugin_name) self.assertEqual(True, 
self.plugin_class.requires_ssl) def test_prepare_password(self): exp = b'spam\x00' auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True) self.assertEqual(exp, auth_plugin.prepare_password()) self.assertEqual(exp, auth_plugin.auth_response()) class MySQLSHA256PasswordAuthPluginTests(tests.MySQLConnectorTests): """Tests authentication.MySQLSHA256PasswordAuthPlugin""" def setUp(self): self.plugin_class = authentication.MySQLSHA256PasswordAuthPlugin def test_class(self): self.assertEqual('sha256_password', self.plugin_class.plugin_name) self.assertEqual(True, self.plugin_class.requires_ssl) def test_prepare_password(self): exp = b'spam\x00' auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True) self.assertEqual(exp, auth_plugin.prepare_password()) self.assertEqual(exp, auth_plugin.auth_response()) class MySQLLdapSaslPasswordAuthPluginTests(tests.MySQLConnectorTests): """Tests authentication.MySQLLdapSaslPasswordAuthPlugin""" def setUp(self): self.plugin_class = authentication.MySQLLdapSaslPasswordAuthPlugin def test_class(self): self.assertEqual("authentication_ldap_sasl_client", self.plugin_class.plugin_name) self.assertEqual(False, self.plugin_class.requires_ssl) def test_auth_response(self): # Test unsupported mechanism error message auth_data = b'UNKOWN-METHOD' auth_plugin = self.plugin_class(auth_data, username="user", password="spam") with self.assertRaises(InterfaceError) as context: auth_plugin.auth_response() self.assertIn("sasl authentication method", context.exception.msg, "not the expected error {}".format(context.exception.msg)) self.assertIn("is not supported", context.exception.msg, "not the expected error {}".format(context.exception.msg)) with self.assertRaises(NotImplementedError) as context: auth_plugin.prepare_password() # Test SCRAM-SHA-1 mechanism is accepted auth_data = b'SCRAM-SHA-1' auth_plugin = self.plugin_class(auth_data, username="", password="") # Verify the format of the first message from client. 
exp = b'n,a=,n=,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) auth_plugin = self.plugin_class(auth_data, username="user", password="spam") # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) # Verify the format of the first message from client. exp = b'n,a=user,n=user,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}" "".format(len(cnonce), client_first_nsg)) # Verify that a user name that requires character mapping is mapped auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser", password="spam") exp = b'n,a=u ser,n=u ser,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_res in bad_responses: # verify an error is shown if server response is not as expected. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue(bad_res) self.assertIn("Unexpected server message", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown if server response is not well formated. 
with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54".encode())) self.assertIn("Incomplete reponse", context.exception.msg, "not the expected error {}".format(context.exception.msg)) # verify an error is shown if server does not authenticate response. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40".encode())) self.assertIn("Unable to authenticate resp", context.exception.msg, "not the expected error {}".format(context.exception.msg)) bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_proof in bad_proofs: # verify an error is shown if server proof is not well formated. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize(bad_proof) self.assertIn("proof is not well formated.", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown it the server can not prove it self. 
with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize( bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=")) self.assertIn("Unable to proof server identity", context.exception.msg, "not the expected error {}".format(context.exception.msg)) def test_auth_response256(self): # Test unsupported mechanism error message auth_data = b'UNKOWN-METHOD' auth_plugin = self.plugin_class(auth_data, username="user", password="spam") with self.assertRaises(InterfaceError) as context: auth_plugin.auth_response() self.assertIn('sasl authentication method "UNKOWN-METHOD"', context.exception.msg, "not the expected error {}" "".format(context.exception.msg)) self.assertIn("is not supported", context.exception.msg, "not the expected error {}".format(context.exception.msg)) with self.assertRaises(NotImplementedError) as context: auth_plugin.prepare_password() # Test SCRAM-SHA-256 mechanism is accepted auth_data = b'SCRAM-SHA-256' auth_plugin = self.plugin_class(auth_data, username="", password="") # Verify the format of the first message from client. exp = b'n,a=,n=,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) auth_plugin = self.plugin_class(auth_data, username="user", password="spam") # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) # Verify the format of the first message from client. 
exp = b'n,a=user,n=user,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}" "".format(len(cnonce), client_first_nsg)) # Verify that a user name that requires character mapping is mapped auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser", password="spam") exp = b'n,a=u ser,n=u ser,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_res in bad_responses: # verify an error is shown if server response is not as expected. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue(bad_res) self.assertIn("Unexpected server message", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown if server response is not well formated. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54")) self.assertIn("Incomplete reponse", context.exception.msg, "not the expected error {}".format(context.exception.msg)) # verify an error is shown if server does not authenticate response. 
with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40")) self.assertIn("Unable to authenticate resp", context.exception.msg, "not the expected error {}".format(context.exception.msg)) bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_proof in bad_proofs: # verify an error is shown if server proof is not well formated. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize(bad_proof) self.assertIn("proof is not well formated.", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown it the server can not prove it self. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize( bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=")) self.assertIn("Unable to proof server identity", context.exception.msg, "not the expected error {}".format(context.exception.msg))
44.246787
80
0.6528
import inspect import sys import mysql.connector from mysql.connector import authentication from mysql.connector.errors import InterfaceError import tests _STANDARD_PLUGINS = ( 'mysql_native_password', 'mysql_clear_password', 'sha256_password', ) class AuthenticationModuleTests(tests.MySQLConnectorTests): def test_get_auth_plugin(self): self.assertRaises(mysql.connector.NotSupportedError, authentication.get_auth_plugin, 'spam') self.assertRaises(mysql.connector.NotSupportedError, authentication.get_auth_plugin, '') plugin_classes = {} for name, obj in inspect.getmembers(authentication): if inspect.isclass(obj) and hasattr(obj, 'plugin_name'): if obj.plugin_name: plugin_classes[obj.plugin_name] = obj for plugin_name in _STANDARD_PLUGINS: self.assertEqual(plugin_classes[plugin_name], authentication.get_auth_plugin(plugin_name), "Failed getting class for {0}".format(plugin_name)) class BaseAuthPluginTests(tests.MySQLConnectorTests): def test_class(self): self.assertEqual('', authentication.BaseAuthPlugin.plugin_name) self.assertEqual(False, authentication.BaseAuthPlugin.requires_ssl) def test___init__(self): base = authentication.BaseAuthPlugin('ham') self.assertEqual('ham', base._auth_data) self.assertEqual(None, base._username) self.assertEqual(None, base._password) self.assertEqual(None, base._database) self.assertEqual(False, base._ssl_enabled) base = authentication.BaseAuthPlugin( 'spam', username='ham', password='secret', database='test', ssl_enabled=True) self.assertEqual('spam', base._auth_data) self.assertEqual('ham', base._username) self.assertEqual('secret', base._password) self.assertEqual('test', base._database) self.assertEqual(True, base._ssl_enabled) def test_prepare_password(self): base = authentication.BaseAuthPlugin('ham') self.assertRaises(NotImplementedError, base.prepare_password) def test_auth_response(self): base = authentication.BaseAuthPlugin('ham') self.assertRaises(NotImplementedError, base.auth_response) base.requires_ssl = True 
self.assertRaises(mysql.connector.InterfaceError, base.auth_response) class MySQLNativePasswordAuthPluginTests(tests.MySQLConnectorTests): def setUp(self): self.plugin_class = authentication.MySQLNativePasswordAuthPlugin def test_class(self): self.assertEqual('mysql_native_password', self.plugin_class.plugin_name) self.assertEqual(False, self.plugin_class.requires_ssl) def test_prepare_password(self): auth_plugin = self.plugin_class(None, password='spam') self.assertRaises(mysql.connector.InterfaceError, auth_plugin.prepare_password) auth_plugin = self.plugin_class(123456, password='spam') self.assertRaises(mysql.connector.InterfaceError, auth_plugin.prepare_password) empty = b'' auth_data = ( b'\x2d\x3e\x33\x25\x5b\x7d\x25\x3c\x40\x6b' b'\x7b\x47\x30\x5b\x57\x25\x51\x48\x55\x53' ) auth_response = ( b'\x73\xb8\xf0\x4b\x3a\xa5\x7c\x46\xb9\x84' b'\x90\x50\xab\xc0\x3a\x0f\x8f\xad\x51\xa3' ) auth_plugin = self.plugin_class('\x3f'*20, password=None) self.assertEqual(empty, auth_plugin.prepare_password()) auth_plugin = self.plugin_class(auth_data, password='spam') self.assertEqual(auth_response, auth_plugin.prepare_password()) self.assertEqual(auth_response, auth_plugin.auth_response()) class MySQLClearPasswordAuthPluginTests(tests.MySQLConnectorTests): def setUp(self): self.plugin_class = authentication.MySQLClearPasswordAuthPlugin def test_class(self): self.assertEqual('mysql_clear_password', self.plugin_class.plugin_name) self.assertEqual(True, self.plugin_class.requires_ssl) def test_prepare_password(self): exp = b'spam\x00' auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True) self.assertEqual(exp, auth_plugin.prepare_password()) self.assertEqual(exp, auth_plugin.auth_response()) class MySQLSHA256PasswordAuthPluginTests(tests.MySQLConnectorTests): def setUp(self): self.plugin_class = authentication.MySQLSHA256PasswordAuthPlugin def test_class(self): self.assertEqual('sha256_password', self.plugin_class.plugin_name) self.assertEqual(True, 
self.plugin_class.requires_ssl) def test_prepare_password(self): exp = b'spam\x00' auth_plugin = self.plugin_class(None, password='spam', ssl_enabled=True) self.assertEqual(exp, auth_plugin.prepare_password()) self.assertEqual(exp, auth_plugin.auth_response()) class MySQLLdapSaslPasswordAuthPluginTests(tests.MySQLConnectorTests): def setUp(self): self.plugin_class = authentication.MySQLLdapSaslPasswordAuthPlugin def test_class(self): self.assertEqual("authentication_ldap_sasl_client", self.plugin_class.plugin_name) self.assertEqual(False, self.plugin_class.requires_ssl) def test_auth_response(self): auth_data = b'UNKOWN-METHOD' auth_plugin = self.plugin_class(auth_data, username="user", password="spam") with self.assertRaises(InterfaceError) as context: auth_plugin.auth_response() self.assertIn("sasl authentication method", context.exception.msg, "not the expected error {}".format(context.exception.msg)) self.assertIn("is not supported", context.exception.msg, "not the expected error {}".format(context.exception.msg)) with self.assertRaises(NotImplementedError) as context: auth_plugin.prepare_password() auth_data = b'SCRAM-SHA-1' auth_plugin = self.plugin_class(auth_data, username="", password="") exp = b'n,a=,n=,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) auth_plugin = self.plugin_class(auth_data, username="user", password="spam") cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) # Verify the format of the first message from client. 
exp = b'n,a=user,n=user,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}" "".format(len(cnonce), client_first_nsg)) auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser", password="spam") exp = b'n,a=u ser,n=u ser,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_res in bad_responses: # verify an error is shown if server response is not as expected. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue(bad_res) self.assertIn("Unexpected server message", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown if server response is not well formated. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54".encode())) self.assertIn("Incomplete reponse", context.exception.msg, "not the expected error {}".format(context.exception.msg)) # verify an error is shown if server does not authenticate response. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray("r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40".encode())) self.assertIn("Unable to authenticate resp", context.exception.msg, "not the expected error {}".format(context.exception.msg)) bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_proof in bad_proofs: # verify an error is shown if server proof is not well formated. 
with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize(bad_proof) self.assertIn("proof is not well formated.", context.exception.msg, "not the expected: {}".format(context.exception.msg)) # verify an error is shown it the server can not prove it self. with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize( bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=")) self.assertIn("Unable to proof server identity", context.exception.msg, "not the expected error {}".format(context.exception.msg)) def test_auth_response256(self): # Test unsupported mechanism error message auth_data = b'UNKOWN-METHOD' auth_plugin = self.plugin_class(auth_data, username="user", password="spam") with self.assertRaises(InterfaceError) as context: auth_plugin.auth_response() self.assertIn('sasl authentication method "UNKOWN-METHOD"', context.exception.msg, "not the expected error {}" "".format(context.exception.msg)) self.assertIn("is not supported", context.exception.msg, "not the expected error {}".format(context.exception.msg)) with self.assertRaises(NotImplementedError) as context: auth_plugin.prepare_password() # Test SCRAM-SHA-256 mechanism is accepted auth_data = b'SCRAM-SHA-256' auth_plugin = self.plugin_class(auth_data, username="", password="") # Verify the format of the first message from client. 
exp = b'n,a=,n=,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) auth_plugin = self.plugin_class(auth_data, username="user", password="spam") # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(b'n,a=,n=,r=')):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) exp = b'n,a=user,n=user,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected cnonce legth {}, response {}" "".format(len(cnonce), client_first_nsg)) # Verify that a user name that requires character mapping is mapped auth_plugin = self.plugin_class(auth_data, username=u"u\u1680ser", password="spam") exp = b'n,a=u ser,n=u ser,r=' client_first_nsg = auth_plugin.auth_response() self.assertTrue(client_first_nsg.startswith(exp), "got header: {}".format(auth_plugin.auth_response())) # Verify the length of the client's nonce in r= cnonce = client_first_nsg[(len(exp)):] r_len = len(cnonce) self.assertEqual(32, r_len, "Unexpected legth {}".format(len(cnonce))) bad_responses = [None, "", "v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_res in bad_responses: with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue(bad_res) self.assertIn("Unexpected server message", context.exception.msg, "not the expected: {}".format(context.exception.msg)) with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,w54")) self.assertIn("Incomplete reponse", context.exception.msg, "not the expected error {}".format(context.exception.msg)) with self.assertRaises(InterfaceError) as context: auth_plugin.auth_continue( bytearray(b"r=/ZT33fXoR/BZT,s=IApa7ZwqQ/ZT,i=40")) 
self.assertIn("Unable to authenticate resp", context.exception.msg, "not the expected error {}".format(context.exception.msg)) bad_proofs = [None, "", b"5H6b+IApa7ZwqQ/ZT33fXoR/BTM=", b"", 123] for bad_proof in bad_proofs: with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize(bad_proof) self.assertIn("proof is not well formated.", context.exception.msg, "not the expected: {}".format(context.exception.msg)) with self.assertRaises(InterfaceError) as context: auth_plugin.auth_finalize( bytearray(b"v=5H6b+IApa7ZwqQ/ZT33fXoR/BTM=")) self.assertIn("Unable to proof server identity", context.exception.msg, "not the expected error {}".format(context.exception.msg))
true
true
f71cffb860b58eeb76624d1f1b619b2ed27a057c
1,477
py
Python
main.py
Chise1/bilibili-live-tools
23cef9ae6a42aeb89b64fc1558c4a94b1075444b
[ "MIT" ]
1
2020-07-02T08:39:20.000Z
2020-07-02T08:39:20.000Z
main.py
Chise1/bilibili-live-tools
23cef9ae6a42aeb89b64fc1558c4a94b1075444b
[ "MIT" ]
null
null
null
main.py
Chise1/bilibili-live-tools
23cef9ae6a42aeb89b64fc1558c4a94b1075444b
[ "MIT" ]
null
null
null
# !/usr/bin/python # -*- coding:utf-8 -*- import subprocess, time, sys from subprocess import Popen from typing import Optional TIME = 3600 CMD = "run.py" class Auto_Run(): def __init__(self, sleep_time, cmd): if sys.version_info < (3, 6): print("only support python 3.6 and later version") sys.exit(1111) self.sleep_time = sleep_time self.cmd = cmd self.ext = (cmd[-3:]).lower() self.p:Optional[Popen[str]]= None self.run() try: while 1: time.sleep(sleep_time * 20) self.poll = self.p.poll() if self.p.poll() is None: print("restarting......") self.p.kill() self.run() else: print("starting......") self.run() except KeyboardInterrupt as e: print("exit???") def run(self): if self.ext == ".py": print('start OK!') # use now running python version, think multiple python installed and now use python3.6 to run python_path = sys.executable print("use the absolute path of python to run", python_path) self.p = subprocess.Popen([python_path, '%s' % self.cmd], stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr, shell=False) else: pass app = Auto_Run(TIME, CMD)
32.108696
106
0.510494
import subprocess, time, sys from subprocess import Popen from typing import Optional TIME = 3600 CMD = "run.py" class Auto_Run(): def __init__(self, sleep_time, cmd): if sys.version_info < (3, 6): print("only support python 3.6 and later version") sys.exit(1111) self.sleep_time = sleep_time self.cmd = cmd self.ext = (cmd[-3:]).lower() self.p:Optional[Popen[str]]= None self.run() try: while 1: time.sleep(sleep_time * 20) self.poll = self.p.poll() if self.p.poll() is None: print("restarting......") self.p.kill() self.run() else: print("starting......") self.run() except KeyboardInterrupt as e: print("exit???") def run(self): if self.ext == ".py": print('start OK!') python_path = sys.executable print("use the absolute path of python to run", python_path) self.p = subprocess.Popen([python_path, '%s' % self.cmd], stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr, shell=False) else: pass app = Auto_Run(TIME, CMD)
true
true
f71d0082d003aab3724f4c3350ac16a31aff1d21
11,526
py
Python
ET/alfred/gen/render_trajs_from_et.py
amazon-research/multimodal-neuralslam
530558fdfa31c6e048fc3e7b253f681f6786b04d
[ "MIT-0" ]
4
2022-01-27T01:39:09.000Z
2022-01-30T14:45:15.000Z
ET/alfred/gen/render_trajs_from_et.py
amazon-research/multimodal-neuralslam
530558fdfa31c6e048fc3e7b253f681f6786b04d
[ "MIT-0" ]
1
2022-02-05T14:06:25.000Z
2022-02-05T14:06:25.000Z
ET/alfred/gen/render_trajs_from_et.py
amazon-research/multimodal-neuralslam
530558fdfa31c6e048fc3e7b253f681f6786b04d
[ "MIT-0" ]
1
2022-02-04T07:22:28.000Z
2022-02-04T07:22:28.000Z
import os import sys import json import numpy as np import threading import time import copy import random import glob import shutil import pickle from termcolor import colored from sacred import Ingredient, Experiment from alfred.env.thor_env import ThorEnv from alfred.gen import constants from alfred.gen.utils import augment_util, video_util from alfred.utils import helper_util, model_util args_ingredient = Ingredient('args') ex = Experiment('render_trajs', ingredients=[args_ingredient]) @args_ingredient.config def cfg_args(): # dataset folder to dump frames to data_output = 'generated_2.1.0_exp_from_et_v2' # dataset folder to load jsons from data_input = 'json_2.1.0' # smooth naviagation (like the original data) smooth_nav = False # time delays (like the original data) time_delays = True # whether to shuffle the order of augmenting shuffle = False # number of threads to start in parallel num_threads = 16 # frame size to render render_size = 300 # X server number x_display = '0' # render and save RGB images render_frames = True # render and save depth images render_depth = False # render and save class segmentation masks render_class_masks = False # render and save instance segmentation masks render_instance_masks = False # save object bounding boxes save_detections = False # partitions to render data for partitions = ('tests_unseen',) # whether to overwrite data folder if it already exists overwrite = False def setup_task(env, traj_data, args): # scene setup scene_num = traj_data['scene']['scene_num'] object_poses = traj_data['scene']['object_poses'] object_toggles = traj_data['scene']['object_toggles'] dirty_and_empty = traj_data['scene']['dirty_and_empty'] # reset scene_name = 'FloorPlan%d' % scene_num env.reset(scene_name, silent=True) env.restore_scene(object_poses, object_toggles, dirty_and_empty) assert traj_data['scene']['init_action']['horizon'] == 30 env.step(dict(traj_data['scene']['init_action'])) print('Task: {traj_data["task_id"]}' + ' 
{traj_data["task_type"]}' if 'task_type' in traj_data else '') if 'task_type' in traj_data: # setup task env.set_task(traj_data, reward_type='dense') augment_util.check_image(env.last_event.frame) def batch(iterable, n=1): l = len(iterable) for ndx in range(0, l, n): yield iterable[ndx:min(ndx + n, l)] def augment_traj(env, json_file, args, video_saver, render_settings, test_mode): # load json data with open(json_file) as f: traj_data = json.load(f) traj_data['images'] = list() root_dir_to, rendered_images_dir, save_settings = augment_util.prepare_for_traj( json_file, args) base_path = '/home/ubuntu/bak/ET/data/preds/et_human_synth' if test_mode: path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1] + ['0']) + '.pkl') ##### else: path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1]) + '.pkl') with open(path, 'rb') as f: _, actions_ = pickle.load(f) look_up = {'action': 'LookUp', 'forceAction': True} look_down = {'action': 'LookDown', 'forceAction': True} move = {'action': 'MoveAhead', 'forceAction': True} left = {'action': 'RotateLeft', 'forceAction': True} right = {'action': 'RotateRight', 'forceAction': True} actions = ['RotateRight_90'] * 4 for acts in batch(actions_, 8): actions.extend(acts) actions.extend(['RotateRight_90'] * 4) actions = actions[:225] print(len(actions), 'length') success = True for save_idx, init_action in enumerate([look_up, look_down, None]): setup_task(env, traj_data, args) rewards, img_count = [], 0 if init_action is not None: if not isinstance(init_action, list): init_action = [init_action] for act in init_action: _, _ = augment_util.env_navigate( act, env, save_settings, root_dir_to, render_settings, args.smooth_nav, img_count, save=False) for act in actions: if act.startswith('Move'): cmd = move if act.startswith('RotateLeft'): cmd = left if act.startswith('RotateRight'): cmd = right img_count_ = img_count # Save the image before the action execution. 
event, img_count = augment_util.env_navigate( cmd, env, save_settings, root_dir_to, render_settings, args.smooth_nav, img_count, save_idx=save_idx) if event is None: print(path, 'event is None!!!!!') success = False break if not event.metadata['lastActionSuccess']: print(colored("Replay Failed: %s" % ( env.last_event.metadata['errorMessage']), 'red')) print(path, 'event lastActionSuccess is False!!!!!') success = False break # save 1 frame in the end and increase the counter by 10 # (to be alligned with the train data) augment_util.save_image(env.last_event, root_dir_to, save_settings, img_count, None) img_count += 10 return success def start_worker(worker_id, traj_list, args, lock, processed_files_path, test_mode): ''' worker loop ''' if isinstance(args.x_display, (list, tuple)): x_display = args.x_display[worker_id % len(args.x_display)] else: x_display = args.x_display env = ThorEnv(x_display=x_display, player_screen_width=args.render_size, player_screen_height=args.render_size) video_saver = video_util.VideoSaver() render_settings = { 'renderImage': True, # otherwise other images won't be rendered as well 'renderDepthImage': args.render_depth, 'renderObjectImage': args.render_instance_masks, 'renderClassImage': args.render_class_masks} while len(traj_list) > 0: lock.acquire(timeout=120) json_file = traj_list.pop() json_path = os.path.join(args.data_input, json_file) jsons_left = len(traj_list) lock.release() print('Rendering {} ({} left)'.format(json_path, jsons_left)) augment_success = augment_traj( env, json_path, args, video_saver, render_settings, test_mode) # update processed_files on the disk lock.acquire(timeout=120) with open(processed_files_path, 'a') as f: f.write('{};{}'.format(json_file, int(augment_success)) + '\n') model_util.update_log( args.data_output, stage='augment', update='increase', progress=1) lock.release() env.stop() print("Finished.") @ex.automain def main(args): args = helper_util.AttrDict(**args) if args.data_output is None: raise 
RuntimeError('Please, specify the name of output dataset') if (not args.render_frames and not args.render_depth and not args.render_instance_masks and not args.render_class_masks): raise RuntimeError('At least one type of images should be rendered') # set up the paths args.data_input = os.path.join(constants.ET_DATA, args.data_input) print('Creating a dataset {} using data from {}'.format( args.data_output, args.data_input)) if not os.path.isdir(args.data_input): raise RuntimeError('The input dataset {} does not exist'.format( args.data_input)) args.data_output = os.path.join(constants.ET_DATA, args.data_output) processed_files_path = os.path.join(args.data_output, 'processed.txt') if os.path.exists(args.data_output) and args.overwrite: print('Erasing the old directory') shutil.rmtree(args.data_output) os.makedirs(args.data_output, exist_ok=True) test_mode = True ##### # make a list of all the traj_data json files traj_list = [] print('Indexing images in {}'.format(args.partitions)) for partition in args.partitions: if test_mode: file_list = glob.glob(os.path.join(args.data_input, partition, '*')) else: file_list = glob.glob(os.path.join(args.data_input, partition, '*/*')) for dir_name in sorted(file_list): if 'trial_' in os.path.basename(dir_name): json_path = os.path.join(dir_name, 'traj_data.json') # if 'trial_T20190909_101117_022448' not in json_path: continue if not os.path.isfile(json_path): continue if test_mode: traj_list.append('/'.join(json_path.split('/')[-3:])) else: traj_list.append('/'.join(json_path.split('/')[-4:])) start_idx = 0 #chunk_size = len(traj_list) // 3 #traj_list = traj_list[chunk_size * start_idx : chunk_size * (start_idx + 1)] num_files, num_processed_files = len(traj_list), 0 # remove jsons that were already processed if os.path.exists(processed_files_path): with open(processed_files_path) as f: processed_files = set( [line.strip().split(';')[0] for line in f.readlines()]) # check whether which files are in the desired partitions 
processed_files = set( [f for f in processed_files if f.split('/')[0] in args.partitions]) traj_list = [traj for traj in traj_list if traj not in processed_files] num_processed_files += len(processed_files) print('{} jsons were already processed'.format(num_processed_files)) print(colored('The total number of triajectories to process is {}'.format( len(traj_list)), 'yellow')) model_util.save_log(args.data_output, progress=num_processed_files, total=num_files, stage='augment') # random shuffle if args.shuffle: random.shuffle(traj_list) lock = threading.Lock() if args.num_threads > 0 and False: # start threads threads = [] for worker_id in range(min(args.num_threads, len(traj_list))): thread = threading.Thread( target=start_worker, args=(worker_id, traj_list, args, lock, processed_files_path, test_mode)) threads.append(thread) thread.start() time.sleep(1) for thread in threads: thread.join() else: # run in the main thread start_worker(0, traj_list, args, lock, processed_files_path, test_mode) return with open(processed_files_path) as f: num_processed_files = len(f.readlines()) if num_files != num_processed_files: print(colored('{} trajectories were skipped'.format( num_files - num_processed_files), 'red')) else: print(colored('All trajectories were successfully recorded', 'green')) #print('Copying tests folders') #if not os.path.exists(os.path.join(args.data_output, 'tests_seen')): # shutil.copytree(os.path.join(args.data_input, 'tests_seen'), # os.path.join(args.data_output, 'tests_seen')) #if not os.path.exists(os.path.join(args.data_output, 'tests_unseen')): # shutil.copytree(os.path.join(args.data_input, 'tests_unseen'), # os.path.join(args.data_output, 'tests_unseen')) print('The generated dataset is saved to {}'.format(args.data_output))
38.42
100
0.642461
import os import sys import json import numpy as np import threading import time import copy import random import glob import shutil import pickle from termcolor import colored from sacred import Ingredient, Experiment from alfred.env.thor_env import ThorEnv from alfred.gen import constants from alfred.gen.utils import augment_util, video_util from alfred.utils import helper_util, model_util args_ingredient = Ingredient('args') ex = Experiment('render_trajs', ingredients=[args_ingredient]) @args_ingredient.config def cfg_args(): data_output = 'generated_2.1.0_exp_from_et_v2' data_input = 'json_2.1.0' smooth_nav = False time_delays = True shuffle = False num_threads = 16 render_size = 300 x_display = '0' render_frames = True render_depth = False render_class_masks = False render_instance_masks = False save_detections = False partitions = ('tests_unseen',) overwrite = False def setup_task(env, traj_data, args): scene_num = traj_data['scene']['scene_num'] object_poses = traj_data['scene']['object_poses'] object_toggles = traj_data['scene']['object_toggles'] dirty_and_empty = traj_data['scene']['dirty_and_empty'] scene_name = 'FloorPlan%d' % scene_num env.reset(scene_name, silent=True) env.restore_scene(object_poses, object_toggles, dirty_and_empty) assert traj_data['scene']['init_action']['horizon'] == 30 env.step(dict(traj_data['scene']['init_action'])) print('Task: {traj_data["task_id"]}' + ' {traj_data["task_type"]}' if 'task_type' in traj_data else '') if 'task_type' in traj_data: env.set_task(traj_data, reward_type='dense') augment_util.check_image(env.last_event.frame) def batch(iterable, n=1): l = len(iterable) for ndx in range(0, l, n): yield iterable[ndx:min(ndx + n, l)] def augment_traj(env, json_file, args, video_saver, render_settings, test_mode): with open(json_file) as f: traj_data = json.load(f) traj_data['images'] = list() root_dir_to, rendered_images_dir, save_settings = augment_util.prepare_for_traj( json_file, args) base_path = 
'/home/ubuntu/bak/ET/data/preds/et_human_synth' if test_mode: path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1] + ['0']) + '.pkl') path = os.path.join(base_path, ':'.join(json_file.split('/')[-3:-1]) + '.pkl') with open(path, 'rb') as f: _, actions_ = pickle.load(f) look_up = {'action': 'LookUp', 'forceAction': True} look_down = {'action': 'LookDown', 'forceAction': True} move = {'action': 'MoveAhead', 'forceAction': True} left = {'action': 'RotateLeft', 'forceAction': True} right = {'action': 'RotateRight', 'forceAction': True} actions = ['RotateRight_90'] * 4 for acts in batch(actions_, 8): actions.extend(acts) actions.extend(['RotateRight_90'] * 4) actions = actions[:225] print(len(actions), 'length') success = True for save_idx, init_action in enumerate([look_up, look_down, None]): setup_task(env, traj_data, args) rewards, img_count = [], 0 if init_action is not None: if not isinstance(init_action, list): init_action = [init_action] for act in init_action: _, _ = augment_util.env_navigate( act, env, save_settings, root_dir_to, render_settings, args.smooth_nav, img_count, save=False) for act in actions: if act.startswith('Move'): cmd = move if act.startswith('RotateLeft'): cmd = left if act.startswith('RotateRight'): cmd = right img_count_ = img_count event, img_count = augment_util.env_navigate( cmd, env, save_settings, root_dir_to, render_settings, args.smooth_nav, img_count, save_idx=save_idx) if event is None: print(path, 'event is None!!!!!') success = False break if not event.metadata['lastActionSuccess']: print(colored("Replay Failed: %s" % ( env.last_event.metadata['errorMessage']), 'red')) print(path, 'event lastActionSuccess is False!!!!!') success = False break augment_util.save_image(env.last_event, root_dir_to, save_settings, img_count, None) img_count += 10 return success def start_worker(worker_id, traj_list, args, lock, processed_files_path, test_mode): if isinstance(args.x_display, (list, tuple)): x_display = 
args.x_display[worker_id % len(args.x_display)] else: x_display = args.x_display env = ThorEnv(x_display=x_display, player_screen_width=args.render_size, player_screen_height=args.render_size) video_saver = video_util.VideoSaver() render_settings = { 'renderImage': True, 'renderDepthImage': args.render_depth, 'renderObjectImage': args.render_instance_masks, 'renderClassImage': args.render_class_masks} while len(traj_list) > 0: lock.acquire(timeout=120) json_file = traj_list.pop() json_path = os.path.join(args.data_input, json_file) jsons_left = len(traj_list) lock.release() print('Rendering {} ({} left)'.format(json_path, jsons_left)) augment_success = augment_traj( env, json_path, args, video_saver, render_settings, test_mode) # update processed_files on the disk lock.acquire(timeout=120) with open(processed_files_path, 'a') as f: f.write('{};{}'.format(json_file, int(augment_success)) + '\n') model_util.update_log( args.data_output, stage='augment', update='increase', progress=1) lock.release() env.stop() print("Finished.") @ex.automain def main(args): args = helper_util.AttrDict(**args) if args.data_output is None: raise RuntimeError('Please, specify the name of output dataset') if (not args.render_frames and not args.render_depth and not args.render_instance_masks and not args.render_class_masks): raise RuntimeError('At least one type of images should be rendered') # set up the paths args.data_input = os.path.join(constants.ET_DATA, args.data_input) print('Creating a dataset {} using data from {}'.format( args.data_output, args.data_input)) if not os.path.isdir(args.data_input): raise RuntimeError('The input dataset {} does not exist'.format( args.data_input)) args.data_output = os.path.join(constants.ET_DATA, args.data_output) processed_files_path = os.path.join(args.data_output, 'processed.txt') if os.path.exists(args.data_output) and args.overwrite: print('Erasing the old directory') shutil.rmtree(args.data_output) os.makedirs(args.data_output, 
exist_ok=True) test_mode = True ##### # make a list of all the traj_data json files traj_list = [] print('Indexing images in {}'.format(args.partitions)) for partition in args.partitions: if test_mode: file_list = glob.glob(os.path.join(args.data_input, partition, '*')) else: file_list = glob.glob(os.path.join(args.data_input, partition, '*/*')) for dir_name in sorted(file_list): if 'trial_' in os.path.basename(dir_name): json_path = os.path.join(dir_name, 'traj_data.json') # if 'trial_T20190909_101117_022448' not in json_path: continue if not os.path.isfile(json_path): continue if test_mode: traj_list.append('/'.join(json_path.split('/')[-3:])) else: traj_list.append('/'.join(json_path.split('/')[-4:])) start_idx = 0 #chunk_size = len(traj_list) // 3 #traj_list = traj_list[chunk_size * start_idx : chunk_size * (start_idx + 1)] num_files, num_processed_files = len(traj_list), 0 # remove jsons that were already processed if os.path.exists(processed_files_path): with open(processed_files_path) as f: processed_files = set( [line.strip().split(';')[0] for line in f.readlines()]) # check whether which files are in the desired partitions processed_files = set( [f for f in processed_files if f.split('/')[0] in args.partitions]) traj_list = [traj for traj in traj_list if traj not in processed_files] num_processed_files += len(processed_files) print('{} jsons were already processed'.format(num_processed_files)) print(colored('The total number of triajectories to process is {}'.format( len(traj_list)), 'yellow')) model_util.save_log(args.data_output, progress=num_processed_files, total=num_files, stage='augment') # random shuffle if args.shuffle: random.shuffle(traj_list) lock = threading.Lock() if args.num_threads > 0 and False: # start threads threads = [] for worker_id in range(min(args.num_threads, len(traj_list))): thread = threading.Thread( target=start_worker, args=(worker_id, traj_list, args, lock, processed_files_path, test_mode)) threads.append(thread) 
thread.start() time.sleep(1) for thread in threads: thread.join() else: # run in the main thread start_worker(0, traj_list, args, lock, processed_files_path, test_mode) return with open(processed_files_path) as f: num_processed_files = len(f.readlines()) if num_files != num_processed_files: print(colored('{} trajectories were skipped'.format( num_files - num_processed_files), 'red')) else: print(colored('All trajectories were successfully recorded', 'green')) #print('Copying tests folders') #if not os.path.exists(os.path.join(args.data_output, 'tests_seen')): # shutil.copytree(os.path.join(args.data_input, 'tests_seen'), # os.path.join(args.data_output, 'tests_seen')) #if not os.path.exists(os.path.join(args.data_output, 'tests_unseen')): # shutil.copytree(os.path.join(args.data_input, 'tests_unseen'), # os.path.join(args.data_output, 'tests_unseen')) print('The generated dataset is saved to {}'.format(args.data_output))
true
true
f71d01b7f7dd99438c8b39dc984dbad4ab9a5f08
993
py
Python
cacao_accounting/contabilidad/registros/ccosto.py
cacao-accounting/cacao-accounting-mockup
ca4da3b4e48a4796fc39a12a482d9ccc1e8e6e38
[ "Apache-2.0" ]
2
2021-08-19T01:29:49.000Z
2021-09-11T16:21:40.000Z
cacao_accounting/contabilidad/registros/ccosto.py
cacao-accounting/cacao-accounting-mockup
ca4da3b4e48a4796fc39a12a482d9ccc1e8e6e38
[ "Apache-2.0" ]
29
2020-07-14T23:59:32.000Z
2021-11-04T07:42:31.000Z
cacao_accounting/contabilidad/registros/ccosto.py
cacao-accounting/cacao-accounting
3e6fa0080db3d44b1b3b8d93e46bd96a5a8d515b
[ "Apache-2.0" ]
3
2020-07-12T00:52:18.000Z
2021-08-18T01:09:22.000Z
# Copyright 2020 William José Moreno Reyes # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Contributors: # - William José Moreno Reyes """Administración centros de costos.""" from cacao_accounting.registro import Registro class RegistroCentroCosto(Registro): """Registro para la administración de Centros de Costos.""" def __init__(self): """Administración centros de costos.""" from cacao_accounting.database import CentroCosto self.tabla = CentroCosto
32.032258
74
0.747231
from cacao_accounting.registro import Registro class RegistroCentroCosto(Registro): def __init__(self): from cacao_accounting.database import CentroCosto self.tabla = CentroCosto
true
true
f71d01f2b0c1b5a7dec480a4a5104f45b41cb83c
543
py
Python
keras/preprocessing/sequence.py
seba-1511/gsoc15-demo
7fa542f33fdb39d73e2b11318c046ecf35fb9bcf
[ "MIT" ]
2
2015-09-15T19:19:24.000Z
2019-04-21T12:10:27.000Z
keras/preprocessing/sequence.py
wavelets/keras
c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b
[ "MIT" ]
null
null
null
keras/preprocessing/sequence.py
wavelets/keras
c57d5cce7903511edd4048f8bfed2ad0dc6f6b6b
[ "MIT" ]
null
null
null
import numpy as np def pad_sequences(seqs, maxlen=None, dtype='int32'): """ Pad each sequence to the same lenght: the lenght of the longuest sequence. If maxlen is provided, any sequence longer than maxlen is truncated to maxlen. """ lengths = [len(s) for s in seqs] nb_samples = len(seqs) if maxlen is None: maxlen = np.max(lengths) x = np.zeros((nb_samples, maxlen)).astype(dtype) for idx, s in enumerate(seqs): x[idx, :lengths[idx]] = s[:maxlen] return x
25.857143
52
0.615101
import numpy as np def pad_sequences(seqs, maxlen=None, dtype='int32'): lengths = [len(s) for s in seqs] nb_samples = len(seqs) if maxlen is None: maxlen = np.max(lengths) x = np.zeros((nb_samples, maxlen)).astype(dtype) for idx, s in enumerate(seqs): x[idx, :lengths[idx]] = s[:maxlen] return x
true
true
f71d033f264f8a2c8ac1bb10e308f59fe6a66bae
7,853
py
Python
example/add.py
tjengbudi/python-shopee
a74e99e7a900ed0a3c0cba2b7405238acf2ee16c
[ "MIT" ]
166
2018-04-25T16:43:30.000Z
2022-03-20T07:07:39.000Z
example/add.py
tjengbudi/python-shopee
a74e99e7a900ed0a3c0cba2b7405238acf2ee16c
[ "MIT" ]
34
2018-11-27T02:56:08.000Z
2022-01-28T05:24:57.000Z
example/add.py
tjengbudi/python-shopee
a74e99e7a900ed0a3c0cba2b7405238acf2ee16c
[ "MIT" ]
62
2018-06-12T02:53:34.000Z
2022-03-13T07:31:34.000Z
import pyshopee import re import pandas as pd from pprint import pprint def _builder_attributes(attributes_resp, brand_option = None, default_brand_option = "自有品牌"): '''select mandatory attr. attributes = [ { 'attributes_id': 1365, 'value': 'Napla(娜普菈)' } ] ''' attributes = [] # in case attributes response is not define in api response if attributes_resp.get("attributes"): for ele in attributes_resp.get("attributes"): if ele.get("is_mandatory") and ele.get("attribute_name")=='品牌': attributes.append( { "attributes_id": ele.get("attribute_id"), "value": brand_option if brand_option else default_brand_option }) elif ele.get("is_mandatory"): attributes.append( { # checking the value if value can radom or set as " " "attributes_id": ele.get("attribute_id"), "value": ele.get("options")[0] if len(ele.get("options")) > 0 else " ", }) else: pass else: return None return attributes def _builder_logistics(**params): ''' logistics = [ # 'logistic_name': '黑貓宅急便' { 'logistic_id': 30001, 'enabled':False }, # 'logistic_name': '7-11', { 'logistic_id': 30005, 'enabled':False }, # 'logistic_name': '全家', { 'logistic_id': 30006, 'enabled':False }, # 'logistic_name': '萊爾富', { 'logistic_id': 30007, 'enabled':False }, # 'logistic_name': 'OK Mart', { 'logistic_id': 30008, 'enabled':False }, # 'logistic_name': '中華郵政', { 'logistic_id': 39303, 'enabled':False }, # 'logistic_name': '賣家宅配', { 'logistic_id': 39304, 'enabled':False }, # 'logistic_name': '宅配', { 'logistic_id': 39307, 'enabled':True } ] ''' logistics = list() resp = shopee.logistic.get_logistics() logistics_resp = resp.get("logistics") for logis in logistics_resp: if logis.get('enabled'): # logistics.append({ # 'logistic_id': logis.get('logistic_id'), # 'enabled': logis.get('enabled') # }) if logis.get('fee_type') == 'SIZE_SELECTION': logis['sizes'] = logis['sizes'][0]['size_id'] else: logistics.append(logis) return logistics def _builder_images(single, **params): ''' images = [ { "url": 
"https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0" }, { "url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0' }, { "url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0' }, { "url": 'https://cfshopeetw-a.akamaihd.net/file/b77c9b16ec1dd734c0c663fd1fcb8ac0' } ] ''' images_container = [] images_container.extend( single.get("images").split(",") ) images = [] for img in images_container: images.append( { "url": "https://cfshopeetw-a.akamaihd.net/file/" + str(img) } ) return images def _builder_variations(data, **params): ''' variations = [ { "name": "Black", "stock": 1, "price": 1999.0, "variation_sku": "SKU-ABCD-EFG0-002" }, { "name": "Red", "stock": 1, "price": 2999.0, "variation_sku": "SKU-ABCD-EFG0-003" } ] ''' multi = len(data) if len(data) > 1 else None variations_container = [] if multi: for ele in data: variations = {} # check if ele["modelid"] == 0 or ele["model_status"] == 0: pass else: variations.setdefault("name",ele["model_name"].strip()) variations.setdefault("stock",1) variations.setdefault("price",ele["model_price"]) if ele.get("variation_sku"): variations.setdefault("variation_sku",ele.get("variation_sku")) variations_container.append(variations) return variations_container else: return None def _builder_weight(single, default_weight=0.1, **params): ''' the net weight of this item, the unit is KG. 
- type: float - require: yes ''' if single.get("item_weight"): weight = single.get("item_weight")/100000 else: weight = default_weight return float(weight) def _cleaning_hashtag(description, **params): hashtag_pattern = re.compile(r"#(.*)[\s]{0,1}", flags=re.UNICODE) cleaned_description = hashtag_pattern.sub(r' ', description) return cleaned_description if __name__ == '__main__': # build the connection shopee = pyshopee.Client( shop_id= your_shopid, partner_id=your_partner_id, secret_key=your_secret_key ) # build your data in here single = { "category_id":category_id, "item_name":item_name, "descriptio":descriptio, "item_price":item_price, "item_weight":item_weight, "category_id":category_id, "images":images } product_data = { "category_id": single.get("category_id"), "name": single.get("item_name").strip(), "description": _cleaning_hashtag(description =single.get("description") ), "price": single.get("item_price") if single.get("item_price") > 0 else data[1].get("item_price"), "stock": 1, "weight": _builder_weight(single=single, default_weight=0.1), # "variations": variations, "images": _builder_images(single=single), # "attributes": _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])), # brand_option = single.get("value"), # default_brand_option = "自有品牌" ), "logistics": _builder_logistics(), # "package_length": 200, # "package_width": 200, # "package_height": 200, # "days_to_ship": 10, # "wholesales": wholesales } attributes = _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])), brand_option = single.get("value"), default_brand_option = "自有品牌" ) if attributes: product_data.setdefault("attributes",attributes) variations = _builder_variations(data=data) if variations: product_data.setdefault("variations",variations) # adding process response = shopee.item.add(product_data=product_data) pprint(response)
29.859316
134
0.492041
import pyshopee import re import pandas as pd from pprint import pprint def _builder_attributes(attributes_resp, brand_option = None, default_brand_option = "自有品牌"): attributes = [] if attributes_resp.get("attributes"): for ele in attributes_resp.get("attributes"): if ele.get("is_mandatory") and ele.get("attribute_name")=='品牌': attributes.append( { "attributes_id": ele.get("attribute_id"), "value": brand_option if brand_option else default_brand_option }) elif ele.get("is_mandatory"): attributes.append( { "attributes_id": ele.get("attribute_id"), "value": ele.get("options")[0] if len(ele.get("options")) > 0 else " ", }) else: pass else: return None return attributes def _builder_logistics(**params): logistics = list() resp = shopee.logistic.get_logistics() logistics_resp = resp.get("logistics") for logis in logistics_resp: if logis.get('enabled'): if logis.get('fee_type') == 'SIZE_SELECTION': logis['sizes'] = logis['sizes'][0]['size_id'] else: logistics.append(logis) return logistics def _builder_images(single, **params): images_container = [] images_container.extend( single.get("images").split(",") ) images = [] for img in images_container: images.append( { "url": "https://cfshopeetw-a.akamaihd.net/file/" + str(img) } ) return images def _builder_variations(data, **params): multi = len(data) if len(data) > 1 else None variations_container = [] if multi: for ele in data: variations = {} if ele["modelid"] == 0 or ele["model_status"] == 0: pass else: variations.setdefault("name",ele["model_name"].strip()) variations.setdefault("stock",1) variations.setdefault("price",ele["model_price"]) if ele.get("variation_sku"): variations.setdefault("variation_sku",ele.get("variation_sku")) variations_container.append(variations) return variations_container else: return None def _builder_weight(single, default_weight=0.1, **params): if single.get("item_weight"): weight = single.get("item_weight")/100000 else: weight = default_weight return float(weight) def 
_cleaning_hashtag(description, **params): hashtag_pattern = re.compile(r"#(.*)[\s]{0,1}", flags=re.UNICODE) cleaned_description = hashtag_pattern.sub(r' ', description) return cleaned_description if __name__ == '__main__': shopee = pyshopee.Client( shop_id= your_shopid, partner_id=your_partner_id, secret_key=your_secret_key ) single = { "category_id":category_id, "item_name":item_name, "descriptio":descriptio, "item_price":item_price, "item_weight":item_weight, "category_id":category_id, "images":images } product_data = { "category_id": single.get("category_id"), "name": single.get("item_name").strip(), "description": _cleaning_hashtag(description =single.get("description") ), "price": single.get("item_price") if single.get("item_price") > 0 else data[1].get("item_price"), "stock": 1, "weight": _builder_weight(single=single, default_weight=0.1), "images": _builder_images(single=single), "logistics": _builder_logistics(), } attributes = _builder_attributes( attributes_resp = shopee.item.get_attributes(category_id=int(single["category_id"])), brand_option = single.get("value"), default_brand_option = "自有品牌" ) if attributes: product_data.setdefault("attributes",attributes) variations = _builder_variations(data=data) if variations: product_data.setdefault("variations",variations) response = shopee.item.add(product_data=product_data) pprint(response)
true
true
f71d035297f489215598c4103d58e28bf84e12d5
7,753
py
Python
transformer/third_party/feat_convert/kaldi_io/batchmk.py
wxy1988/ASR
8ef3ef347523044c89c46c263ecc7b8e9b2c06d1
[ "Apache-2.0" ]
60
2018-08-21T08:07:31.000Z
2021-11-08T10:40:17.000Z
transformer/third_party/feat_convert/kaldi_io/batchmk.py
wxy1988/ASR
8ef3ef347523044c89c46c263ecc7b8e9b2c06d1
[ "Apache-2.0" ]
7
2018-10-23T08:50:15.000Z
2021-11-15T09:32:29.000Z
transformer/third_party/feat_convert/kaldi_io/batchmk.py
wxy1988/ASR
8ef3ef347523044c89c46c263ecc7b8e9b2c06d1
[ "Apache-2.0" ]
29
2018-09-21T06:11:03.000Z
2022-02-18T07:12:58.000Z
#!/usr/bin/python # coding=utf-8 """ @version: @author: Dong Linhao @license: Apache Licence @contact: donglinhao2015@ia.ac.cn @site: @software: PyCharm Community Edition @file: batchmk.py @time: 09/04/17 21:10 """ import src.io.fea as fea import tensorflow as tf import numpy as np import time LONGEST_FRMS = 2000 class lstm_batch(object): def __init__(self, num_streams, num_steps, input_dim): self.sample_feat_list = [np.zeros([LONGEST_FRMS, input_dim]) for _ in range(num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.curt = np.zeros(num_streams, dtype=int) self.lent = np.zeros(num_streams, dtype=int) self.reset_flag = np.zeros(num_streams, dtype=bool) self.num_streams = num_streams self.num_steps = num_steps self.input_dim = input_dim self.handled_utt_num = 0 self.handled_frm_num = 0 self.cur_epoch_finish = False def set_stream_num(self, num_streams): self.num_streams = num_streams self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.curt = np.zeros(num_streams, dtype=int) self.lent = np.zeros(num_streams, dtype=int) self.reset_flag = np.zeros(num_streams, dtype=bool) def reset(self): self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(self.num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)] self.curt = np.zeros(self.num_streams, dtype=int) self.lent = np.zeros(self.num_streams, dtype=int) self.reset_flag = np.zeros(self.num_streams, dtype=bool) self.handled_utt_num = 0 self.handled_frm_num = 0 self.cur_epoch_finish = False def make_batch(self, sess, sample, run_device, 
total_utt_num): with tf.device(run_device): multistream_feat_batch = [np.zeros([self.num_steps, self.input_dim]) for _ in range(self.num_streams)] multistream_label_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)] multistream_mask_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)] reset_flag = np.zeros(self.num_streams, dtype=bool) for s in range(self.num_streams): if self.curt[s] < self.lent[s]: reset_flag[s] = False continue if self.handled_utt_num < total_utt_num: sample_feats, sample_labels, sample_masks = sess.run(sample) self.handled_utt_num += 1 self.sample_feat_list[s] = sample_feats self.sample_label_list[s] = sample_labels self.sample_mask_list[s] = sample_masks self.lent[s] = np.shape(sample_feats)[0] self.curt[s] = 0 reset_flag[s] = True for s in range(self.num_streams): if self.curt[s] < self.lent[s]: multistream_feat_batch[s] = self.sample_feat_list[s][self.curt[s]:self.curt[s]+self.num_steps, :] multistream_label_batch[s] = self.sample_label_list[s][self.curt[s]:self.curt[s]+self.num_steps] multistream_mask_batch[s] = self.sample_mask_list[s][self.curt[s]:self.curt[s]+self.num_steps] self.curt[s] += self.num_steps self.handled_frm_num += np.sum(multistream_mask_batch[s]) else: multistream_mask_batch[s] = np.zeros([self.num_steps]) final_feat_batch = np.stack(multistream_feat_batch, axis=1) final_label_batch = np.stack(multistream_label_batch, axis=1) final_mask_batch = np.stack(multistream_mask_batch, axis=1) done = True for s in range(self.num_streams): if self.curt[s] < self.lent[s]: done = False if done: self.cur_epoch_finish = True return final_feat_batch, final_label_batch, final_mask_batch, reset_flag def getfilelst(scp_file_path): # get tf list tf_list = [] with open(scp_file_path) as list_file: for line in list_file: tf_list.append(line.strip()) return tf_list def process_my_feature(feature, label, flags): # Add delta if flags.add_delta: feature = fea.tf_fea_add_delt(feature) # CMVN feature = 
fea.tf_fea_cmvn_global(feature, flags.feat_mean, flags.feat_var) # Splice feature = fea.tf_fea_splice(feature, flags.l_splice, flags.r_splice) feature = tf.reshape(feature, [-1, flags.input_dim]) return feature[:], label[:] def read_my_file_format(filename_queue, org_feat_dim): # build reader reader = tf.TFRecordReader() _, serialized_example = reader.read(filename_queue) raw_example = tf.parse_single_example( serialized_example, # Defaults are not specified since both keys are required. features={ 'feat': tf.FixedLenFeature([], tf.string), 'label': tf.FixedLenFeature([], tf.string), }) example = tf.decode_raw(raw_example['feat'], tf.float32) example = tf.reshape(example, [-1, org_feat_dim]) label = tf.decode_raw(raw_example['label'], tf.int32) return example, label def lstm_input_pipeline(flags, is_training, num_epochs=None, shuffle_state = True): with tf.device(flags.default_device): if is_training: filenames = getfilelst(flags.trn_data_dir + '/tf.lst') else: filenames = getfilelst(flags.cv_data_dir + '/tf.lst') # generate file queue filename_queue = tf.train.string_input_producer( filenames, num_epochs = num_epochs, shuffle = shuffle_state) # read from file queue sample = read_my_file_format(filename_queue, flags.org_feat_dim) # handle sample sample_feats, sample_labels = process_my_feature(sample[0], sample[1], flags) sample_length = tf.shape(sample_feats)[0] sample_masks = tf.ones([sample_length], dtype=tf.float32) # add target delay if flags.target_delay > 0: feats_part1 = tf.slice(sample_feats, [flags.target_delay, 0], [sample_length-flags.target_delay, -1]) last_frm_feats = tf.slice(sample_feats, [sample_length-1, 0], [1, -1]) feats_part2 = tf.concat([last_frm_feats for _ in range(flags.target_delay)], axis=0) sample_feats = tf.concat([feats_part1, feats_part2], axis=0) padding_length = flags.num_steps - sample_length % flags.num_steps padding_feats = tf.zeros([padding_length, flags.input_dim], dtype=tf.float32) feats = tf.concat(axis=0, 
values=[sample_feats, padding_feats]) padding_labels = tf.zeros([padding_length], dtype=tf.int32) labels = tf.concat(axis=0, values=[sample_labels, padding_labels]) padding_masks = tf.zeros([padding_length], dtype=tf.float32) frame_masks = tf.concat(axis=0, values=[sample_masks, padding_masks]) return feats, labels, frame_masks
41.682796
118
0.632013
import src.io.fea as fea import tensorflow as tf import numpy as np import time LONGEST_FRMS = 2000 class lstm_batch(object): def __init__(self, num_streams, num_steps, input_dim): self.sample_feat_list = [np.zeros([LONGEST_FRMS, input_dim]) for _ in range(num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.curt = np.zeros(num_streams, dtype=int) self.lent = np.zeros(num_streams, dtype=int) self.reset_flag = np.zeros(num_streams, dtype=bool) self.num_streams = num_streams self.num_steps = num_steps self.input_dim = input_dim self.handled_utt_num = 0 self.handled_frm_num = 0 self.cur_epoch_finish = False def set_stream_num(self, num_streams): self.num_streams = num_streams self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(num_streams)] self.curt = np.zeros(num_streams, dtype=int) self.lent = np.zeros(num_streams, dtype=int) self.reset_flag = np.zeros(num_streams, dtype=bool) def reset(self): self.sample_feat_list = [np.zeros([LONGEST_FRMS, self.input_dim]) for _ in range(self.num_streams)] self.sample_label_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)] self.sample_mask_list = [np.zeros([LONGEST_FRMS]) for _ in range(self.num_streams)] self.curt = np.zeros(self.num_streams, dtype=int) self.lent = np.zeros(self.num_streams, dtype=int) self.reset_flag = np.zeros(self.num_streams, dtype=bool) self.handled_utt_num = 0 self.handled_frm_num = 0 self.cur_epoch_finish = False def make_batch(self, sess, sample, run_device, total_utt_num): with tf.device(run_device): multistream_feat_batch = [np.zeros([self.num_steps, self.input_dim]) for _ in range(self.num_streams)] multistream_label_batch = [np.zeros([self.num_steps]) for _ in 
range(self.num_streams)] multistream_mask_batch = [np.zeros([self.num_steps]) for _ in range(self.num_streams)] reset_flag = np.zeros(self.num_streams, dtype=bool) for s in range(self.num_streams): if self.curt[s] < self.lent[s]: reset_flag[s] = False continue if self.handled_utt_num < total_utt_num: sample_feats, sample_labels, sample_masks = sess.run(sample) self.handled_utt_num += 1 self.sample_feat_list[s] = sample_feats self.sample_label_list[s] = sample_labels self.sample_mask_list[s] = sample_masks self.lent[s] = np.shape(sample_feats)[0] self.curt[s] = 0 reset_flag[s] = True for s in range(self.num_streams): if self.curt[s] < self.lent[s]: multistream_feat_batch[s] = self.sample_feat_list[s][self.curt[s]:self.curt[s]+self.num_steps, :] multistream_label_batch[s] = self.sample_label_list[s][self.curt[s]:self.curt[s]+self.num_steps] multistream_mask_batch[s] = self.sample_mask_list[s][self.curt[s]:self.curt[s]+self.num_steps] self.curt[s] += self.num_steps self.handled_frm_num += np.sum(multistream_mask_batch[s]) else: multistream_mask_batch[s] = np.zeros([self.num_steps]) final_feat_batch = np.stack(multistream_feat_batch, axis=1) final_label_batch = np.stack(multistream_label_batch, axis=1) final_mask_batch = np.stack(multistream_mask_batch, axis=1) done = True for s in range(self.num_streams): if self.curt[s] < self.lent[s]: done = False if done: self.cur_epoch_finish = True return final_feat_batch, final_label_batch, final_mask_batch, reset_flag def getfilelst(scp_file_path): tf_list = [] with open(scp_file_path) as list_file: for line in list_file: tf_list.append(line.strip()) return tf_list def process_my_feature(feature, label, flags): if flags.add_delta: feature = fea.tf_fea_add_delt(feature) feature = fea.tf_fea_cmvn_global(feature, flags.feat_mean, flags.feat_var) feature = fea.tf_fea_splice(feature, flags.l_splice, flags.r_splice) feature = tf.reshape(feature, [-1, flags.input_dim]) return feature[:], label[:] def 
read_my_file_format(filename_queue, org_feat_dim): reader = tf.TFRecordReader() _, serialized_example = reader.read(filename_queue) raw_example = tf.parse_single_example( serialized_example, features={ 'feat': tf.FixedLenFeature([], tf.string), 'label': tf.FixedLenFeature([], tf.string), }) example = tf.decode_raw(raw_example['feat'], tf.float32) example = tf.reshape(example, [-1, org_feat_dim]) label = tf.decode_raw(raw_example['label'], tf.int32) return example, label def lstm_input_pipeline(flags, is_training, num_epochs=None, shuffle_state = True): with tf.device(flags.default_device): if is_training: filenames = getfilelst(flags.trn_data_dir + '/tf.lst') else: filenames = getfilelst(flags.cv_data_dir + '/tf.lst') filename_queue = tf.train.string_input_producer( filenames, num_epochs = num_epochs, shuffle = shuffle_state) sample = read_my_file_format(filename_queue, flags.org_feat_dim) sample_feats, sample_labels = process_my_feature(sample[0], sample[1], flags) sample_length = tf.shape(sample_feats)[0] sample_masks = tf.ones([sample_length], dtype=tf.float32) if flags.target_delay > 0: feats_part1 = tf.slice(sample_feats, [flags.target_delay, 0], [sample_length-flags.target_delay, -1]) last_frm_feats = tf.slice(sample_feats, [sample_length-1, 0], [1, -1]) feats_part2 = tf.concat([last_frm_feats for _ in range(flags.target_delay)], axis=0) sample_feats = tf.concat([feats_part1, feats_part2], axis=0) padding_length = flags.num_steps - sample_length % flags.num_steps padding_feats = tf.zeros([padding_length, flags.input_dim], dtype=tf.float32) feats = tf.concat(axis=0, values=[sample_feats, padding_feats]) padding_labels = tf.zeros([padding_length], dtype=tf.int32) labels = tf.concat(axis=0, values=[sample_labels, padding_labels]) padding_masks = tf.zeros([padding_length], dtype=tf.float32) frame_masks = tf.concat(axis=0, values=[sample_masks, padding_masks]) return feats, labels, frame_masks
true
true
f71d05be152905647cb539a75d42c89a113e0993
556
py
Python
src/operations/equality.py
iamantony/PythonNotes
4ed740378bd5e031e2c21675f33ae8e199a48bbb
[ "MIT" ]
null
null
null
src/operations/equality.py
iamantony/PythonNotes
4ed740378bd5e031e2c21675f33ae8e199a48bbb
[ "MIT" ]
null
null
null
src/operations/equality.py
iamantony/PythonNotes
4ed740378bd5e031e2c21675f33ae8e199a48bbb
[ "MIT" ]
null
null
null
__author__ = 'Antony Cherepanov' def diff_ways_to_equality_check(): print("\ndiff_ways_to_equality_check()") l1 = l2 = [1, 2, 3] print("Our lists: " + str(l1) + ", " + str(l2) + ". They reference to the same object") print("l1 == l2 ? : ", l1 == l2) print("l1 is l2 ? : ", l1 is l2) l3 = [1, 2] l4 = [1, 2] print("Our lists: " + str(l3) + ", " + str(l4) + ". They reference to different objects") print("l3 == l4 ? : ", l3 == l4) print("l3 is l4 ? : ", l3 is l4) diff_ways_to_equality_check()
29.263158
94
0.532374
__author__ = 'Antony Cherepanov' def diff_ways_to_equality_check(): print("\ndiff_ways_to_equality_check()") l1 = l2 = [1, 2, 3] print("Our lists: " + str(l1) + ", " + str(l2) + ". They reference to the same object") print("l1 == l2 ? : ", l1 == l2) print("l1 is l2 ? : ", l1 is l2) l3 = [1, 2] l4 = [1, 2] print("Our lists: " + str(l3) + ", " + str(l4) + ". They reference to different objects") print("l3 == l4 ? : ", l3 == l4) print("l3 is l4 ? : ", l3 is l4) diff_ways_to_equality_check()
true
true
f71d05e03c289edc6d71ce6c053ebcd1ba27ca78
3,562
py
Python
cfgov/login/tests/test_auth_forms.py
Colin-Seifer/consumerfinance.gov
a1a943f7170b498707d642d6be97b9a97a2b52e3
[ "CC0-1.0" ]
156
2015-01-16T15:16:46.000Z
2020-08-04T04:48:01.000Z
cfgov/login/tests/test_auth_forms.py
Colin-Seifer/consumerfinance.gov
a1a943f7170b498707d642d6be97b9a97a2b52e3
[ "CC0-1.0" ]
3,604
2015-01-05T22:09:12.000Z
2020-08-14T17:09:19.000Z
cfgov/login/tests/test_auth_forms.py
Colin-Seifer/consumerfinance.gov
a1a943f7170b498707d642d6be97b9a97a2b52e3
[ "CC0-1.0" ]
102
2015-01-28T14:51:18.000Z
2020-08-10T00:00:39.000Z
from unittest.mock import patch from django.contrib.auth.models import User from django.test import TestCase from login.forms import CFGOVPasswordChangeForm, UserCreationForm, UserEditForm from login.tests.test_password_policy import TestWithUser @patch("login.forms.send_password_reset_email") class UserCreationFormTestCase(TestCase): def setUp(self): self.username = self.__class__.__name__ self.email = "george@example.com" self.userdata = { "email": self.email, "username": self.username, "first_name": "George", "last_name": "Washington", "password1": "cherrytree", "password2": "cherrytree", } def tearDown(self): User.objects.filter(username=self.username).delete() def test_save_sends_email(self, send_email): form = UserCreationForm(self.userdata) self.assertTrue(form.is_valid()) form.save(commit=True) send_email.assert_called_once_with(self.email) def test_save_without_commit_doesnt_send_email(self, send_email): form = UserCreationForm(self.userdata) self.assertTrue(form.is_valid()) form.save(commit=False) send_email.assert_not_called() def test_duplicate_email_fails_validation(self, send_email): User.objects.create(username="foo", email=self.email) form = UserCreationForm(self.userdata) self.assertFalse(form.is_valid()) self.assertTrue(form.errors["email"]) class UserEditFormTestCase(TestCase): def setUp(self): self.userdata = { "username": "george", "email": "george@washington.com", "first_name": "george", "last_name": "washington", } def test_no_edits_valid(self): user = User.objects.create(**self.userdata) form = UserEditForm(data=self.userdata, instance=user) self.assertTrue(form.is_valid()) def test_edit_first_name(self): user = User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["first_name"] = "joe" form = UserEditForm(data=userdata2, instance=user) self.assertTrue(form.is_valid()) user = form.save() self.assertEqual(user.first_name, "joe") self.assertEqual(user.username, "george") def test_duplicate_email_fails_validation(self): 
User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["username"] = "patrick" form = UserEditForm(data=userdata2) self.assertFalse(form.is_valid()) self.assertTrue(form.errors["email"]) def test_duplicate_emails_allowed_on_user_model(self): User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["username"] = "patrick" try: User.objects.create(**userdata2) except Exception: self.fail( "users with duplicate emails are allowed, " "just not when creating or editing via for " ) class PasswordValidationMixinTestCase(TestWithUser): def test_edit_password(self): user = self.get_user(last_password="testing") form = CFGOVPasswordChangeForm( data={ "old_password": "testing", "new_password1": "Testing12345!", "new_password2": "Testing12345!", }, user=user, ) form.is_valid() self.assertTrue(form.is_valid())
32.09009
79
0.635036
from unittest.mock import patch from django.contrib.auth.models import User from django.test import TestCase from login.forms import CFGOVPasswordChangeForm, UserCreationForm, UserEditForm from login.tests.test_password_policy import TestWithUser @patch("login.forms.send_password_reset_email") class UserCreationFormTestCase(TestCase): def setUp(self): self.username = self.__class__.__name__ self.email = "george@example.com" self.userdata = { "email": self.email, "username": self.username, "first_name": "George", "last_name": "Washington", "password1": "cherrytree", "password2": "cherrytree", } def tearDown(self): User.objects.filter(username=self.username).delete() def test_save_sends_email(self, send_email): form = UserCreationForm(self.userdata) self.assertTrue(form.is_valid()) form.save(commit=True) send_email.assert_called_once_with(self.email) def test_save_without_commit_doesnt_send_email(self, send_email): form = UserCreationForm(self.userdata) self.assertTrue(form.is_valid()) form.save(commit=False) send_email.assert_not_called() def test_duplicate_email_fails_validation(self, send_email): User.objects.create(username="foo", email=self.email) form = UserCreationForm(self.userdata) self.assertFalse(form.is_valid()) self.assertTrue(form.errors["email"]) class UserEditFormTestCase(TestCase): def setUp(self): self.userdata = { "username": "george", "email": "george@washington.com", "first_name": "george", "last_name": "washington", } def test_no_edits_valid(self): user = User.objects.create(**self.userdata) form = UserEditForm(data=self.userdata, instance=user) self.assertTrue(form.is_valid()) def test_edit_first_name(self): user = User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["first_name"] = "joe" form = UserEditForm(data=userdata2, instance=user) self.assertTrue(form.is_valid()) user = form.save() self.assertEqual(user.first_name, "joe") self.assertEqual(user.username, "george") def test_duplicate_email_fails_validation(self): 
User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["username"] = "patrick" form = UserEditForm(data=userdata2) self.assertFalse(form.is_valid()) self.assertTrue(form.errors["email"]) def test_duplicate_emails_allowed_on_user_model(self): User.objects.create(**self.userdata) userdata2 = dict(self.userdata) userdata2["username"] = "patrick" try: User.objects.create(**userdata2) except Exception: self.fail( "users with duplicate emails are allowed, " "just not when creating or editing via for " ) class PasswordValidationMixinTestCase(TestWithUser): def test_edit_password(self): user = self.get_user(last_password="testing") form = CFGOVPasswordChangeForm( data={ "old_password": "testing", "new_password1": "Testing12345!", "new_password2": "Testing12345!", }, user=user, ) form.is_valid() self.assertTrue(form.is_valid())
true
true
f71d07441c3958aedc9d9d95b22f02745383912c
1,054
py
Python
spylon_kernel/__init__.py
Gr4vi7y/spylon-kernel
2d0ddf2aca1b91738f938b72a500c20293e3156c
[ "BSD-3-Clause" ]
111
2018-03-12T09:22:07.000Z
2021-04-06T07:33:52.000Z
spylon_kernel/__init__.py
mariusvniekerk/metakernel-scala-spark
2d0ddf2aca1b91738f938b72a500c20293e3156c
[ "BSD-3-Clause" ]
34
2017-01-30T19:12:26.000Z
2017-09-08T14:14:07.000Z
spylon_kernel/__init__.py
mariusvniekerk/metakernel-scala-spark
2d0ddf2aca1b91738f938b72a500c20293e3156c
[ "BSD-3-Clause" ]
16
2018-04-11T19:25:14.000Z
2020-10-20T10:56:27.000Z
from __future__ import absolute_import, print_function, division from .scala_kernel import SpylonKernel from .scala_magic import ScalaMagic from .init_spark_magic import InitSparkMagic from .scala_interpreter import get_scala_interpreter def register_ipython_magics(): """For usage within ipykernel. This will instantiate the magics for IPython """ from metakernel import IPythonKernel from IPython.core.magic import register_cell_magic, register_line_cell_magic kernel = IPythonKernel() scala_magic = ScalaMagic(kernel) init_spark_magic = InitSparkMagic(kernel) @register_line_cell_magic def scala(line, cell): if line: return scala_magic.line_scala(line) else: scala_magic.code = cell return scala_magic.cell_scala() @register_cell_magic def init_spark(line, cell): init_spark_magic.code = cell return init_spark_magic.cell_init_spark() from ._version import get_versions __version__ = get_versions()['version'] del get_versions
29.277778
80
0.743833
from __future__ import absolute_import, print_function, division from .scala_kernel import SpylonKernel from .scala_magic import ScalaMagic from .init_spark_magic import InitSparkMagic from .scala_interpreter import get_scala_interpreter def register_ipython_magics(): from metakernel import IPythonKernel from IPython.core.magic import register_cell_magic, register_line_cell_magic kernel = IPythonKernel() scala_magic = ScalaMagic(kernel) init_spark_magic = InitSparkMagic(kernel) @register_line_cell_magic def scala(line, cell): if line: return scala_magic.line_scala(line) else: scala_magic.code = cell return scala_magic.cell_scala() @register_cell_magic def init_spark(line, cell): init_spark_magic.code = cell return init_spark_magic.cell_init_spark() from ._version import get_versions __version__ = get_versions()['version'] del get_versions
true
true
f71d08c432f7544e984ac784fbbc3c5d98bf9349
755
py
Python
wemake_python_styleguide/presets/types/file_tokens.py
cdhiraj40/wemake-python-styleguide
7cef9be081d594c30045b7a98cae77a9be46e1aa
[ "MIT" ]
1,931
2018-03-17T13:52:45.000Z
2022-03-27T09:39:17.000Z
wemake_python_styleguide/presets/types/file_tokens.py
amansr02/wemake-python-styleguide
681035ed21fbe28ebfb32b8807b98e8de76b64aa
[ "MIT" ]
2,231
2018-03-09T21:19:05.000Z
2022-03-31T08:35:37.000Z
wemake_python_styleguide/presets/types/file_tokens.py
amansr02/wemake-python-styleguide
681035ed21fbe28ebfb32b8807b98e8de76b64aa
[ "MIT" ]
492
2018-05-18T21:20:28.000Z
2022-03-20T14:11:50.000Z
from typing_extensions import Final from wemake_python_styleguide.visitors.tokenize import ( comments, conditions, primitives, statements, syntax, ) #: Used to store all token related visitors to be later passed to checker: PRESET: Final = ( comments.WrongCommentVisitor, comments.ShebangVisitor, comments.NoqaVisitor, comments.EmptyCommentVisitor, syntax.WrongKeywordTokenVisitor, primitives.WrongNumberTokenVisitor, primitives.WrongStringTokenVisitor, primitives.WrongStringConcatenationVisitor, statements.ExtraIndentationVisitor, statements.BracketLocationVisitor, statements.MultilineStringVisitor, statements.InconsistentComprehensionVisitor, conditions.IfElseVisitor, )
24.354839
74
0.780132
from typing_extensions import Final from wemake_python_styleguide.visitors.tokenize import ( comments, conditions, primitives, statements, syntax, ) PRESET: Final = ( comments.WrongCommentVisitor, comments.ShebangVisitor, comments.NoqaVisitor, comments.EmptyCommentVisitor, syntax.WrongKeywordTokenVisitor, primitives.WrongNumberTokenVisitor, primitives.WrongStringTokenVisitor, primitives.WrongStringConcatenationVisitor, statements.ExtraIndentationVisitor, statements.BracketLocationVisitor, statements.MultilineStringVisitor, statements.InconsistentComprehensionVisitor, conditions.IfElseVisitor, )
true
true
f71d0905abbc7a17b2098e10bd4351b995725485
5,026
py
Python
server/src/army.py
matteli/histemul
61f1ea8e1263b92fd2bead0c808f67940faad802
[ "BSD-2-Clause" ]
1
2019-07-05T09:40:50.000Z
2019-07-05T09:40:50.000Z
server/src/army.py
matteli/histemul
61f1ea8e1263b92fd2bead0c808f67940faad802
[ "BSD-2-Clause" ]
null
null
null
server/src/army.py
matteli/histemul
61f1ea8e1263b92fd2bead0c808f67940faad802
[ "BSD-2-Clause" ]
null
null
null
''' Copyright (c) 2012-2015, Matthieu Nué All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
''' from mongoengine import Document, ReferenceField, IntField, ListField, BooleanField, StringField from battle import Battle #TODO: make origin the pk class Army(Document): for_the = ReferenceField('Person') battle = ReferenceField('Battle') attitude = StringField() location = ReferenceField('Province') origin = ReferenceField('Province') way = ListField(ReferenceField('Province')) next_province = ReferenceField('Province') knights = IntField() morale = IntField() time_walking = IntField() @classmethod def new(cls, province): army = cls.objects.create(for_the=province.domain_of.holder, attitude='normal', location=province, origin=province, knights = province.manpower, morale=100, time_walking=0) province.manpower = 0 province.save() return army def move(self, way): self.way = way self.save() return def dismiss(self): self.origin.manpower += self.knights self.origin.save() #army.knights = 0 #army.save() self.delete() return def stop(self): self.next_province = None self.time_walking = 0 self.way = [] #self.save() return def retreat(self): province = self.location.get_random_walkable_adjacent() if province: self.battle = None self.attitude = 'retreat' self.next_province = province self.way.append(province) self.time_walking = 0 #self.save() return True else: return False def update(self, date): if self.way and self.next_province != self.way[-1]: #change way since last update self.next_province = self.way[-1] self.time_walking = 0 if self.time_walking >= self.location.size: #enter a new province self.time_walking -= self.location.size province = self.next_province self.location = province self.way.pop() if self.way: self.next_province = self.way[-1] else: self.next_province = None self.attitude = 'normal' #when enter a new province, look if there is enemy or already a battle person = self.for_the battle = province.battle if not battle: war = None enemies = [] for army_in_province in province.armies: if not war: war = 
person.in_war_against(army_in_province.for_the)['war'] enemies.append(army_in_province) else: w = person.in_war_against(army_in_province.for_the)[0]['war'] if w == war: enemies.append(army_in_province) if enemies: #enemy so battle self.stop() Battle.new(war, province, [self], enemies) else: war = battle.war if person in war.aggressors: self.stop() battle.add_aggressor(self) if person in war.defenders: self.stop() battle.add_defender(self) if self.next_province: self.time_walking += 500 * self.location.land.walkable else: self.time_walking = 0 #morale if self.attitude == 'normal': if self.morale < 95: self.morale += 5 else: self.morale = 100 self.save()
35.146853
180
0.607839
from mongoengine import Document, ReferenceField, IntField, ListField, BooleanField, StringField from battle import Battle class Army(Document): for_the = ReferenceField('Person') battle = ReferenceField('Battle') attitude = StringField() location = ReferenceField('Province') origin = ReferenceField('Province') way = ListField(ReferenceField('Province')) next_province = ReferenceField('Province') knights = IntField() morale = IntField() time_walking = IntField() @classmethod def new(cls, province): army = cls.objects.create(for_the=province.domain_of.holder, attitude='normal', location=province, origin=province, knights = province.manpower, morale=100, time_walking=0) province.manpower = 0 province.save() return army def move(self, way): self.way = way self.save() return def dismiss(self): self.origin.manpower += self.knights self.origin.save() self.delete() return def stop(self): self.next_province = None self.time_walking = 0 self.way = [] return def retreat(self): province = self.location.get_random_walkable_adjacent() if province: self.battle = None self.attitude = 'retreat' self.next_province = province self.way.append(province) self.time_walking = 0 return True else: return False def update(self, date): if self.way and self.next_province != self.way[-1]: self.next_province = self.way[-1] self.time_walking = 0 if self.time_walking >= self.location.size: self.time_walking -= self.location.size province = self.next_province self.location = province self.way.pop() if self.way: self.next_province = self.way[-1] else: self.next_province = None self.attitude = 'normal' person = self.for_the battle = province.battle if not battle: war = None enemies = [] for army_in_province in province.armies: if not war: war = person.in_war_against(army_in_province.for_the)['war'] enemies.append(army_in_province) else: w = person.in_war_against(army_in_province.for_the)[0]['war'] if w == war: enemies.append(army_in_province) if enemies: self.stop() Battle.new(war, province, [self], 
enemies) else: war = battle.war if person in war.aggressors: self.stop() battle.add_aggressor(self) if person in war.defenders: self.stop() battle.add_defender(self) if self.next_province: self.time_walking += 500 * self.location.land.walkable else: self.time_walking = 0 if self.attitude == 'normal': if self.morale < 95: self.morale += 5 else: self.morale = 100 self.save()
true
true
f71d0918a14b24074948327cd78a0618dd6eff25
5,508
py
Python
setup.py
openedx/bok-choy
b2f82ebea4c24c84361170063d8cad0314405a4a
[ "Apache-2.0" ]
2
2022-01-22T22:22:53.000Z
2022-02-28T03:13:57.000Z
setup.py
openedx/bok-choy
b2f82ebea4c24c84361170063d8cad0314405a4a
[ "Apache-2.0" ]
16
2022-01-11T04:11:33.000Z
2022-03-29T12:30:45.000Z
setup.py
openedx/bok-choy
b2f82ebea4c24c84361170063d8cad0314405a4a
[ "Apache-2.0" ]
1
2022-03-16T14:43:57.000Z
2022-03-16T14:43:57.000Z
#!/usr/bin/env python import codecs import os import re import sys from setuptools import setup DESCRIPTION = 'UI-level acceptance test framework' def load_requirements(*requirements_paths): """ Load all requirements from the specified requirements files. Requirements will include any constraints from files specified with -c in the requirements files. Returns a list of requirement strings. """ # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why. # minor update to allow brackets in library names requirements = {} constraint_files = set() # groups "my-package-name<=x.y.z,..." into ("my-package-name", "<=x.y.z,...") requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?") def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present): regex_match = requirement_line_regex.match(current_line) if regex_match: package = regex_match.group(1) version_constraints = regex_match.group(2) existing_version_constraints = current_requirements.get(package, None) # it's fine to add constraints to an unconstrained package, but raise an error if there are already # constraints in place if existing_version_constraints and existing_version_constraints != version_constraints: raise BaseException(f'Multiple constraint definitions found for {package}:' f' "{existing_version_constraints}" and "{version_constraints}".' 
f'Combine constraints into one location with {package}' f'{existing_version_constraints},{version_constraints}.') if add_if_not_present or package in current_requirements: current_requirements[package] = version_constraints # process .in files and store the path to any constraint files that are pulled in for path in requirements_paths: with open(path) as reqs: for line in reqs: if is_requirement(line): add_version_constraint_or_raise(line, requirements, True) if line and line.startswith('-c') and not line.startswith('-c http'): constraint_files.add(os.path.dirname(path) + '/' + line.split('#')[0].replace('-c', '').strip()) # process constraint files and add any new constraints found to existing requirements for constraint_file in constraint_files: with open(constraint_file) as reader: for line in reader: if is_requirement(line): add_version_constraint_or_raise(line, requirements, False) # process back into list of pkg><=constraints strings constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())] return constrained_requirements def is_requirement(line): """ Return True if the requirement line is a package requirement. Returns: bool: True if the line is not blank, a comment, a URL, or an included file """ # UPDATED VIA SEMGREP - if you need to remove/modify this method remove this line and add a comment specifying why return line and line.strip() and not line.startswith(('-r', '#', '-e', 'git+', '-c')) if sys.argv[-1] == 'tag': print("Tagging the version on github:") os.system("git tag -a v%s -m 'v%s'" % (VERSION, VERSION)) os.system("git push --tags") sys.exit() with codecs.open('README.rst', 'r', 'utf-8') as f: LONG_DESCRIPTION = f.read() def get_version(*file_paths): """ Extract the version string from the file at the given relative path fragments. 
""" filename = os.path.join(os.path.dirname(__file__), *file_paths) with open(filename, encoding='utf-8') as opened_file: version_file = opened_file.read() version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError('Unable to find version string.') VERSION = get_version("bok_choy", "__init__.py") setup( name='bok_choy', version=VERSION, author='edX', author_email='oscm@edx.org', url='http://github.com/edx/bok-choy', description=DESCRIPTION, long_description=LONG_DESCRIPTION, license='Apache 2.0', classifiers=['Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance'], packages=['bok_choy', 'bok_choy/a11y'], package_data={'bok_choy': ['vendor/google/*.*', 'vendor/axe-core/*.*']}, install_requires=load_requirements('requirements/base.in'), extras_require={ 'visual_diff': ['needle'] } )
40.5
119
0.634713
import codecs import os import re import sys from setuptools import setup DESCRIPTION = 'UI-level acceptance test framework' def load_requirements(*requirements_paths): requirements = {} constraint_files = set() requirement_line_regex = re.compile(r"([a-zA-Z0-9-_.\[\]]+)([<>=][^#\s]+)?") def add_version_constraint_or_raise(current_line, current_requirements, add_if_not_present): regex_match = requirement_line_regex.match(current_line) if regex_match: package = regex_match.group(1) version_constraints = regex_match.group(2) existing_version_constraints = current_requirements.get(package, None) # constraints in place if existing_version_constraints and existing_version_constraints != version_constraints: raise BaseException(f'Multiple constraint definitions found for {package}:' f' "{existing_version_constraints}" and "{version_constraints}".' f'Combine constraints into one location with {package}' f'{existing_version_constraints},{version_constraints}.') if add_if_not_present or package in current_requirements: current_requirements[package] = version_constraints # process .in files and store the path to any constraint files that are pulled in for path in requirements_paths: with open(path) as reqs: for line in reqs: if is_requirement(line): add_version_constraint_or_raise(line, requirements, True) if line and line.startswith('-c') and not line.startswith('-c http'): constraint_files.add(os.path.dirname(path) + '/' + line.split(' # process constraint files and add any new constraints found to existing requirements for constraint_file in constraint_files: with open(constraint_file) as reader: for line in reader: if is_requirement(line): add_version_constraint_or_raise(line, requirements, False) # process back into list of pkg><=constraints strings constrained_requirements = [f'{pkg}{version or ""}' for (pkg, version) in sorted(requirements.items())] return constrained_requirements def is_requirement(line): # UPDATED VIA SEMGREP - if you need to remove/modify this 
method remove this line and add a comment specifying why return line and line.strip() and not line.startswith(('-r', ' if sys.argv[-1] == 'tag': print("Tagging the version on github:") os.system("git tag -a v%s -m 'v%s'" % (VERSION, VERSION)) os.system("git push --tags") sys.exit() with codecs.open('README.rst', 'r', 'utf-8') as f: LONG_DESCRIPTION = f.read() def get_version(*file_paths): filename = os.path.join(os.path.dirname(__file__), *file_paths) with open(filename, encoding='utf-8') as opened_file: version_file = opened_file.read() version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M) if version_match: return version_match.group(1) raise RuntimeError('Unable to find version string.') VERSION = get_version("bok_choy", "__init__.py") setup( name='bok_choy', version=VERSION, author='edX', author_email='oscm@edx.org', url='http://github.com/edx/bok-choy', description=DESCRIPTION, long_description=LONG_DESCRIPTION, license='Apache 2.0', classifiers=['Development Status :: 5 - Production/Stable', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Testing', 'Topic :: Software Development :: Quality Assurance'], packages=['bok_choy', 'bok_choy/a11y'], package_data={'bok_choy': ['vendor/google/*.*', 'vendor/axe-core/*.*']}, install_requires=load_requirements('requirements/base.in'), extras_require={ 'visual_diff': ['needle'] } )
true
true
f71d0934a87f934d765eb310af08af588938c234
7,835
py
Python
pyschism/outputs/combine.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
null
null
null
pyschism/outputs/combine.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
null
null
null
pyschism/outputs/combine.py
SorooshMani-NOAA/pyschism
df803edb53184625b12399f38a8bd26a022abbc1
[ "Apache-2.0" ]
null
null
null
import os from time import time import pathlib from typing import Dict, Union import glob import numpy as np import xarray as xr from pyschism.mesh.base import Gr3 def combine(var, shape, l2g, name): values = np.full(tuple(shape), np.nan) local_ids = list(l2g.keys()) for i, data in enumerate(var): cpu_id = str(i).zfill(6) #print(cpu_id) n_local_to_global = l2g[cpu_id] #print(n_local_to_global[0]) local_ids = list(n_local_to_global.keys()) global_idxs = list( map(lambda x: int(n_local_to_global[x])-1, local_ids)) #print(global_idxs[0]) values[global_idxs] = data return values def combine_(dst, var, shape, l2g, name): #if len(dst[0][var].shape) < 4: out = [] for i in range(len(dst)): out.append(dst[i][var]) r = combine(out, shape, l2g, name) return (xr.DataArray(r, dims = list(dst[0][var].dims), name = var)) class CombineOutputs: def __init__(self, path: Union[str, os.PathLike]): self.path = pathlib.Path(path) if not self.path.exists(): raise ValueError(f'Directory {self.path} does not exist.') nodes = {} elements = {} for ifile in sorted(self.path.glob(r'local_to_global_[0-9][0-9][0-9][0-9][0-9][0-9]')): with open(ifile) as f: ns_global, ne_global, np_global, nvrt, nproc, ntracers, \ T, S, GEN, AGE, SED3D, EcoSim, ICM, CoSINE, Feco, \ TIMOR, FARM, DVD = f.readline().split() f.readline() # elements ne_local = int(f.readline()) e_local_to_global = {} for i in range(ne_local): local_element_id, global_element_id = f.readline().split() e_local_to_global[local_element_id] = global_element_id # points np_local = int(f.readline()) n_local_to_global = {} for i in range(np_local): local_node_id, global_node_id = f.readline().split() n_local_to_global[local_node_id] = global_node_id # sides ns_local = int(f.readline()) s_local_to_global = {} for i in range(ns_local): local_side_id, global_side_id = f.readline().split() s_local_to_global[local_side_id] = global_side_id f.readline() # Header: line = f.readline().split() #old schism print to multiple lines not just one line if 
len(line) != 5: line.extend(f.readline().split()) self.start_year, self.start_month, self.start_day, \ self.start_hour, self.utc_start = line nrec, dtout, nspool, nvrt, kz, h0, h_s, h_c, theta_b, \ theta_f, ics = f.readline().split() #In the old version of schism, ztot was written to nvrt lines for i in np.arange(int(nvrt)): f.readline() # (ztot(k),k=1,kz-1),(sigma(k),k=1,nvrt-kz+1) f.readline() # (ztot(k),k=1,kz-1),(sigma(k),k=1,nvrt-kz+1) #_ne_local = None #_np_local = None #while _ne_local != ne_local and _np_local != np_local: # line = f.readline().split() # _np_local = int(float(line[0]) # _ne_local = int(float(line[1]) for i in range(np_local): x, y, z, flag = map(float, f.readline().split()) nodes.setdefault( n_local_to_global[str(i+1)], ((x, y), -z)) for i in range(ne_local): eids = f.readline().split()[1:] elements.setdefault( e_local_to_global[str(i+1)], list(map(lambda x: n_local_to_global[x], eids))) nproc_id = ifile.name.split('local_to_global_')[-1] self.e_local_to_global.setdefault(nproc_id, e_local_to_global) self.n_local_to_global.setdefault(nproc_id, n_local_to_global) self.s_local_to_global.setdefault(nproc_id, s_local_to_global) #nodes = {str(i+1): nodes[str(i+1)] for i in range(len(nodes))} #elements = {str(i+1): elements[str(i+1)] for i in range(len(elements))} self.hgrid = Gr3(nodes=nodes, elements=elements, crs='epsg:4326') def hotstart(self, it=None): self.filenames = sorted( self.path.glob(r'hotstart_*_{}.nc'.format(it))) dst = [] for i in range(len(self.filenames)): dst.append(xr.open_dataset(self.filenames[i])) #create dataset side = [] node = [] elem = [] one = [] #variables = ['eta2', 'su2', 'sv2'] #variables = ['eta2', 'we', 'su2', 'tr_el', 'time', 'it', 'ifile', 'nsteps_from_cold'] #for var in variables: for var in dst[0].variables: t0 = time() shape = [] if 'nResident_elem' in dst[0][var].dims: shape.append(self.hgrid.elements.array.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue 
else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.e_local_to_global, 'nResident_elem') elem.append(r) elif 'nResident_node' in dst[0][var].dims: shape.append(self.hgrid.nodes.values.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.n_local_to_global, 'nResident_node') node.append(r) elif 'nResident_side' in dst[0][var].dims: shape.append(self.hgrid.elements.sides.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.s_local_to_global, 'nResident_side') side.append(r) else: one.append(dst[0][var]) print(f'It took {time()-t0} seconds to combine var {var} in file[{i}]') side = xr.merge(side).rename({'nResident_side': 'side'}) elem = xr.merge(elem).rename({'nResident_elem': 'elem'}) node = xr.merge(node).rename({'nResident_node': 'node'}) one = xr.merge(one).rename({'one': 'one_new', 'it': 'iths'}) xdat = xr.merge([side, elem, node, one]) #xdat = xr.merge([node, one]) hfile = 'hotstart_it={}.nc'.format(it) xdat.to_netcdf(f'./{hfile}') @property def n_local_to_global(self): if not hasattr(self, '_n_local_to_global'): self._n_local_to_global = {} return self._n_local_to_global @property def s_local_to_global(self): if not hasattr(self, '_s_local_to_global'): self._s_local_to_global = {} return self._s_local_to_global @property def e_local_to_global(self): if not hasattr(self, '_e_local_to_global'): self._e_local_to_global = {} return self._e_local_to_global
41.675532
95
0.515763
import os from time import time import pathlib from typing import Dict, Union import glob import numpy as np import xarray as xr from pyschism.mesh.base import Gr3 def combine(var, shape, l2g, name): values = np.full(tuple(shape), np.nan) local_ids = list(l2g.keys()) for i, data in enumerate(var): cpu_id = str(i).zfill(6) n_local_to_global = l2g[cpu_id] local_ids = list(n_local_to_global.keys()) global_idxs = list( map(lambda x: int(n_local_to_global[x])-1, local_ids)) values[global_idxs] = data return values def combine_(dst, var, shape, l2g, name): out = [] for i in range(len(dst)): out.append(dst[i][var]) r = combine(out, shape, l2g, name) return (xr.DataArray(r, dims = list(dst[0][var].dims), name = var)) class CombineOutputs: def __init__(self, path: Union[str, os.PathLike]): self.path = pathlib.Path(path) if not self.path.exists(): raise ValueError(f'Directory {self.path} does not exist.') nodes = {} elements = {} for ifile in sorted(self.path.glob(r'local_to_global_[0-9][0-9][0-9][0-9][0-9][0-9]')): with open(ifile) as f: ns_global, ne_global, np_global, nvrt, nproc, ntracers, \ T, S, GEN, AGE, SED3D, EcoSim, ICM, CoSINE, Feco, \ TIMOR, FARM, DVD = f.readline().split() f.readline() ne_local = int(f.readline()) e_local_to_global = {} for i in range(ne_local): local_element_id, global_element_id = f.readline().split() e_local_to_global[local_element_id] = global_element_id np_local = int(f.readline()) n_local_to_global = {} for i in range(np_local): local_node_id, global_node_id = f.readline().split() n_local_to_global[local_node_id] = global_node_id ns_local = int(f.readline()) s_local_to_global = {} for i in range(ns_local): local_side_id, global_side_id = f.readline().split() s_local_to_global[local_side_id] = global_side_id f.readline() line = f.readline().split() if len(line) != 5: line.extend(f.readline().split()) self.start_year, self.start_month, self.start_day, \ self.start_hour, self.utc_start = line nrec, dtout, nspool, nvrt, kz, h0, h_s, h_c, 
theta_b, \ theta_f, ics = f.readline().split() for i in np.arange(int(nvrt)): f.readline() f.readline() for i in range(np_local): x, y, z, flag = map(float, f.readline().split()) nodes.setdefault( n_local_to_global[str(i+1)], ((x, y), -z)) for i in range(ne_local): eids = f.readline().split()[1:] elements.setdefault( e_local_to_global[str(i+1)], list(map(lambda x: n_local_to_global[x], eids))) nproc_id = ifile.name.split('local_to_global_')[-1] self.e_local_to_global.setdefault(nproc_id, e_local_to_global) self.n_local_to_global.setdefault(nproc_id, n_local_to_global) self.s_local_to_global.setdefault(nproc_id, s_local_to_global) self.hgrid = Gr3(nodes=nodes, elements=elements, crs='epsg:4326') def hotstart(self, it=None): self.filenames = sorted( self.path.glob(r'hotstart_*_{}.nc'.format(it))) dst = [] for i in range(len(self.filenames)): dst.append(xr.open_dataset(self.filenames[i])) side = [] node = [] elem = [] one = [] for var in dst[0].variables: t0 = time() shape = [] if 'nResident_elem' in dst[0][var].dims: shape.append(self.hgrid.elements.array.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.e_local_to_global, 'nResident_elem') elem.append(r) elif 'nResident_node' in dst[0][var].dims: shape.append(self.hgrid.nodes.values.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.n_local_to_global, 'nResident_node') node.append(r) elif 'nResident_side' in dst[0][var].dims: shape.append(self.hgrid.elements.sides.shape[0]) if len(dst[0][var].shape) > 1: for i in range(len(dst[0][var].shape)): if i == 0: continue else: shape.append(dst[0][var].shape[i]) r = combine_(dst, var, shape, self.s_local_to_global, 'nResident_side') side.append(r) else: one.append(dst[0][var]) print(f'It took {time()-t0} seconds to combine var 
{var} in file[{i}]') side = xr.merge(side).rename({'nResident_side': 'side'}) elem = xr.merge(elem).rename({'nResident_elem': 'elem'}) node = xr.merge(node).rename({'nResident_node': 'node'}) one = xr.merge(one).rename({'one': 'one_new', 'it': 'iths'}) xdat = xr.merge([side, elem, node, one]) hfile = 'hotstart_it={}.nc'.format(it) xdat.to_netcdf(f'./{hfile}') @property def n_local_to_global(self): if not hasattr(self, '_n_local_to_global'): self._n_local_to_global = {} return self._n_local_to_global @property def s_local_to_global(self): if not hasattr(self, '_s_local_to_global'): self._s_local_to_global = {} return self._s_local_to_global @property def e_local_to_global(self): if not hasattr(self, '_e_local_to_global'): self._e_local_to_global = {} return self._e_local_to_global
true
true
f71d0974fd90c6e55313e89b3f21b9cf56c88f29
4,930
py
Python
lib/silfont/scripts/psftuneraliases.py
simoncozens/pysilfont
bb8a9fc58a83e074bbcc466ba058841845b9107e
[ "MIT" ]
41
2015-05-21T21:12:26.000Z
2022-02-17T17:23:14.000Z
lib/silfont/scripts/psftuneraliases.py
simoncozens/pysilfont
bb8a9fc58a83e074bbcc466ba058841845b9107e
[ "MIT" ]
63
2015-05-15T10:25:55.000Z
2021-02-23T04:51:17.000Z
lib/silfont/scripts/psftuneraliases.py
simoncozens/pysilfont
bb8a9fc58a83e074bbcc466ba058841845b9107e
[ "MIT" ]
12
2015-06-12T11:52:08.000Z
2020-09-23T10:40:59.000Z
#!/usr/bin/env python __doc__ = '''Merge lookup and feature aliases into TypeTuner feature file''' __url__ = 'http://github.com/silnrsi/pysilfont' __copyright__ = 'Copyright (c) 2019 SIL International (http://www.sil.org)' __license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)' __author__ = 'Bob Hallissy' from silfont.core import execute from xml.etree import ElementTree as ET from fontTools import ttLib import csv import struct argspec = [ ('input', {'help': 'Input TypeTuner feature file'}, {'type': 'infile'}), ('output', {'help': 'Output TypeTuner feature file'}, {}), ('-m','--mapping', {'help': 'Input csv mapping file'}, {'type': 'incsv'}), ('-f','--ttf', {'help': 'Compiled TTF file'}, {}), ('-l','--log',{'help': 'Optional log file'}, {'type': 'outfile', 'def': '_tuneraliases.log', 'optlog': True}), ] def doit(args) : logger = args.logger if args.mapping is None and args.ttf is None: logger.log("One or both of -m and -f must be provided", "S") featdoc = ET.parse(args.input) root = featdoc.getroot() if root.tag != 'all_features': logger.log("Invalid TypeTuner feature file: missing root element", "S") # Whitespace to add after each new alias: tail = '\n\t\t' # Find or add alliaes element aliases = root.find('aliases') if aliases is None: aliases = ET.SubElement(root,'aliases') aliases.tail = '\n' added = set() duplicates = set() def setalias(name, value): # detect duplicate names in input if name in added: duplicates.add(name) else: added.add(name) # modify existing or add new alias alias = aliases.find('alias[@name="{}"]'.format(name)) if alias is None: alias = ET.SubElement(aliases, 'alias', {'name': name, 'value': value}) alias.tail = tail else: alias.set('value', value) # Process mapping file if present: if args.mapping: # Mapping file is assumed to come from psfbuildfea, and should look like: # lookupname,table,index # e.g. 
DigitAlternates,GSUB,51 for (name,table,value) in args.mapping: setalias(name, value) # Process the ttf file if present if args.ttf: # Generate aliases for features. # In this code featureID means the key used in FontUtils for finding the feature, e.g., "calt _2" def dotable(t): # Common routine for GPOS and GSUB currtag = None currtagindex = None flist = [] # list, in order, of (featureTag, featureID), per Font::TTF for i in range(0,t.FeatureList.FeatureCount): newtag = str(t.FeatureList.FeatureRecord[i].FeatureTag) if currtag is None or currtag != newtag: flist.append((newtag, newtag)) currtag = newtag currtagindex = 0 else: flist.append( (currtag, '{} _{}'.format(currtag, currtagindex))) currtagindex += 1 fslList = {} # dictionary keyed by feature_script_lang values returning featureID for s in t.ScriptList.ScriptRecord: currtag = str(s.ScriptTag) # At present only looking at the dflt lang entries for findex in s.Script.DefaultLangSys.FeatureIndex: fslList['{}_{}_dflt'.format(flist[findex][0],currtag)] = flist[findex][1] # Now that we have them all, add them in sorted order. for name, value in sorted(fslList.items()): setalias(name,value) # Open the TTF for processing try: f = ttLib.TTFont(args.ttf) except Exception as e: logger.log("Couldn't open font '{}' for reading : {}".format(args.ttf, str(e)),"S") # Grab features from GSUB and GPOS for tag in ('GSUB', 'GPOS'): try: dotable(f[tag].table) except Exception as e: logger.log("Failed to process {} table: {}".format(tag, str(e)), "W") # Grab features from Graphite: try: for tag in sorted(f['Feat'].features.keys()): if tag == '1': continue name = 'gr_' + tag value = str(struct.unpack('>L', tag.encode())[0]) setalias(name,value) except Exception as e: logger.log("Failed to process Feat table: {}".format(str(e)), "W") if len(duplicates): logger.log("The following aliases defined more than once in input: {}".format(", ".join(sorted(duplicates))), "S") # Success. 
Write the result featdoc.write(args.output, encoding='UTF-8', xml_declaration=True) def cmd() : execute(None,doit,argspec) if __name__ == "__main__": cmd()
40.409836
122
0.577079
__doc__ = '''Merge lookup and feature aliases into TypeTuner feature file''' __url__ = 'http://github.com/silnrsi/pysilfont' __copyright__ = 'Copyright (c) 2019 SIL International (http://www.sil.org)' __license__ = 'Released under the MIT License (http://opensource.org/licenses/MIT)' __author__ = 'Bob Hallissy' from silfont.core import execute from xml.etree import ElementTree as ET from fontTools import ttLib import csv import struct argspec = [ ('input', {'help': 'Input TypeTuner feature file'}, {'type': 'infile'}), ('output', {'help': 'Output TypeTuner feature file'}, {}), ('-m','--mapping', {'help': 'Input csv mapping file'}, {'type': 'incsv'}), ('-f','--ttf', {'help': 'Compiled TTF file'}, {}), ('-l','--log',{'help': 'Optional log file'}, {'type': 'outfile', 'def': '_tuneraliases.log', 'optlog': True}), ] def doit(args) : logger = args.logger if args.mapping is None and args.ttf is None: logger.log("One or both of -m and -f must be provided", "S") featdoc = ET.parse(args.input) root = featdoc.getroot() if root.tag != 'all_features': logger.log("Invalid TypeTuner feature file: missing root element", "S") tail = '\n\t\t' aliases = root.find('aliases') if aliases is None: aliases = ET.SubElement(root,'aliases') aliases.tail = '\n' added = set() duplicates = set() def setalias(name, value): if name in added: duplicates.add(name) else: added.add(name) alias = aliases.find('alias[@name="{}"]'.format(name)) if alias is None: alias = ET.SubElement(aliases, 'alias', {'name': name, 'value': value}) alias.tail = tail else: alias.set('value', value) if args.mapping: for (name,table,value) in args.mapping: setalias(name, value) if args.ttf: def dotable(t): currtag = None currtagindex = None flist = [] for i in range(0,t.FeatureList.FeatureCount): newtag = str(t.FeatureList.FeatureRecord[i].FeatureTag) if currtag is None or currtag != newtag: flist.append((newtag, newtag)) currtag = newtag currtagindex = 0 else: flist.append( (currtag, '{} _{}'.format(currtag, 
currtagindex))) currtagindex += 1 fslList = {} for s in t.ScriptList.ScriptRecord: currtag = str(s.ScriptTag) for findex in s.Script.DefaultLangSys.FeatureIndex: fslList['{}_{}_dflt'.format(flist[findex][0],currtag)] = flist[findex][1] for name, value in sorted(fslList.items()): setalias(name,value) try: f = ttLib.TTFont(args.ttf) except Exception as e: logger.log("Couldn't open font '{}' for reading : {}".format(args.ttf, str(e)),"S") # Grab features from GSUB and GPOS for tag in ('GSUB', 'GPOS'): try: dotable(f[tag].table) except Exception as e: logger.log("Failed to process {} table: {}".format(tag, str(e)), "W") # Grab features from Graphite: try: for tag in sorted(f['Feat'].features.keys()): if tag == '1': continue name = 'gr_' + tag value = str(struct.unpack('>L', tag.encode())[0]) setalias(name,value) except Exception as e: logger.log("Failed to process Feat table: {}".format(str(e)), "W") if len(duplicates): logger.log("The following aliases defined more than once in input: {}".format(", ".join(sorted(duplicates))), "S") # Success. Write the result featdoc.write(args.output, encoding='UTF-8', xml_declaration=True) def cmd() : execute(None,doit,argspec) if __name__ == "__main__": cmd()
true
true
f71d0cdd41ebaf5f19271d970cbfc6c054ba49fb
4,068
py
Python
relbert_cl/train.py
asahi417/relbert
cb718e40fb452e88ccae1c271ccdea25013791b1
[ "MIT" ]
17
2021-09-10T14:49:41.000Z
2022-01-26T13:18:02.000Z
relbert_cl/train.py
asahi417/relbert
cb718e40fb452e88ccae1c271ccdea25013791b1
[ "MIT" ]
2
2021-11-14T07:47:36.000Z
2021-11-22T17:34:06.000Z
relbert_cl/train.py
asahi417/relbert
cb718e40fb452e88ccae1c271ccdea25013791b1
[ "MIT" ]
1
2021-12-14T01:35:05.000Z
2021-12-14T01:35:05.000Z
""" Train RelBERT model. """ import argparse import logging import relbert def config(parser): # optimization parser.add_argument('-s', '--softmax-loss', help='softmax loss', action='store_true') parser.add_argument('-n', '--in-batch-negative', help='in batch negative', action='store_true') parser.add_argument('-p', '--parent-contrast', help='hierarchical contrastive loss', action='store_true') parser.add_argument('-e', '--epoch', help='training epochs', default=1, type=int) parser.add_argument('--mse-margin', help='contrastive loss margin', default=1, type=int) parser.add_argument('-b', '--batch', help='batch size', default=64, type=int) parser.add_argument('--lr', help='learning rate', default=0.00002, type=float) parser.add_argument('--random-seed', help='random seed', default=0, type=int) parser.add_argument('--lr-decay', help='linear decay of learning rate after warmup', action='store_true') parser.add_argument("--lr-warmup", help="linear warmup of lr", default=10, type=int) parser.add_argument("--weight-decay", help="l2 penalty for weight decay", default=0, type=float) parser.add_argument('--optimizer', help='optimizer `adam`/`adamax`/`adam`', default='adam', type=str) parser.add_argument("--momentum", help="sgd momentum", default=0.9, type=float) # training environment parser.add_argument('--cache-dir', help='cache directory to store dataset', default=None, type=str) parser.add_argument('--num-workers', help='workers for dataloder', default=5, type=int) parser.add_argument('--fp16', help='fp16 for training', action='store_true') parser.add_argument('--epoch-save', help='interval to save model weight', default=5, type=int) parser.add_argument('--debug', help='log level', action='store_true') parser.add_argument('--export', help='directory to export model weight file', required=True, type=str) # language model parser.add_argument('-m', '--model', help='language model', default='roberta-large', type=str) parser.add_argument('-l', '--max-length', help='length', 
default=64, type=int) parser.add_argument('--mode', help='lm mode', default='average_no_mask', type=str) # data parser.add_argument('--data', help='dataset', default='semeval2012', type=str) parser.add_argument('--n-sample', help='sample size', default=10, type=int) parser.add_argument('-t', '--template-type', help='template type or path to generated prompt file', default='a', type=str) return parser def main(): argument_parser = argparse.ArgumentParser(description='Train RelBERT.') argument_parser = config(argument_parser) opt = argument_parser.parse_args() # logging level = logging.DEBUG if opt.debug else logging.INFO logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=level, datefmt='%Y-%m-%d %H:%M:%S') trainer = relbert.Trainer( model=opt.model, max_length=opt.max_length, mode=opt.mode, data=opt.data, n_sample=opt.n_sample, template_type=opt.template_type, softmax_loss=opt.softmax_loss, in_batch_negative=opt.in_batch_negative, parent_contrast=opt.parent_contrast, mse_margin=opt.mse_margin, epoch=opt.epoch, export=opt.export, batch=opt.batch, lr=opt.lr, lr_decay=opt.lr_decay, lr_warmup=opt.lr_warmup, weight_decay=opt.weight_decay, optimizer=opt.optimizer, momentum=opt.momentum, fp16=opt.fp16, random_seed=opt.random_seed, cache_dir=opt.cache_dir) # add file handler logger = logging.getLogger() file_handler = logging.FileHandler('{}/training.log'.format(trainer.checkpoint_dir)) file_handler.setLevel(level) file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')) logger.addHandler(file_handler) trainer.train(num_workers=opt.num_workers, epoch_save=opt.epoch_save) if __name__ == '__main__': main()
47.302326
115
0.684366
import argparse import logging import relbert def config(parser): parser.add_argument('-s', '--softmax-loss', help='softmax loss', action='store_true') parser.add_argument('-n', '--in-batch-negative', help='in batch negative', action='store_true') parser.add_argument('-p', '--parent-contrast', help='hierarchical contrastive loss', action='store_true') parser.add_argument('-e', '--epoch', help='training epochs', default=1, type=int) parser.add_argument('--mse-margin', help='contrastive loss margin', default=1, type=int) parser.add_argument('-b', '--batch', help='batch size', default=64, type=int) parser.add_argument('--lr', help='learning rate', default=0.00002, type=float) parser.add_argument('--random-seed', help='random seed', default=0, type=int) parser.add_argument('--lr-decay', help='linear decay of learning rate after warmup', action='store_true') parser.add_argument("--lr-warmup", help="linear warmup of lr", default=10, type=int) parser.add_argument("--weight-decay", help="l2 penalty for weight decay", default=0, type=float) parser.add_argument('--optimizer', help='optimizer `adam`/`adamax`/`adam`', default='adam', type=str) parser.add_argument("--momentum", help="sgd momentum", default=0.9, type=float) parser.add_argument('--cache-dir', help='cache directory to store dataset', default=None, type=str) parser.add_argument('--num-workers', help='workers for dataloder', default=5, type=int) parser.add_argument('--fp16', help='fp16 for training', action='store_true') parser.add_argument('--epoch-save', help='interval to save model weight', default=5, type=int) parser.add_argument('--debug', help='log level', action='store_true') parser.add_argument('--export', help='directory to export model weight file', required=True, type=str) parser.add_argument('-m', '--model', help='language model', default='roberta-large', type=str) parser.add_argument('-l', '--max-length', help='length', default=64, type=int) parser.add_argument('--mode', help='lm mode', 
default='average_no_mask', type=str) parser.add_argument('--data', help='dataset', default='semeval2012', type=str) parser.add_argument('--n-sample', help='sample size', default=10, type=int) parser.add_argument('-t', '--template-type', help='template type or path to generated prompt file', default='a', type=str) return parser def main(): argument_parser = argparse.ArgumentParser(description='Train RelBERT.') argument_parser = config(argument_parser) opt = argument_parser.parse_args() level = logging.DEBUG if opt.debug else logging.INFO logging.basicConfig(format='%(asctime)s %(levelname)-8s %(message)s', level=level, datefmt='%Y-%m-%d %H:%M:%S') trainer = relbert.Trainer( model=opt.model, max_length=opt.max_length, mode=opt.mode, data=opt.data, n_sample=opt.n_sample, template_type=opt.template_type, softmax_loss=opt.softmax_loss, in_batch_negative=opt.in_batch_negative, parent_contrast=opt.parent_contrast, mse_margin=opt.mse_margin, epoch=opt.epoch, export=opt.export, batch=opt.batch, lr=opt.lr, lr_decay=opt.lr_decay, lr_warmup=opt.lr_warmup, weight_decay=opt.weight_decay, optimizer=opt.optimizer, momentum=opt.momentum, fp16=opt.fp16, random_seed=opt.random_seed, cache_dir=opt.cache_dir) logger = logging.getLogger() file_handler = logging.FileHandler('{}/training.log'.format(trainer.checkpoint_dir)) file_handler.setLevel(level) file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')) logger.addHandler(file_handler) trainer.train(num_workers=opt.num_workers, epoch_save=opt.epoch_save) if __name__ == '__main__': main()
true
true
f71d0fb7e6da71f240857ee7196759e47de60c68
11,547
py
Python
src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py
Caoxuyang/azure-cli-extensions
d2011261f29033cb31a1064256727d87049ab423
[ "MIT" ]
1
2022-02-01T18:50:12.000Z
2022-02-01T18:50:12.000Z
src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py
Caoxuyang/azure-cli-extensions
d2011261f29033cb31a1064256727d87049ab423
[ "MIT" ]
9
2022-03-25T19:35:49.000Z
2022-03-31T06:09:47.000Z
src/virtual-wan/azext_vwan/vendored_sdks/v2021_08_01/v2021_08_01/operations/__init__.py
Caoxuyang/azure-cli-extensions
d2011261f29033cb31a1064256727d87049ab423
[ "MIT" ]
1
2022-02-14T21:43:29.000Z
2022-02-14T21:43:29.000Z
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._operations import ApplicationGatewaysOperations from ._operations import ApplicationGatewayPrivateLinkResourcesOperations from ._operations import ApplicationGatewayPrivateEndpointConnectionsOperations from ._operations import ApplicationSecurityGroupsOperations from ._operations import AvailableDelegationsOperations from ._operations import AvailableResourceGroupDelegationsOperations from ._operations import AvailableServiceAliasesOperations from ._operations import AzureFirewallsOperations from ._operations import AzureFirewallFqdnTagsOperations from ._operations import WebCategoriesOperations from ._operations import BastionHostsOperations from ._operations import NetworkManagementClientOperationsMixin from ._operations import NetworkInterfacesOperations from ._operations import PublicIPAddressesOperations from ._operations import CustomIPPrefixesOperations from ._operations import DdosCustomPoliciesOperations from ._operations import DdosProtectionPlansOperations from ._operations import DscpConfigurationOperations from ._operations import AvailableEndpointServicesOperations from ._operations import ExpressRouteCircuitAuthorizationsOperations from ._operations import ExpressRouteCircuitPeeringsOperations from ._operations import ExpressRouteCircuitConnectionsOperations from ._operations import PeerExpressRouteCircuitConnectionsOperations from ._operations import ExpressRouteCircuitsOperations from ._operations import ExpressRouteServiceProvidersOperations from ._operations import 
ExpressRouteCrossConnectionsOperations from ._operations import ExpressRouteCrossConnectionPeeringsOperations from ._operations import ExpressRoutePortsLocationsOperations from ._operations import ExpressRoutePortsOperations from ._operations import ExpressRouteLinksOperations from ._operations import ExpressRoutePortAuthorizationsOperations from ._operations import FirewallPoliciesOperations from ._operations import FirewallPolicyRuleCollectionGroupsOperations from ._operations import FirewallPolicyIdpsSignaturesOperations from ._operations import FirewallPolicyIdpsSignaturesOverridesOperations from ._operations import FirewallPolicyIdpsSignaturesFilterValuesOperations from ._operations import IpAllocationsOperations from ._operations import IpGroupsOperations from ._operations import LoadBalancersOperations from ._operations import LoadBalancerBackendAddressPoolsOperations from ._operations import LoadBalancerFrontendIPConfigurationsOperations from ._operations import InboundNatRulesOperations from ._operations import LoadBalancerLoadBalancingRulesOperations from ._operations import LoadBalancerOutboundRulesOperations from ._operations import LoadBalancerNetworkInterfacesOperations from ._operations import LoadBalancerProbesOperations from ._operations import NatGatewaysOperations from ._operations import NetworkInterfaceIPConfigurationsOperations from ._operations import NetworkInterfaceLoadBalancersOperations from ._operations import NetworkInterfaceTapConfigurationsOperations from ._operations import NetworkProfilesOperations from ._operations import NetworkSecurityGroupsOperations from ._operations import SecurityRulesOperations from ._operations import DefaultSecurityRulesOperations from ._operations import NetworkVirtualAppliancesOperations from ._operations import VirtualApplianceSitesOperations from ._operations import VirtualApplianceSkusOperations from ._operations import InboundSecurityRuleOperations from ._operations import NetworkWatchersOperations 
from ._operations import PacketCapturesOperations from ._operations import ConnectionMonitorsOperations from ._operations import FlowLogsOperations from ._operations import Operations from ._operations import PrivateEndpointsOperations from ._operations import AvailablePrivateEndpointTypesOperations from ._operations import PrivateDnsZoneGroupsOperations from ._operations import PrivateLinkServicesOperations from ._operations import PublicIPPrefixesOperations from ._operations import RouteFiltersOperations from ._operations import RouteFilterRulesOperations from ._operations import RouteTablesOperations from ._operations import RoutesOperations from ._operations import SecurityPartnerProvidersOperations from ._operations import BgpServiceCommunitiesOperations from ._operations import ServiceEndpointPoliciesOperations from ._operations import ServiceEndpointPolicyDefinitionsOperations from ._operations import ServiceTagsOperations from ._operations import ServiceTagInformationOperations from ._operations import UsagesOperations from ._operations import VirtualNetworksOperations from ._operations import SubnetsOperations from ._operations import ResourceNavigationLinksOperations from ._operations import ServiceAssociationLinksOperations from ._operations import VirtualNetworkPeeringsOperations from ._operations import VirtualNetworkGatewaysOperations from ._operations import VirtualNetworkGatewayConnectionsOperations from ._operations import LocalNetworkGatewaysOperations from ._operations import VirtualNetworkGatewayNatRulesOperations from ._operations import VirtualNetworkTapsOperations from ._operations import VirtualRoutersOperations from ._operations import VirtualRouterPeeringsOperations from ._operations import VirtualWansOperations from ._operations import VpnSitesOperations from ._operations import VpnSiteLinksOperations from ._operations import VpnSitesConfigurationOperations from ._operations import VpnServerConfigurationsOperations from ._operations 
import ConfigurationPolicyGroupsOperations from ._operations import VirtualHubsOperations from ._operations import HubVirtualNetworkConnectionsOperations from ._operations import VpnGatewaysOperations from ._operations import VpnLinkConnectionsOperations from ._operations import VpnConnectionsOperations from ._operations import VpnSiteLinkConnectionsOperations from ._operations import NatRulesOperations from ._operations import P2SVpnGatewaysOperations from ._operations import VpnServerConfigurationsAssociatedWithVirtualWanOperations from ._operations import VirtualHubRouteTableV2SOperations from ._operations import ExpressRouteGatewaysOperations from ._operations import ExpressRouteConnectionsOperations from ._operations import VirtualHubBgpConnectionOperations from ._operations import VirtualHubBgpConnectionsOperations from ._operations import VirtualHubIpConfigurationOperations from ._operations import HubRouteTablesOperations from ._operations import RoutingIntentOperations from ._operations import WebApplicationFirewallPoliciesOperations __all__ = [ 'ApplicationGatewaysOperations', 'ApplicationGatewayPrivateLinkResourcesOperations', 'ApplicationGatewayPrivateEndpointConnectionsOperations', 'ApplicationSecurityGroupsOperations', 'AvailableDelegationsOperations', 'AvailableResourceGroupDelegationsOperations', 'AvailableServiceAliasesOperations', 'AzureFirewallsOperations', 'AzureFirewallFqdnTagsOperations', 'WebCategoriesOperations', 'BastionHostsOperations', 'NetworkManagementClientOperationsMixin', 'NetworkInterfacesOperations', 'PublicIPAddressesOperations', 'CustomIPPrefixesOperations', 'DdosCustomPoliciesOperations', 'DdosProtectionPlansOperations', 'DscpConfigurationOperations', 'AvailableEndpointServicesOperations', 'ExpressRouteCircuitAuthorizationsOperations', 'ExpressRouteCircuitPeeringsOperations', 'ExpressRouteCircuitConnectionsOperations', 'PeerExpressRouteCircuitConnectionsOperations', 'ExpressRouteCircuitsOperations', 
'ExpressRouteServiceProvidersOperations', 'ExpressRouteCrossConnectionsOperations', 'ExpressRouteCrossConnectionPeeringsOperations', 'ExpressRoutePortsLocationsOperations', 'ExpressRoutePortsOperations', 'ExpressRouteLinksOperations', 'ExpressRoutePortAuthorizationsOperations', 'FirewallPoliciesOperations', 'FirewallPolicyRuleCollectionGroupsOperations', 'FirewallPolicyIdpsSignaturesOperations', 'FirewallPolicyIdpsSignaturesOverridesOperations', 'FirewallPolicyIdpsSignaturesFilterValuesOperations', 'IpAllocationsOperations', 'IpGroupsOperations', 'LoadBalancersOperations', 'LoadBalancerBackendAddressPoolsOperations', 'LoadBalancerFrontendIPConfigurationsOperations', 'InboundNatRulesOperations', 'LoadBalancerLoadBalancingRulesOperations', 'LoadBalancerOutboundRulesOperations', 'LoadBalancerNetworkInterfacesOperations', 'LoadBalancerProbesOperations', 'NatGatewaysOperations', 'NetworkInterfaceIPConfigurationsOperations', 'NetworkInterfaceLoadBalancersOperations', 'NetworkInterfaceTapConfigurationsOperations', 'NetworkProfilesOperations', 'NetworkSecurityGroupsOperations', 'SecurityRulesOperations', 'DefaultSecurityRulesOperations', 'NetworkVirtualAppliancesOperations', 'VirtualApplianceSitesOperations', 'VirtualApplianceSkusOperations', 'InboundSecurityRuleOperations', 'NetworkWatchersOperations', 'PacketCapturesOperations', 'ConnectionMonitorsOperations', 'FlowLogsOperations', 'Operations', 'PrivateEndpointsOperations', 'AvailablePrivateEndpointTypesOperations', 'PrivateDnsZoneGroupsOperations', 'PrivateLinkServicesOperations', 'PublicIPPrefixesOperations', 'RouteFiltersOperations', 'RouteFilterRulesOperations', 'RouteTablesOperations', 'RoutesOperations', 'SecurityPartnerProvidersOperations', 'BgpServiceCommunitiesOperations', 'ServiceEndpointPoliciesOperations', 'ServiceEndpointPolicyDefinitionsOperations', 'ServiceTagsOperations', 'ServiceTagInformationOperations', 'UsagesOperations', 'VirtualNetworksOperations', 'SubnetsOperations', 
'ResourceNavigationLinksOperations', 'ServiceAssociationLinksOperations', 'VirtualNetworkPeeringsOperations', 'VirtualNetworkGatewaysOperations', 'VirtualNetworkGatewayConnectionsOperations', 'LocalNetworkGatewaysOperations', 'VirtualNetworkGatewayNatRulesOperations', 'VirtualNetworkTapsOperations', 'VirtualRoutersOperations', 'VirtualRouterPeeringsOperations', 'VirtualWansOperations', 'VpnSitesOperations', 'VpnSiteLinksOperations', 'VpnSitesConfigurationOperations', 'VpnServerConfigurationsOperations', 'ConfigurationPolicyGroupsOperations', 'VirtualHubsOperations', 'HubVirtualNetworkConnectionsOperations', 'VpnGatewaysOperations', 'VpnLinkConnectionsOperations', 'VpnConnectionsOperations', 'VpnSiteLinkConnectionsOperations', 'NatRulesOperations', 'P2SVpnGatewaysOperations', 'VpnServerConfigurationsAssociatedWithVirtualWanOperations', 'VirtualHubRouteTableV2SOperations', 'ExpressRouteGatewaysOperations', 'ExpressRouteConnectionsOperations', 'VirtualHubBgpConnectionOperations', 'VirtualHubBgpConnectionsOperations', 'VirtualHubIpConfigurationOperations', 'HubRouteTablesOperations', 'RoutingIntentOperations', 'WebApplicationFirewallPoliciesOperations', ]
47.714876
94
0.850611
from ._operations import ApplicationGatewaysOperations from ._operations import ApplicationGatewayPrivateLinkResourcesOperations from ._operations import ApplicationGatewayPrivateEndpointConnectionsOperations from ._operations import ApplicationSecurityGroupsOperations from ._operations import AvailableDelegationsOperations from ._operations import AvailableResourceGroupDelegationsOperations from ._operations import AvailableServiceAliasesOperations from ._operations import AzureFirewallsOperations from ._operations import AzureFirewallFqdnTagsOperations from ._operations import WebCategoriesOperations from ._operations import BastionHostsOperations from ._operations import NetworkManagementClientOperationsMixin from ._operations import NetworkInterfacesOperations from ._operations import PublicIPAddressesOperations from ._operations import CustomIPPrefixesOperations from ._operations import DdosCustomPoliciesOperations from ._operations import DdosProtectionPlansOperations from ._operations import DscpConfigurationOperations from ._operations import AvailableEndpointServicesOperations from ._operations import ExpressRouteCircuitAuthorizationsOperations from ._operations import ExpressRouteCircuitPeeringsOperations from ._operations import ExpressRouteCircuitConnectionsOperations from ._operations import PeerExpressRouteCircuitConnectionsOperations from ._operations import ExpressRouteCircuitsOperations from ._operations import ExpressRouteServiceProvidersOperations from ._operations import ExpressRouteCrossConnectionsOperations from ._operations import ExpressRouteCrossConnectionPeeringsOperations from ._operations import ExpressRoutePortsLocationsOperations from ._operations import ExpressRoutePortsOperations from ._operations import ExpressRouteLinksOperations from ._operations import ExpressRoutePortAuthorizationsOperations from ._operations import FirewallPoliciesOperations from ._operations import FirewallPolicyRuleCollectionGroupsOperations from ._operations 
import FirewallPolicyIdpsSignaturesOperations from ._operations import FirewallPolicyIdpsSignaturesOverridesOperations from ._operations import FirewallPolicyIdpsSignaturesFilterValuesOperations from ._operations import IpAllocationsOperations from ._operations import IpGroupsOperations from ._operations import LoadBalancersOperations from ._operations import LoadBalancerBackendAddressPoolsOperations from ._operations import LoadBalancerFrontendIPConfigurationsOperations from ._operations import InboundNatRulesOperations from ._operations import LoadBalancerLoadBalancingRulesOperations from ._operations import LoadBalancerOutboundRulesOperations from ._operations import LoadBalancerNetworkInterfacesOperations from ._operations import LoadBalancerProbesOperations from ._operations import NatGatewaysOperations from ._operations import NetworkInterfaceIPConfigurationsOperations from ._operations import NetworkInterfaceLoadBalancersOperations from ._operations import NetworkInterfaceTapConfigurationsOperations from ._operations import NetworkProfilesOperations from ._operations import NetworkSecurityGroupsOperations from ._operations import SecurityRulesOperations from ._operations import DefaultSecurityRulesOperations from ._operations import NetworkVirtualAppliancesOperations from ._operations import VirtualApplianceSitesOperations from ._operations import VirtualApplianceSkusOperations from ._operations import InboundSecurityRuleOperations from ._operations import NetworkWatchersOperations from ._operations import PacketCapturesOperations from ._operations import ConnectionMonitorsOperations from ._operations import FlowLogsOperations from ._operations import Operations from ._operations import PrivateEndpointsOperations from ._operations import AvailablePrivateEndpointTypesOperations from ._operations import PrivateDnsZoneGroupsOperations from ._operations import PrivateLinkServicesOperations from ._operations import PublicIPPrefixesOperations from ._operations 
import RouteFiltersOperations from ._operations import RouteFilterRulesOperations from ._operations import RouteTablesOperations from ._operations import RoutesOperations from ._operations import SecurityPartnerProvidersOperations from ._operations import BgpServiceCommunitiesOperations from ._operations import ServiceEndpointPoliciesOperations from ._operations import ServiceEndpointPolicyDefinitionsOperations from ._operations import ServiceTagsOperations from ._operations import ServiceTagInformationOperations from ._operations import UsagesOperations from ._operations import VirtualNetworksOperations from ._operations import SubnetsOperations from ._operations import ResourceNavigationLinksOperations from ._operations import ServiceAssociationLinksOperations from ._operations import VirtualNetworkPeeringsOperations from ._operations import VirtualNetworkGatewaysOperations from ._operations import VirtualNetworkGatewayConnectionsOperations from ._operations import LocalNetworkGatewaysOperations from ._operations import VirtualNetworkGatewayNatRulesOperations from ._operations import VirtualNetworkTapsOperations from ._operations import VirtualRoutersOperations from ._operations import VirtualRouterPeeringsOperations from ._operations import VirtualWansOperations from ._operations import VpnSitesOperations from ._operations import VpnSiteLinksOperations from ._operations import VpnSitesConfigurationOperations from ._operations import VpnServerConfigurationsOperations from ._operations import ConfigurationPolicyGroupsOperations from ._operations import VirtualHubsOperations from ._operations import HubVirtualNetworkConnectionsOperations from ._operations import VpnGatewaysOperations from ._operations import VpnLinkConnectionsOperations from ._operations import VpnConnectionsOperations from ._operations import VpnSiteLinkConnectionsOperations from ._operations import NatRulesOperations from ._operations import P2SVpnGatewaysOperations from ._operations import 
VpnServerConfigurationsAssociatedWithVirtualWanOperations from ._operations import VirtualHubRouteTableV2SOperations from ._operations import ExpressRouteGatewaysOperations from ._operations import ExpressRouteConnectionsOperations from ._operations import VirtualHubBgpConnectionOperations from ._operations import VirtualHubBgpConnectionsOperations from ._operations import VirtualHubIpConfigurationOperations from ._operations import HubRouteTablesOperations from ._operations import RoutingIntentOperations from ._operations import WebApplicationFirewallPoliciesOperations __all__ = [ 'ApplicationGatewaysOperations', 'ApplicationGatewayPrivateLinkResourcesOperations', 'ApplicationGatewayPrivateEndpointConnectionsOperations', 'ApplicationSecurityGroupsOperations', 'AvailableDelegationsOperations', 'AvailableResourceGroupDelegationsOperations', 'AvailableServiceAliasesOperations', 'AzureFirewallsOperations', 'AzureFirewallFqdnTagsOperations', 'WebCategoriesOperations', 'BastionHostsOperations', 'NetworkManagementClientOperationsMixin', 'NetworkInterfacesOperations', 'PublicIPAddressesOperations', 'CustomIPPrefixesOperations', 'DdosCustomPoliciesOperations', 'DdosProtectionPlansOperations', 'DscpConfigurationOperations', 'AvailableEndpointServicesOperations', 'ExpressRouteCircuitAuthorizationsOperations', 'ExpressRouteCircuitPeeringsOperations', 'ExpressRouteCircuitConnectionsOperations', 'PeerExpressRouteCircuitConnectionsOperations', 'ExpressRouteCircuitsOperations', 'ExpressRouteServiceProvidersOperations', 'ExpressRouteCrossConnectionsOperations', 'ExpressRouteCrossConnectionPeeringsOperations', 'ExpressRoutePortsLocationsOperations', 'ExpressRoutePortsOperations', 'ExpressRouteLinksOperations', 'ExpressRoutePortAuthorizationsOperations', 'FirewallPoliciesOperations', 'FirewallPolicyRuleCollectionGroupsOperations', 'FirewallPolicyIdpsSignaturesOperations', 'FirewallPolicyIdpsSignaturesOverridesOperations', 'FirewallPolicyIdpsSignaturesFilterValuesOperations', 
'IpAllocationsOperations', 'IpGroupsOperations', 'LoadBalancersOperations', 'LoadBalancerBackendAddressPoolsOperations', 'LoadBalancerFrontendIPConfigurationsOperations', 'InboundNatRulesOperations', 'LoadBalancerLoadBalancingRulesOperations', 'LoadBalancerOutboundRulesOperations', 'LoadBalancerNetworkInterfacesOperations', 'LoadBalancerProbesOperations', 'NatGatewaysOperations', 'NetworkInterfaceIPConfigurationsOperations', 'NetworkInterfaceLoadBalancersOperations', 'NetworkInterfaceTapConfigurationsOperations', 'NetworkProfilesOperations', 'NetworkSecurityGroupsOperations', 'SecurityRulesOperations', 'DefaultSecurityRulesOperations', 'NetworkVirtualAppliancesOperations', 'VirtualApplianceSitesOperations', 'VirtualApplianceSkusOperations', 'InboundSecurityRuleOperations', 'NetworkWatchersOperations', 'PacketCapturesOperations', 'ConnectionMonitorsOperations', 'FlowLogsOperations', 'Operations', 'PrivateEndpointsOperations', 'AvailablePrivateEndpointTypesOperations', 'PrivateDnsZoneGroupsOperations', 'PrivateLinkServicesOperations', 'PublicIPPrefixesOperations', 'RouteFiltersOperations', 'RouteFilterRulesOperations', 'RouteTablesOperations', 'RoutesOperations', 'SecurityPartnerProvidersOperations', 'BgpServiceCommunitiesOperations', 'ServiceEndpointPoliciesOperations', 'ServiceEndpointPolicyDefinitionsOperations', 'ServiceTagsOperations', 'ServiceTagInformationOperations', 'UsagesOperations', 'VirtualNetworksOperations', 'SubnetsOperations', 'ResourceNavigationLinksOperations', 'ServiceAssociationLinksOperations', 'VirtualNetworkPeeringsOperations', 'VirtualNetworkGatewaysOperations', 'VirtualNetworkGatewayConnectionsOperations', 'LocalNetworkGatewaysOperations', 'VirtualNetworkGatewayNatRulesOperations', 'VirtualNetworkTapsOperations', 'VirtualRoutersOperations', 'VirtualRouterPeeringsOperations', 'VirtualWansOperations', 'VpnSitesOperations', 'VpnSiteLinksOperations', 'VpnSitesConfigurationOperations', 'VpnServerConfigurationsOperations', 
'ConfigurationPolicyGroupsOperations', 'VirtualHubsOperations', 'HubVirtualNetworkConnectionsOperations', 'VpnGatewaysOperations', 'VpnLinkConnectionsOperations', 'VpnConnectionsOperations', 'VpnSiteLinkConnectionsOperations', 'NatRulesOperations', 'P2SVpnGatewaysOperations', 'VpnServerConfigurationsAssociatedWithVirtualWanOperations', 'VirtualHubRouteTableV2SOperations', 'ExpressRouteGatewaysOperations', 'ExpressRouteConnectionsOperations', 'VirtualHubBgpConnectionOperations', 'VirtualHubBgpConnectionsOperations', 'VirtualHubIpConfigurationOperations', 'HubRouteTablesOperations', 'RoutingIntentOperations', 'WebApplicationFirewallPoliciesOperations', ]
true
true
f71d0fd842f2064dbce2597cdedbee45810477a0
192
py
Python
ticket_universe/charsets.py
lotify/ticket_universe
1947cc9d6a555a68af5b39d252cac3ecef06400c
[ "MIT" ]
4
2019-01-24T12:38:37.000Z
2019-03-26T12:36:18.000Z
ticket_universe/charsets.py
lotify/ticket_universe
1947cc9d6a555a68af5b39d252cac3ecef06400c
[ "MIT" ]
null
null
null
ticket_universe/charsets.py
lotify/ticket_universe
1947cc9d6a555a68af5b39d252cac3ecef06400c
[ "MIT" ]
null
null
null
def latin() -> [str]: """[A-Z]""" return list("ABCDEFGHIJKLMNOPQRSTUVWXYZ") def safe_latin() -> [str]: """[A-Z] excluding (O, I, L)""" return list("ABCDEFGHJKMNPQRSTUVWXYZ")
21.333333
45
0.583333
def latin() -> [str]: return list("ABCDEFGHIJKLMNOPQRSTUVWXYZ") def safe_latin() -> [str]: return list("ABCDEFGHJKMNPQRSTUVWXYZ")
true
true
f71d10310130fbb663c3c6a549057fdf4e5e6935
56
py
Python
app/config_combos/schema.py
rafiq10/rrhh_tdd_backend
fecbe4f3dd249b31f71e4b63904c565e207e45f9
[ "bzip2-1.0.6" ]
null
null
null
app/config_combos/schema.py
rafiq10/rrhh_tdd_backend
fecbe4f3dd249b31f71e4b63904c565e207e45f9
[ "bzip2-1.0.6" ]
2
2021-03-25T22:50:12.000Z
2021-04-30T20:53:22.000Z
app/config_combos/schema.py
rafiq10/rrhh_tdd_backend
fecbe4f3dd249b31f71e4b63904c565e207e45f9
[ "bzip2-1.0.6" ]
null
null
null
from .combos.combos import CombosModel, get_combo_tables
56
56
0.875
from .combos.combos import CombosModel, get_combo_tables
true
true
f71d123c09ba6000e80271689145557882b00944
12,061
py
Python
content/generate_folder_directory.py
lingcog/2019-CS109A
f1eaa62976fe989c3ad3f3ab4b8dd5d71574a2c3
[ "MIT" ]
442
2019-06-11T06:47:00.000Z
2022-03-12T11:19:31.000Z
content/generate_folder_directory.py
lelandroberts97/2019-CS109A
976da6b65c26fd3c5db285cbf9ec9cde92751a70
[ "MIT" ]
3
2019-09-23T17:32:51.000Z
2022-02-09T06:06:00.000Z
content/generate_folder_directory.py
lelandroberts97/2019-CS109A
976da6b65c26fd3c5db285cbf9ec9cde92751a70
[ "MIT" ]
486
2019-06-17T05:01:07.000Z
2022-03-13T20:30:44.000Z
import os import sys from datetime import datetime class GenerateStructure: def __init__(self, number_of_lectures, number_of_labs, number_of_homework, number_of_sections, number_of_advanced_sections, folders, default_directory, default_directory_lectures, default_directory_lecture_playground, default_directory_labs, default_directory_labs_playground, default_directory_homework, default_directory_homework_playground, default_directory_advanced_sections, default_directory_advanced_sections_playground, default_directory_sections_playground): self.number_of_lectures = number_of_lectures self.number_of_labs = number_of_labs self.number_of_homework = number_of_homework self.number_of_sections = number_of_sections self.number_of_advanced_sections = number_of_advanced_sections self.folders = folders self.default_directory = default_directory self.default_directory_lectures = default_directory_lectures self.default_directory_lecture_playground = default_directory_lecture_playground self.default_directory_labs = default_directory_labs self.default_directory_labs_playground = default_directory_labs_playground self.default_directory_homework = default_directory_homework self.default_directory_homework_playground = default_directory_homework_playground self.default_directory_advanced_sections = default_directory_advanced_sections self.default_directory_sections = default_directory_sections self.default_directory_sections_playground = default_directory_sections_playground @staticmethod def create_directory(directory, fold): os.makedirs(directory + fold) open(directory + fold + '/.placeholder', 'w').close() @staticmethod def create_index(directory, title, category, slug, i): with open(directory + "index.md", 'a')as index: index.write("Title: " + title + str(i) + ':\n' + "Category: " + category + '\n' + "Date: " + datetime.today().strftime('%Y-%m-%d') + '\n' + "Author: " + '\n' + "Slug: " + slug + str(i) + '\n' + "Tages: ADD TAGS HERE" + '\n\n\n' + "## Slides" ) # Create Lectures 
folders def create_lectures(self, directory, number_of_hw, folders): for i in range(1, number_of_hw+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lecture" + str(i)): os.makedirs(directory + "lecture" + str(i)) directory_lectures = directory + "lecture" + str(i) + '/' for fold in folders: self.create_directory(directory_lectures, fold) self.create_index(directory_lectures, "Lecture ", "lectures" , "lecture", i) else: print("The directory : '", directory + "lecture" + str(i), "' already exist.") # Create Lectures playground folders def create_lecture_playground(self, directory, number_of_lectures, folders): for i in range(1, number_of_lectures+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lecture" + str(i)): os.makedirs(directory + "lecture" + str(i)) directory_lectures = directory + "lecture" + str(i) + '/' for fold in folders: self.create_directory(directory_lectures, fold) else: print("The directory : '", directory + "lecture" + str(i), "' already exist.") # Create Labs folders def create_labs(self, directory, number_of_labs, folders): for i in range(1, number_of_labs+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lab" + str(i)): os.makedirs(directory + "lab" + str(i)) directory_lab = directory + "lab" + str(i) + '/' for fold in folders: self.create_directory(directory_lab, fold) self.create_index(directory_lab, "Lab ", "labs", "lab", i) else: print("The directory : '", directory + "lab" + str(i), "' already exist.") # Create Homework folders def create_homework(self, directory, number_of_labs, folders): for i in range(0, number_of_labs+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "how" + str(i)): os.makedirs(directory + "how" + str(i)) directory_homework = directory + "how" + str(i) + '/' for fold in folders: self.create_directory(directory_homework, fold) else: 
print("The directory : '", directory + "homework" + str(i), "' already exist.") # Create Sections folders def create_section(self, directory, sections, folders): for i in range(1, sections + 1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "section" + str(i)): os.makedirs(directory + "section" + str(i)) directory_section = directory + "section" + str(i) + '/' for fold in folders: self.create_directory(directory_section, fold) self.create_index(directory_section, "Sections ", "section", "section", i) else: print("The directory : '", directory + "Sections" + str(i), "' already exist.") # Create AdvancedSections folders def create_a_section(self, directory, advanced_sections, folders): for i in range(1, advanced_sections+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "a-sec" + str(i)): os.makedirs(directory + "a-sec" + str(i)) directory_a_section = directory + "a-sec" + str(i) + '/' for fold in folders: self.create_directory(directory_a_section, fold) self.create_index(directory_a_section, "Advanced Sections ", "a-sections", "a-sections", i) else: print("The directory : '", directory + "Advanced Sections" + str(i), "' already exist.") if __name__ == "__main__": number_of_lectures = 24 number_of_labs = 13 number_of_homework = 8 number_of_advanced_sections = 6 number_of_sections = 13 folders = ["data", "fig", "notes", "presentation"] default_directory = os.path.dirname(os.path.realpath(__file__)) + '/' default_directory_lectures = default_directory + "lectures/" default_directory_lecture_playground = default_directory + "lectures_playground/" default_directory_labs = default_directory + "labs/" default_directory_labs_playground = default_directory + "labs_playground/" default_directory_homework = default_directory + "homeworks/" default_directory_homework_playground = default_directory + "homeworks_playground/" default_directory_advanced_sections = default_directory + 
"a-sections/" default_directory_advanced_sections_playground = default_directory + "a_sections_playground/" default_directory_sections = default_directory + "sections/" default_directory_sections_playground = default_directory + "sections_playground/" try: print("The default values are :") print("Number of lectures: ", number_of_lectures) print("Number of labs: ", number_of_labs) print("Number of sections: ", number_of_sections) print("Number of a-sections: ", number_of_advanced_sections) print("Default directory: ", default_directory, '\n') change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ") if change == '': change = 'n' while change not in ('y', 'n'): change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ") if change == 'y': default_directory = input("Please enter the default directory: ") print("Default directory: ", default_directory, '\n') default_directory_lectures = default_directory + "lectures/" default_directory_lecture_playground = default_directory + "lecture_playground/" default_directory_labs = default_directory + "labs/" default_directory_labs_playground = default_directory + "lab_playground/" default_directory_homework = default_directory + "homework/" default_directory_homework_playground = default_directory + "homework_playground/" default_directory_advanced_sections = default_directory + "a-section/" number_of_lectures = int(input("Please enter the number of lectures: ")) print("Number of lectures: ", number_of_lectures, '\n') number_of_labs = int(input("Please enter the number of labs: ")) print("Number of labs: ", number_of_labs, '\n') number_of_homework = int(input("Please enter the number of homework: ")) print("Number of homework: ", number_of_homework, '\n') number_of_advanced_sections = int(input("Please enter the number of advanced sections: ")) print("Number of advanced sections: ", number_of_advanced_sections, '\n') ge = 
GenerateStructure(number_of_lectures, number_of_labs, number_of_homework, number_of_sections, number_of_advanced_sections, folders, default_directory, default_directory_lectures, default_directory_lecture_playground, default_directory_labs, default_directory_labs_playground, default_directory_homework, default_directory_homework_playground, default_directory_advanced_sections, default_directory_advanced_sections_playground, default_directory_sections_playground) ge.create_lectures(default_directory_lectures, number_of_lectures, folders) # Create Lectures folders ge.create_lecture_playground(default_directory_lecture_playground, number_of_lectures, folders) # Create Lectures playground folders ge.create_labs(default_directory_labs, number_of_labs, folders) # Create Labs folders ge.create_labs(default_directory_labs_playground, number_of_labs, folders) # Create Labs playground folders ge.create_homework(default_directory_homework, number_of_homework, folders) # Create Homework folders ge.create_homework(default_directory_homework_playground, number_of_homework, folders) # Create Homework playground folders ge.create_section(default_directory_sections, number_of_sections, folders) # Create sections folders ge.create_section(default_directory_sections_playground, number_of_sections, folders) # Create sections playground folders ge.create_a_section(default_directory_advanced_sections, number_of_advanced_sections, folders) # Create advanced sections folders ge.create_a_section(default_directory_advanced_sections_playground, number_of_advanced_sections, folders) # Create advanced sections folders except OSError as err: print("OS error: {0}".format(err)) except ValueError: print("Could not convert data to an integer.") except: print("Unexpected error:", sys.exc_info()[0]) raise
44.341912
118
0.643313
import os import sys from datetime import datetime class GenerateStructure: def __init__(self, number_of_lectures, number_of_labs, number_of_homework, number_of_sections, number_of_advanced_sections, folders, default_directory, default_directory_lectures, default_directory_lecture_playground, default_directory_labs, default_directory_labs_playground, default_directory_homework, default_directory_homework_playground, default_directory_advanced_sections, default_directory_advanced_sections_playground, default_directory_sections_playground): self.number_of_lectures = number_of_lectures self.number_of_labs = number_of_labs self.number_of_homework = number_of_homework self.number_of_sections = number_of_sections self.number_of_advanced_sections = number_of_advanced_sections self.folders = folders self.default_directory = default_directory self.default_directory_lectures = default_directory_lectures self.default_directory_lecture_playground = default_directory_lecture_playground self.default_directory_labs = default_directory_labs self.default_directory_labs_playground = default_directory_labs_playground self.default_directory_homework = default_directory_homework self.default_directory_homework_playground = default_directory_homework_playground self.default_directory_advanced_sections = default_directory_advanced_sections self.default_directory_sections = default_directory_sections self.default_directory_sections_playground = default_directory_sections_playground @staticmethod def create_directory(directory, fold): os.makedirs(directory + fold) open(directory + fold + '/.placeholder', 'w').close() @staticmethod def create_index(directory, title, category, slug, i): with open(directory + "index.md", 'a')as index: index.write("Title: " + title + str(i) + ':\n' + "Category: " + category + '\n' + "Date: " + datetime.today().strftime('%Y-%m-%d') + '\n' + "Author: " + '\n' + "Slug: " + slug + str(i) + '\n' + "Tages: ADD TAGS HERE" + '\n\n\n' + "## Slides" ) def 
create_lectures(self, directory, number_of_hw, folders): for i in range(1, number_of_hw+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lecture" + str(i)): os.makedirs(directory + "lecture" + str(i)) directory_lectures = directory + "lecture" + str(i) + '/' for fold in folders: self.create_directory(directory_lectures, fold) self.create_index(directory_lectures, "Lecture ", "lectures" , "lecture", i) else: print("The directory : '", directory + "lecture" + str(i), "' already exist.") def create_lecture_playground(self, directory, number_of_lectures, folders): for i in range(1, number_of_lectures+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lecture" + str(i)): os.makedirs(directory + "lecture" + str(i)) directory_lectures = directory + "lecture" + str(i) + '/' for fold in folders: self.create_directory(directory_lectures, fold) else: print("The directory : '", directory + "lecture" + str(i), "' already exist.") def create_labs(self, directory, number_of_labs, folders): for i in range(1, number_of_labs+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "lab" + str(i)): os.makedirs(directory + "lab" + str(i)) directory_lab = directory + "lab" + str(i) + '/' for fold in folders: self.create_directory(directory_lab, fold) self.create_index(directory_lab, "Lab ", "labs", "lab", i) else: print("The directory : '", directory + "lab" + str(i), "' already exist.") def create_homework(self, directory, number_of_labs, folders): for i in range(0, number_of_labs+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "how" + str(i)): os.makedirs(directory + "how" + str(i)) directory_homework = directory + "how" + str(i) + '/' for fold in folders: self.create_directory(directory_homework, fold) else: print("The directory : '", directory + "homework" + str(i), "' already exist.") def 
create_section(self, directory, sections, folders): for i in range(1, sections + 1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "section" + str(i)): os.makedirs(directory + "section" + str(i)) directory_section = directory + "section" + str(i) + '/' for fold in folders: self.create_directory(directory_section, fold) self.create_index(directory_section, "Sections ", "section", "section", i) else: print("The directory : '", directory + "Sections" + str(i), "' already exist.") def create_a_section(self, directory, advanced_sections, folders): for i in range(1, advanced_sections+1): if not os.path.exists(directory): os.makedirs(directory) if not os.path.exists(directory + "a-sec" + str(i)): os.makedirs(directory + "a-sec" + str(i)) directory_a_section = directory + "a-sec" + str(i) + '/' for fold in folders: self.create_directory(directory_a_section, fold) self.create_index(directory_a_section, "Advanced Sections ", "a-sections", "a-sections", i) else: print("The directory : '", directory + "Advanced Sections" + str(i), "' already exist.") if __name__ == "__main__": number_of_lectures = 24 number_of_labs = 13 number_of_homework = 8 number_of_advanced_sections = 6 number_of_sections = 13 folders = ["data", "fig", "notes", "presentation"] default_directory = os.path.dirname(os.path.realpath(__file__)) + '/' default_directory_lectures = default_directory + "lectures/" default_directory_lecture_playground = default_directory + "lectures_playground/" default_directory_labs = default_directory + "labs/" default_directory_labs_playground = default_directory + "labs_playground/" default_directory_homework = default_directory + "homeworks/" default_directory_homework_playground = default_directory + "homeworks_playground/" default_directory_advanced_sections = default_directory + "a-sections/" default_directory_advanced_sections_playground = default_directory + "a_sections_playground/" default_directory_sections = 
default_directory + "sections/" default_directory_sections_playground = default_directory + "sections_playground/" try: print("The default values are :") print("Number of lectures: ", number_of_lectures) print("Number of labs: ", number_of_labs) print("Number of sections: ", number_of_sections) print("Number of a-sections: ", number_of_advanced_sections) print("Default directory: ", default_directory, '\n') change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ") if change == '': change = 'n' while change not in ('y', 'n'): change = input("Do you want to change it ? Please press 'y' if you want or 'n' if you do not change it: ") if change == 'y': default_directory = input("Please enter the default directory: ") print("Default directory: ", default_directory, '\n') default_directory_lectures = default_directory + "lectures/" default_directory_lecture_playground = default_directory + "lecture_playground/" default_directory_labs = default_directory + "labs/" default_directory_labs_playground = default_directory + "lab_playground/" default_directory_homework = default_directory + "homework/" default_directory_homework_playground = default_directory + "homework_playground/" default_directory_advanced_sections = default_directory + "a-section/" number_of_lectures = int(input("Please enter the number of lectures: ")) print("Number of lectures: ", number_of_lectures, '\n') number_of_labs = int(input("Please enter the number of labs: ")) print("Number of labs: ", number_of_labs, '\n') number_of_homework = int(input("Please enter the number of homework: ")) print("Number of homework: ", number_of_homework, '\n') number_of_advanced_sections = int(input("Please enter the number of advanced sections: ")) print("Number of advanced sections: ", number_of_advanced_sections, '\n') ge = GenerateStructure(number_of_lectures, number_of_labs, number_of_homework, number_of_sections, number_of_advanced_sections, folders, 
default_directory, default_directory_lectures, default_directory_lecture_playground, default_directory_labs, default_directory_labs_playground, default_directory_homework, default_directory_homework_playground, default_directory_advanced_sections, default_directory_advanced_sections_playground, default_directory_sections_playground) ge.create_lectures(default_directory_lectures, number_of_lectures, folders) ge.create_lecture_playground(default_directory_lecture_playground, number_of_lectures, folders) ge.create_labs(default_directory_labs, number_of_labs, folders) ge.create_labs(default_directory_labs_playground, number_of_labs, folders) ge.create_homework(default_directory_homework, number_of_homework, folders) ge.create_homework(default_directory_homework_playground, number_of_homework, folders) ge.create_section(default_directory_sections, number_of_sections, folders) ge.create_section(default_directory_sections_playground, number_of_sections, folders) ge.create_a_section(default_directory_advanced_sections, number_of_advanced_sections, folders) ge.create_a_section(default_directory_advanced_sections_playground, number_of_advanced_sections, folders) except OSError as err: print("OS error: {0}".format(err)) except ValueError: print("Could not convert data to an integer.") except: print("Unexpected error:", sys.exc_info()[0]) raise
true
true
f71d1305afa552bee4f335ebedcfe627045c6a38
1,760
py
Python
src/pugnlp/scripts/bon_lsi.py
totalgood/pugnlp
290e5761b60257a0ac876d1eaa0f1256da945d9a
[ "MIT" ]
2
2019-06-01T10:08:45.000Z
2020-11-07T08:13:31.000Z
src/pugnlp/scripts/bon_lsi.py
totalgood/pugnlp
290e5761b60257a0ac876d1eaa0f1256da945d9a
[ "MIT" ]
null
null
null
src/pugnlp/scripts/bon_lsi.py
totalgood/pugnlp
290e5761b60257a0ac876d1eaa0f1256da945d9a
[ "MIT" ]
1
2018-08-28T20:12:09.000Z
2018-08-28T20:12:09.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Bot-or-not tweet LSA/LSI model .""" from __future__ import division, print_function, absolute_import, unicode_literals from builtins import ( # noqa bytes, dict, int, list, object, range, str, ascii, chr, hex, input, next, oct, open, pow, round, super, filter, map, zip) import logging import pandas as pd from nltk.tokenize.casual import casual_tokenize from gensim.corpora import Dictionary from gensim.models import LsiModel, TfidfModel logger = logging.getLogger(__name__) np = pd.np def main(Tweet=None): qs = Tweet.objects.filter(is_strict__gte=13) tweets = np.array(qs.values_list('pk', 'text', 'user__screen_name', 'user__is_bot')) tweets = pd.DataFrame(np.array(tweets), columns='pk text user is_bot'.split()) tweets = tweets.set_index('pk', drop=True) tweets['tokens'] = tweets.text.apply(casual_tokenize) vocab = Dictionary(tweets.tokens) tfidf = TfidfModel(dictionary=vocab, id2word=vocab) bows = pd.Series(vocab.doc2bow(toks) for toks in tweets.tokens) lsi = LsiModel(tfidf[bows], num_topics=80, id2word=vocab, extra_samples=100, power_iters=2) lsi.save('/home/hobs/src/hackor/twote/data/lsi{}x{}x{}.saved'.format(len(tweets), lsi.num_topics, lsi.num_terms)) topics = lsi[tfidf[bows]] topics = pd.DataFrame([dict(d) for d in topics], index=tweets.index, columns=range(80)) if __name__ == '__main__': try: from twote.models import Tweet except (ImportError, ModuleNotFoundError): try: from openchat.models import Tweet except (ImportError, ModuleNotFoundError): Tweet = object logger.warn('Unable to import a Tweet data model (ORM object)') main(Tweet)
36.666667
117
0.696023
from __future__ import division, print_function, absolute_import, unicode_literals from builtins import ( bytes, dict, int, list, object, range, str, ascii, chr, hex, input, next, oct, open, pow, round, super, filter, map, zip) import logging import pandas as pd from nltk.tokenize.casual import casual_tokenize from gensim.corpora import Dictionary from gensim.models import LsiModel, TfidfModel logger = logging.getLogger(__name__) np = pd.np def main(Tweet=None): qs = Tweet.objects.filter(is_strict__gte=13) tweets = np.array(qs.values_list('pk', 'text', 'user__screen_name', 'user__is_bot')) tweets = pd.DataFrame(np.array(tweets), columns='pk text user is_bot'.split()) tweets = tweets.set_index('pk', drop=True) tweets['tokens'] = tweets.text.apply(casual_tokenize) vocab = Dictionary(tweets.tokens) tfidf = TfidfModel(dictionary=vocab, id2word=vocab) bows = pd.Series(vocab.doc2bow(toks) for toks in tweets.tokens) lsi = LsiModel(tfidf[bows], num_topics=80, id2word=vocab, extra_samples=100, power_iters=2) lsi.save('/home/hobs/src/hackor/twote/data/lsi{}x{}x{}.saved'.format(len(tweets), lsi.num_topics, lsi.num_terms)) topics = lsi[tfidf[bows]] topics = pd.DataFrame([dict(d) for d in topics], index=tweets.index, columns=range(80)) if __name__ == '__main__': try: from twote.models import Tweet except (ImportError, ModuleNotFoundError): try: from openchat.models import Tweet except (ImportError, ModuleNotFoundError): Tweet = object logger.warn('Unable to import a Tweet data model (ORM object)') main(Tweet)
true
true
f71d14d5d94757d06616ea762afce34495659c34
6,980
py
Python
mlcomp/test/run_test.py
lisapm/mlpiper
74ad5ae343d364682cc2f8aaa007f2e8a1d84929
[ "Apache-2.0" ]
7
2019-04-08T02:31:55.000Z
2021-11-15T14:40:49.000Z
mlcomp/test/run_test.py
lisapm/mlpiper
74ad5ae343d364682cc2f8aaa007f2e8a1d84929
[ "Apache-2.0" ]
31
2019-02-22T22:23:26.000Z
2021-08-02T17:17:06.000Z
mlcomp/test/run_test.py
lisapm/mlpiper
74ad5ae343d364682cc2f8aaa007f2e8a1d84929
[ "Apache-2.0" ]
8
2019-03-15T23:46:08.000Z
2020-02-06T09:16:02.000Z
#!/usr/bin/env python3 import argparse from os.path import expanduser import glob import json import os import re import shutil import subprocess import tempfile from termcolor import cprint, colored class TestRunner: SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) def __init__(self, options): self._options = options self._main_egg_filepath = os.path.join(TestRunner.SCRIPT_DIR, '../dist/*.egg') self._test_root_path = tempfile.mkdtemp(suffix='_mlcomp_it') self._model_save_path = os.path.join(self._test_root_path, "saved-model") self._test_pipeline_path = self._get_pipeline_path() self._tmp_comps_egg_dir = tempfile.mkdtemp(suffix='_mlcomp_it_comps') self._pipeline_json = None if os.path.isdir(self._model_save_path): shutil.rmtree(self._model_save_path) print("\n*** " + colored("{}".format(os.path.basename(self._test_pipeline_path)), "cyan") + " ***\n") fmt = "{:<25} {}" print(fmt.format("Pipeline path:", self._test_pipeline_path)) print(fmt.format("Test dir:", self._test_root_path)) print(fmt.format("Components egg tmp dir:", self._tmp_comps_egg_dir)) print("") def _get_pipeline_path(self): if os.path.isabs(self._options.test): test_pipeline_path = self._options.test else: test_pipeline_path = TestRunner.SCRIPT_DIR + "/" + self._options.test if not test_pipeline_path.endswith('.json'): test_pipeline_path += '.json' if not os.path.isfile(test_pipeline_path): raise Exception("Pipeline test file not found! 
path: {}".format(test_pipeline_path)) return test_pipeline_path def go(self): try: self._create_main_egg() self._load_pipeline() self._create_components_egg() self._prepare_test_dir() self._execute_program() self._cleanup() cprint("\nTest passed successfully!\n", "green") except Exception as e: colored("Test failed!\n", "red") raise e def _create_main_egg(self): cmd = TestRunner.SCRIPT_DIR + '/../bin/create-egg.sh --silent' subprocess.check_call(cmd, shell=True) def _create_components_egg(self): dst_comp_tmp_dir = self._tmp_comps_egg_dir + '/parallelm/code_components' os.makedirs(dst_comp_tmp_dir) comp_names = set([e['type'] for e in self._pipeline_json['pipe']]) for comp_name in comp_names: src_comp_dir = self._options.comps_root + '/' + comp_name dst_comp_dir = dst_comp_tmp_dir + '/' + comp_name shutil.copytree(src_comp_dir, dst_comp_dir) shutil.copy(TestRunner.SCRIPT_DIR + '/setup.py', self._tmp_comps_egg_dir) open(dst_comp_tmp_dir + '/__init__.py', 'w').close() with open(dst_comp_tmp_dir + '/../__init__.py', 'w') as f: f.write("__import__('pkg_resources').declare_namespace(__name__)") create_egg_cmd = '{}/../bin/create-egg.sh --root={} --silent'.format(TestRunner.SCRIPT_DIR, self._tmp_comps_egg_dir) subprocess.check_call(create_egg_cmd, shell=True) for egg_filepath in glob.glob(self._tmp_comps_egg_dir + '/' + 'dist/*.egg'): shutil.copy(egg_filepath, self._test_root_path) def _load_pipeline(self): with open(self._test_pipeline_path, 'r') as f: content = f.read() pipeline_dir = os.path.realpath(os.path.dirname(self._test_pipeline_path)) revised_content = re.sub(r'\$__pipeline_dir__\$', pipeline_dir, content, flags=re.M) self._pipeline_json = json.loads(revised_content) def _prepare_test_dir(self): self._dst_test_driver_path = self._test_root_path + '/driver.py' main_py_path = os.path.join(TestRunner.SCRIPT_DIR, '../__main__.py') shutil.copyfile(main_py_path, self._dst_test_driver_path) for egg_filepath in glob.glob(self._main_egg_filepath): 
shutil.copy(egg_filepath, self._test_root_path) self._pipeline_json['systemConfig']['modelFileSinkPath'] = self._model_save_path self._dst_test_pipeline_path = self._test_root_path + '/' + os.path.basename(self._test_pipeline_path) with open(self._dst_test_pipeline_path, 'w') as f: json.dump(self._pipeline_json, f) def _execute_program(self): master = 'spark://localhost:7077' if self._options.local_cluster else 'local[*]' eggs = ','.join(glob.glob(self._test_root_path + '/*.egg')) spark_submit_tool = os.environ['SPARK_HOME'] + "/bin/spark-submit" submit_cmd = '{} --master {} --py-files {} {} exec -f {}'.format(spark_submit_tool, master, eggs, self._dst_test_driver_path, self._dst_test_pipeline_path) print("--- Start of Engine Output ---") with subprocess.Popen(submit_cmd, shell=True, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p: for line in p.stdout: print(line) print("--- End of Engine Output ---") if p.returncode != 0: print("Test failed!", "red") raise subprocess.CalledProcessError(returncode=p.returncode, cmd=p.args) subprocess.check_call(TestRunner.SCRIPT_DIR + '/../bin/cleanup.sh', shell=True) print("\n-----------------") def _cleanup(self): print("Cleaning up ... " + self._tmp_comps_egg_dir) shutil.rmtree(self._tmp_comps_egg_dir) should_clean = 'y' if self._options.ask_clean: should_clean = input("\nShould clean up test root path [{}]? [Y|n] " .format(self._test_root_path)).lower() if should_clean == 'y': print("Cleaning up ... 
" + self._test_root_path) shutil.rmtree(self._test_root_path) def parse_args(args): parser = argparse.ArgumentParser(description='Run full PySpark integration test') parser.add_argument('--test', default='pi-pipeline-rdd.json', help='test pipeline json file path') parser.add_argument('--comps-root', default=os.path.join(expanduser("~"), "dev/mlops-components/dev/connectable/pyspark/spark-context"), help='ml components root dir') parser.add_argument('--local-cluster', action="store_true", help='Specify whether to run test on local Spark cluster [default: embedded]') parser.add_argument('--ask-clean', action="store_true", default=False, help="Wait for user's confirmation before cleanup") args = parser.parse_args(args) return args def main(args=None): options = parse_args(args) TestRunner(options).go() if __name__ == '__main__': main()
40.114943
124
0.629656
import argparse from os.path import expanduser import glob import json import os import re import shutil import subprocess import tempfile from termcolor import cprint, colored class TestRunner: SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__)) def __init__(self, options): self._options = options self._main_egg_filepath = os.path.join(TestRunner.SCRIPT_DIR, '../dist/*.egg') self._test_root_path = tempfile.mkdtemp(suffix='_mlcomp_it') self._model_save_path = os.path.join(self._test_root_path, "saved-model") self._test_pipeline_path = self._get_pipeline_path() self._tmp_comps_egg_dir = tempfile.mkdtemp(suffix='_mlcomp_it_comps') self._pipeline_json = None if os.path.isdir(self._model_save_path): shutil.rmtree(self._model_save_path) print("\n*** " + colored("{}".format(os.path.basename(self._test_pipeline_path)), "cyan") + " ***\n") fmt = "{:<25} {}" print(fmt.format("Pipeline path:", self._test_pipeline_path)) print(fmt.format("Test dir:", self._test_root_path)) print(fmt.format("Components egg tmp dir:", self._tmp_comps_egg_dir)) print("") def _get_pipeline_path(self): if os.path.isabs(self._options.test): test_pipeline_path = self._options.test else: test_pipeline_path = TestRunner.SCRIPT_DIR + "/" + self._options.test if not test_pipeline_path.endswith('.json'): test_pipeline_path += '.json' if not os.path.isfile(test_pipeline_path): raise Exception("Pipeline test file not found! 
path: {}".format(test_pipeline_path)) return test_pipeline_path def go(self): try: self._create_main_egg() self._load_pipeline() self._create_components_egg() self._prepare_test_dir() self._execute_program() self._cleanup() cprint("\nTest passed successfully!\n", "green") except Exception as e: colored("Test failed!\n", "red") raise e def _create_main_egg(self): cmd = TestRunner.SCRIPT_DIR + '/../bin/create-egg.sh --silent' subprocess.check_call(cmd, shell=True) def _create_components_egg(self): dst_comp_tmp_dir = self._tmp_comps_egg_dir + '/parallelm/code_components' os.makedirs(dst_comp_tmp_dir) comp_names = set([e['type'] for e in self._pipeline_json['pipe']]) for comp_name in comp_names: src_comp_dir = self._options.comps_root + '/' + comp_name dst_comp_dir = dst_comp_tmp_dir + '/' + comp_name shutil.copytree(src_comp_dir, dst_comp_dir) shutil.copy(TestRunner.SCRIPT_DIR + '/setup.py', self._tmp_comps_egg_dir) open(dst_comp_tmp_dir + '/__init__.py', 'w').close() with open(dst_comp_tmp_dir + '/../__init__.py', 'w') as f: f.write("__import__('pkg_resources').declare_namespace(__name__)") create_egg_cmd = '{}/../bin/create-egg.sh --root={} --silent'.format(TestRunner.SCRIPT_DIR, self._tmp_comps_egg_dir) subprocess.check_call(create_egg_cmd, shell=True) for egg_filepath in glob.glob(self._tmp_comps_egg_dir + '/' + 'dist/*.egg'): shutil.copy(egg_filepath, self._test_root_path) def _load_pipeline(self): with open(self._test_pipeline_path, 'r') as f: content = f.read() pipeline_dir = os.path.realpath(os.path.dirname(self._test_pipeline_path)) revised_content = re.sub(r'\$__pipeline_dir__\$', pipeline_dir, content, flags=re.M) self._pipeline_json = json.loads(revised_content) def _prepare_test_dir(self): self._dst_test_driver_path = self._test_root_path + '/driver.py' main_py_path = os.path.join(TestRunner.SCRIPT_DIR, '../__main__.py') shutil.copyfile(main_py_path, self._dst_test_driver_path) for egg_filepath in glob.glob(self._main_egg_filepath): 
shutil.copy(egg_filepath, self._test_root_path) self._pipeline_json['systemConfig']['modelFileSinkPath'] = self._model_save_path self._dst_test_pipeline_path = self._test_root_path + '/' + os.path.basename(self._test_pipeline_path) with open(self._dst_test_pipeline_path, 'w') as f: json.dump(self._pipeline_json, f) def _execute_program(self): master = 'spark://localhost:7077' if self._options.local_cluster else 'local[*]' eggs = ','.join(glob.glob(self._test_root_path + '/*.egg')) spark_submit_tool = os.environ['SPARK_HOME'] + "/bin/spark-submit" submit_cmd = '{} --master {} --py-files {} {} exec -f {}'.format(spark_submit_tool, master, eggs, self._dst_test_driver_path, self._dst_test_pipeline_path) print("--- Start of Engine Output ---") with subprocess.Popen(submit_cmd, shell=True, stdout=subprocess.PIPE, bufsize=1, universal_newlines=True) as p: for line in p.stdout: print(line) print("--- End of Engine Output ---") if p.returncode != 0: print("Test failed!", "red") raise subprocess.CalledProcessError(returncode=p.returncode, cmd=p.args) subprocess.check_call(TestRunner.SCRIPT_DIR + '/../bin/cleanup.sh', shell=True) print("\n-----------------") def _cleanup(self): print("Cleaning up ... " + self._tmp_comps_egg_dir) shutil.rmtree(self._tmp_comps_egg_dir) should_clean = 'y' if self._options.ask_clean: should_clean = input("\nShould clean up test root path [{}]? [Y|n] " .format(self._test_root_path)).lower() if should_clean == 'y': print("Cleaning up ... 
" + self._test_root_path) shutil.rmtree(self._test_root_path) def parse_args(args): parser = argparse.ArgumentParser(description='Run full PySpark integration test') parser.add_argument('--test', default='pi-pipeline-rdd.json', help='test pipeline json file path') parser.add_argument('--comps-root', default=os.path.join(expanduser("~"), "dev/mlops-components/dev/connectable/pyspark/spark-context"), help='ml components root dir') parser.add_argument('--local-cluster', action="store_true", help='Specify whether to run test on local Spark cluster [default: embedded]') parser.add_argument('--ask-clean', action="store_true", default=False, help="Wait for user's confirmation before cleanup") args = parser.parse_args(args) return args def main(args=None): options = parse_args(args) TestRunner(options).go() if __name__ == '__main__': main()
true
true
f71d14e93643f0e1bfa895edafc0dd3f02d0b475
226
py
Python
setup.py
bethanymorin/smt-scrapy
50878b47094d0b4d72c483802d3aec9077c2b16b
[ "MIT" ]
null
null
null
setup.py
bethanymorin/smt-scrapy
50878b47094d0b4d72c483802d3aec9077c2b16b
[ "MIT" ]
3
2017-08-15T23:51:47.000Z
2017-09-26T18:27:55.000Z
setup.py
bethanymorin/smt-scrapy
50878b47094d0b4d72c483802d3aec9077c2b16b
[ "MIT" ]
null
null
null
# Automatically created by: shub deploy from setuptools import setup, find_packages setup( name='project', version='1.0', packages=find_packages(), entry_points={'scrapy': ['settings = scraper.settings']}, )
20.545455
61
0.69469
from setuptools import setup, find_packages setup( name='project', version='1.0', packages=find_packages(), entry_points={'scrapy': ['settings = scraper.settings']}, )
true
true
f71d157d8596c23bae0491b174ad13908d464306
300
py
Python
fructify/blueprints/debug.py
fffergal/fructify
f6043e157d3ecfce1cafba6d16769b21f6defb4b
[ "Apache-2.0" ]
null
null
null
fructify/blueprints/debug.py
fffergal/fructify
f6043e157d3ecfce1cafba6d16769b21f6defb4b
[ "Apache-2.0" ]
20
2020-03-14T16:05:43.000Z
2022-02-20T23:55:53.000Z
fructify/blueprints/debug.py
fffergal/fructify
f6043e157d3ecfce1cafba6d16769b21f6defb4b
[ "Apache-2.0" ]
null
null
null
from flask import Blueprint, request bp = Blueprint("debug", __name__) @bp.route("/api/v1/debug", methods=["DELETE", "GET", "POST"]) def debug(): if request.method == "DELETE": raise Exception if request.method != "POST": return request.args return request.get_data()
21.428571
61
0.64
from flask import Blueprint, request bp = Blueprint("debug", __name__) @bp.route("/api/v1/debug", methods=["DELETE", "GET", "POST"]) def debug(): if request.method == "DELETE": raise Exception if request.method != "POST": return request.args return request.get_data()
true
true
f71d16974deddb08bf8a27d6c9d768d917a81691
501
py
Python
beluga/utils/storage.py
doublefloyd/beluga
740bda376634945ef51bf1cf946fcbe002e9bc7f
[ "MIT" ]
20
2017-10-02T13:09:58.000Z
2022-03-28T20:50:35.000Z
beluga/utils/storage.py
doublefloyd/beluga
740bda376634945ef51bf1cf946fcbe002e9bc7f
[ "MIT" ]
187
2018-02-04T20:35:03.000Z
2021-01-27T15:04:18.000Z
beluga/utils/storage.py
doublefloyd/beluga
740bda376634945ef51bf1cf946fcbe002e9bc7f
[ "MIT" ]
12
2018-01-19T04:00:09.000Z
2022-03-28T16:44:17.000Z
import cloudpickle as pickle def save(sol_set=None, ocp=None, bvp=None, filename='data.beluga'): save_dict = {} if sol_set is not None: save_dict['solutions'] = sol_set if ocp is not None: save_dict['ocp'] = ocp if bvp is not None: save_dict['bvp'] = bvp with open(filename, 'wb') as file: pickle.dump(save_dict, file) def load(filename): with open(filename, 'rb') as file: save_dict = pickle.load(file) return save_dict
17.892857
67
0.616766
import cloudpickle as pickle def save(sol_set=None, ocp=None, bvp=None, filename='data.beluga'): save_dict = {} if sol_set is not None: save_dict['solutions'] = sol_set if ocp is not None: save_dict['ocp'] = ocp if bvp is not None: save_dict['bvp'] = bvp with open(filename, 'wb') as file: pickle.dump(save_dict, file) def load(filename): with open(filename, 'rb') as file: save_dict = pickle.load(file) return save_dict
true
true