text
stringlengths
3
1.05M
#!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.

from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *


class SignRawTransactionsTest(BitcoinTestFramework):
    """Tests transaction signing via RPC command "signrawtransaction"."""

    def setup_chain(self):
        # Single-node chain is enough: signing never hits the network.
        print('Initializing test directory ' + self.options.tmpdir)
        initialize_chain_clean(self.options.tmpdir, 1)

    def setup_network(self, split=False):
        self.nodes = start_nodes(1, self.options.tmpdir)
        self.is_network_split = False

    def successful_signing_test(self):
        """Creates and signs a valid raw transaction with one input.

        Expected results:
        1) The transaction has a complete set of signatures
        2) No script verification error occurred"""
        private_keys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']

        spendable = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
             'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
        ]
        destinations = {'xxE2Q5yK8qgJnytakFyeAoxXirWJH9eaSW': 0.1}

        raw_tx = self.nodes[0].createrawtransaction(spendable, destinations)
        signed = self.nodes[0].signrawtransaction(raw_tx, spendable, private_keys)

        # 1) The transaction has a complete set of signatures
        assert 'complete' in signed
        assert_equal(signed['complete'], True)

        # 2) No script verification error occurred
        assert 'errors' not in signed

    def script_verification_error_test(self):
        """Creates and signs a raw transaction with valid (vin 0), invalid (vin 1)
        and one missing (vin 2) input script.

        Expected results:
        3) The transaction has no complete set of signatures
        4) Two script verification errors occurred
        5) Script verification errors have certain properties
           ("txid", "vout", "scriptSig", "sequence", "error")
        6) The verification errors refer to the invalid (vin 1) and missing
           input (vin 2)"""
        private_keys = ['cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N']

        spendable = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
             'vout': 0},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547',
             'vout': 7},
            # Missing scriptPubKey
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
             'vout': 1},
        ]
        prev_scripts = [
            # Valid pay-to-pubkey script
            {'txid': '9b907ef1e3c26fc71fe4a4b3580bc75264112f95050014157059c736f0202e71',
             'vout': 0,
             'scriptPubKey': '76a91460baa0f494b38ce3c940dea67f3804dc52d1fb9488ac'},
            # Invalid script
            {'txid': '5b8673686910442c644b1f4993d8f7753c7c8fcb5c87ee40d56eaeef25204547',
             'vout': 7,
             'scriptPubKey': 'badbadbadbad'},
        ]
        destinations = {'xxE2Q5yK8qgJnytakFyeAoxXirWJH9eaSW': 0.1}

        raw_tx = self.nodes[0].createrawtransaction(spendable, destinations)
        signed = self.nodes[0].signrawtransaction(raw_tx, prev_scripts, private_keys)

        # 3) The transaction has no complete set of signatures
        assert 'complete' in signed
        assert_equal(signed['complete'], False)

        # 4) Two script verification errors occurred
        assert 'errors' in signed
        assert_equal(len(signed['errors']), 2)

        # 5) Script verification errors have certain properties
        for field in ('txid', 'vout', 'scriptSig', 'sequence', 'error'):
            assert field in signed['errors'][0]

        # 6) The verification errors refer to the invalid (vin 1) and
        #    missing input (vin 2)
        for error_index, input_index in ((0, 1), (1, 2)):
            assert_equal(signed['errors'][error_index]['txid'],
                         spendable[input_index]['txid'])
            assert_equal(signed['errors'][error_index]['vout'],
                         spendable[input_index]['vout'])

    def run_test(self):
        self.successful_signing_test()
        self.script_verification_error_test()


if __name__ == '__main__':
    SignRawTransactionsTest().main()
# Read one line from the user and report which character-class predicates
# (alpha / numeric / alphanumeric / lower / upper) the string satisfies.
entered = input('Enter something: ')
print('The type of the entered value is: {}'.format(type(entered)))

# (label, predicate) pairs — evaluated lazily so output order matches labels.
checks = (
    ('Alpha', entered.isalpha),
    ('Numeric', entered.isnumeric),
    ('AlphaNum', entered.isalnum),
    ('Lower', entered.islower),
    ('Upper', entered.isupper),
)
for name, predicate in checks:
    print('{}: {}'.format(name, predicate()))
""" Defines Neural Networks """ import torch import torch.nn.functional as F import torch.nn as nn import numpy as np from torch.autograd.variable import Variable from ..utils.generators.mixed_len_generator import Parser, \ SimulateStack from typing import List from .mdn import MixtureDensityNetwork from globals import device class Encoder(nn.Module): def __init__(self, dropout=0.2): """ Encoder for 2D CSGNet. :param dropout: dropout """ super(Encoder, self).__init__() self.p = dropout self.conv1 = nn.Conv2d(1, 8, 3, padding=(1, 1)) self.conv2 = nn.Conv2d(8, 16, 3, padding=(1, 1)) self.conv3 = nn.Conv2d(16, 32, 3, padding=(1, 1)) self.drop = nn.Dropout(dropout) def encode(self, x): x = F.max_pool2d(self.drop(F.relu(self.conv1(x))), (2, 2)) x = F.max_pool2d(self.drop(F.relu(self.conv2(x))), (2, 2)) x = F.max_pool2d(F.relu(self.conv3(x)), (2, 2)) return x def num_flat_features(self, x): size = x.size()[1:] # all dimensions except the batch dimension num_features = 1 for s in size: num_features *= s return num_features class ImitateJoint(nn.Module): def __init__(self, input_size, hidden_size, output_size, encoder, time_steps=3, dropout=0.5): """ Defines RNN structure that takes features encoded by CNN and produces program instructions at every time step. 
:param num_draws: Total number of tokens present in the dataset or total number of operations to be predicted + a stop symbol = 400 :param canvas_shape: canvas shape :param dropout: dropout :param hd_sz: rnn hidden size :param input_size: input_size (CNN feature size) to rnn :param encoder: Feature extractor network object :param mode: Mode of training, RNN, BDRNN or something else :param num_layers: Number of layers to rnn :param time_steps: max length of program """ super(ImitateJoint, self).__init__() self.hd_sz = hidden_size self.in_sz = input_size self.encoder = encoder self.out_sz = output_size # Dense layer to project input ops(labels) to input of rnn (EDITED TO EMBEDDING) self.emb_size = 64 self.param_size = 64 self.embedding = nn.Embedding(8, self.emb_size) self.dense_params = nn.Linear(3, self.param_size) self.input_op_sz = self.emb_size + self.param_size self.rnn = nn.GRU( input_size=self.in_sz + self.input_op_sz, hidden_size=self.hd_sz, batch_first=True) self.dense_fc_1 = nn.Linear( in_features=self.hd_sz, out_features=self.hd_sz) self.dense_output = nn.Linear( in_features=self.hd_sz, out_features=self.out_sz) self.drop = nn.Dropout(dropout) self.tf_drop = nn.Dropout(1) self.relu = nn.ReLU() self.mdn = MixtureDensityNetwork(self.out_sz, 3, 10) def forward(self, data, input_op, program_len): """ returns (batch, timesteps, features) """ assert data.size()[0] == program_len + 1, "Incorrect stack size!!" 
batch_size = data.size()[1] h = Variable(torch.zeros(1, batch_size, self.hd_sz)).cuda() x_f = self.encoder.encode(data[-1, :, 0:1, :, :]) x_f = x_f.view(batch_size, 1, self.in_sz) # remove stop token for input to decoder input_op = input_op[:, :-1, :] input_params = self.dense_params(input_op[:, :, 1:]) input_type = self.embedding(input_op[:, :, 0].long()) input_op_rnn = self.relu(self.tf_drop(torch.cat([input_type, input_params], dim=2))) x_f = x_f.repeat(1, program_len+1, 1) input = torch.cat((self.drop(x_f), input_op_rnn), 2) output, h = self.rnn(input, h) output = self.relu(self.dense_fc_1(self.drop(output))) output = self.dense_output(self.drop(output)) return output def test(self, data, input_op, program_len): batch_size = data.size()[1] h = Variable(torch.zeros(1, batch_size, self.hd_sz)).cuda() x_f = self.encoder.encode(data[-1, :, 0:1, :, :]) x_f = x_f.view(batch_size, self.in_sz) outputs = [] last_output = input_op[:, 0, :] for timestep in range(0, program_len + 1): # X_f is always input to the network at every time step # along with previous predicted label # round params to look like quantized training data ONLY FOR TRAINING ON SYN DATA REOMVE AFTER last_output[:, 1:3] = torch.clamp(torch.round(last_output[:, 1:3] / 8) * 8, 8, 56) last_output[:, 3:] = torch.clamp(torch.round(last_output[:, 3:] / 4) * 4, 8, 32) input_params = self.dense_params(last_output[:, 1:]) input_type = self.embedding(last_output[:, 0].long()) # (timesteps, batch, features) # input_op_rnn = self.relu(torch.cat([input_type, input_params], dim=1)) input_op_rnn = torch.zeros((batch_size, 128)).to(device) input = torch.cat((self.drop(x_f), input_op_rnn), 1).reshape((batch_size, 1, -1)) rnn_out, h = self.rnn(input, h) hd = self.relu(self.dense_fc_1(self.drop(rnn_out[:, 0]))) output = self.dense_output(self.drop(hd)) type = torch.argmax(output[:, :8], dim=1).float() params = F.relu(self.mdn.sample(output)) print(params.shape) last_output = torch.cat([type.reshape((batch_size, 1)), 
params], dim=1) outputs.append(output) return torch.stack(outputs).permute(1, 0, 2) def loss_function(self, outputs, labels, program_len): # remove start token from label labels = labels[:, 1:, :] type_loss = F.cross_entropy(outputs[:, :, :8].permute(0, 2, 1), labels[:, :, 0].long()) param_loss = 0 for i in range(program_len + 1): param_loss += self.mdn.loss(outputs[:, i], labels[:, i, 1:]).mean() # scaling factor chosen to make param_loss and type_loss about equal param_loss *= 0.01 return param_loss + type_loss class ParseModelOutput: def __init__(self, unique_draw, stack_size, canvas_shape , mdn): """ This class parses complete output from the network which are in joint fashion. This class can be used to generate final canvas and expressions. :param unique_draws: Unique draw/op operations in the current dataset :param stack_size: Stack size :param steps: Number of steps in the program :param canvas_shape: Shape of the canvases """ self.unique_draws = unique_draw self.canvas_shape = canvas_shape self.stack_size = stack_size self.Parser = Parser() self.sim = SimulateStack(self.stack_size, self.canvas_shape) self.mdn = mdn def get_final_canvas(self, outputs: List, if_just_expressions=False, if_pred_images=False): """ Takes the raw output from the network and returns the predicted canvas. The steps involve parsing the outputs into expressions, decoding expressions, and finally producing the canvas using intermediate stacks. 
:param if_just_expressions: If only expression is required than we just return the function after calculating expressions :param outputs: List, each element correspond to the output from the network :return: stack: Predicted final stack for correct programs :return: correct_programs: Indices of correct programs """ batch_size = outputs.size()[0] steps = outputs.size()[1] # Initialize empty expression string, len equal to batch_size correct_programs = [] expressions = [""] * batch_size type_labels = torch.argmax(outputs[:, :, :8], dim=2).data.cpu().numpy() for j in range(batch_size): for i in range(steps): if type_labels[j][i] == 0: expressions[j] += "+" if type_labels[j][i] == 1: expressions[j] += "*" if type_labels[j][i] == 2: expressions[j] += "-" if type_labels[j][i] == 3: expressions[j] += "$" if type_labels[j][i] > 3: params = F.relu(self.mdn.sample(outputs[j:j+1, i])) params = params.cpu().numpy().reshape((-1,)) params = str([int(x) for x in params])[1:-1].replace(" ", "") if type_labels[j][i] == 4: expressions[j] += f"c({params})" if type_labels[j][i] == 5: expressions[j] += f"s({params})" if type_labels[j][i] == 6: expressions[j] += f"t({params})" # Remove the stop symbol and later part of the expression for index, exp in enumerate(expressions): expressions[index] = exp.split("$")[0] if if_just_expressions: return expressions stacks = [] for index, exp in enumerate(expressions): print(exp) program = self.Parser.parse(exp) if validity(program, len(program), len(program) - 1): correct_programs.append(index) else: if if_pred_images: # if you just want final predicted image stack = np.zeros((self.canvas_shape[0], self.canvas_shape[1])) else: stack = np.zeros( (self.steps + 1, self.stack_size, self.canvas_shape[0], self.canvas_shape[1])) stacks.append(stack) continue # Check the validity of the expressions self.sim.generate_stack(program) stack = self.sim.stack_t stack = np.stack(stack, axis=0) if if_pred_images: stacks.append(stack[-1, 0, :, :]) else: 
stacks.append(stack) if len(stacks) == 0: return None if if_pred_images: stacks = np.stack(stacks, 0).astype(dtype=np.bool) else: stacks = np.stack(stacks, 1).astype(dtype=np.bool) return stacks, correct_programs, expressions def expression2stack(self, expressions: List): """Assuming all the expression are correct and coming from groundtruth labels. Helpful in visualization of programs :param expressions: List, each element an expression of program """ stacks = [] for index, exp in enumerate(expressions): program = self.Parser.parse(exp) self.sim.generate_stack(program) stack = self.sim.stack_t stack = np.stack(stack, axis=0) stacks.append(stack) stacks = np.stack(stacks, 1).astype(dtype=np.float32) return stacks def labels2exps(self, labels: np.ndarray, steps: int): """ Assuming grountruth labels, we want to find expressions for them :param labels: Grounth labels batch_size x time_steps :return: expressions: Expressions corresponding to labels """ if isinstance(labels, np.ndarray): batch_size = labels.shape[0] else: batch_size = labels.size()[0] labels = labels.data.cpu().numpy() # Initialize empty expression string, len equal to batch_size correct_programs = [] expressions = [""] * batch_size for j in range(batch_size): for i in range(steps): expressions[j] += self.unique_draws[labels[j, i]] return expressions def validity(program: List, max_time: int, timestep: int): """ Checks the validity of the program. In short implements a pushdown automaton that accepts valid strings. 
:param program: List of dictionary containing program type and elements :param max_time: Max allowed length of program :param timestep: Current timestep of the program, or in a sense length of program # at evey index :return: """ num_draws = 0 num_ops = 0 for i, p in enumerate(program): if p["type"] == "draw": # draw a shape on canvas kind of operation num_draws += 1 elif p["type"] == "op": # +, *, - kind of operation num_ops += 1 elif p["type"] == "stop": # Stop symbol, no need to process further if num_draws > ((len(program) - 1) // 2 + 1): return False if not (num_draws > num_ops): return False return (num_draws - 1) == num_ops if num_draws <= num_ops: # condition where number of operands are lesser than 2 return False if num_draws > (max_time // 2 + 1): # condition for stack over flow return False if (max_time - 1) == timestep: return (num_draws - 1) == num_ops return True
/* -------------------------------------------------------------------------- */
/* Copyright 2002-2018, OpenNebula Project, OpenNebula Systems                */
/*                                                                            */
/* Licensed under the Apache License, Version 2.0 (the "License"); you may    */
/* not use this file except in compliance with the License. You may obtain    */
/* a copy of the License at                                                   */
/*                                                                            */
/* http://www.apache.org/licenses/LICENSE-2.0                                 */
/*                                                                            */
/* Unless required by applicable law or agreed to in writing, software        */
/* distributed under the License is distributed on an "AS IS" BASIS,          */
/* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   */
/* See the License for the specific language governing permissions and        */
/* limitations under the License.                                             */
/* -------------------------------------------------------------------------- */

// Sunstone widget: renders per-disk size inputs for a VM template and
// retrieves the (possibly resized) disk definitions back from the form.
define(function(require){
  var Locale = require("utils/locale");
  var Config = require("sunstone-config");
  var OpenNebula = require("opennebula");
  var OpenNebulaImage = require("opennebula/image");
  var UserInputs = require("utils/user-inputs");
  var WizardFields = require("utils/wizard-fields");
  var DisksResizeTemplate = require("hbs!./disks-resize/html");
  var Humanize = require("utils/humanize");

  // Public API of this module.
  return {
    "insert": _insert,
    "retrieve": _retrieve
  };

  /**
   * Recomputes the estimated disk cost and total size from the current form
   * values and writes them into the .cost_value / #quotas-disks elements.
   * @param context   jQuery element containing the .diskContainer rows
   * @param disk_cost cost per size unit
   * @param callback  optional, invoked after the cost has been updated
   */
  function _calculateCost(context, disk_cost, callback){
    var cost = 0;
    var totalSize = 0;
    $(".diskContainer", context).each(function(){
      var size = 0;
      var fields = WizardFields.retrieve(this);
      if (fields.SIZE != undefined){
        // User-entered size takes precedence.
        size = fields.SIZE;
      } else{
        // Fall back to the size stored on the row's template disk.
        // NOTE(review): `disk` has no var/let — leaks to global scope.
        disk = $(this).data("template_disk");
        if (disk != undefined && disk["SIZE"] != undefined){
          size = disk["SIZE"];
        }
      }
      totalSize += parseFloat(size);
      cost += size * disk_cost;
      // Snapshots are charged at the same per-unit rate (precomputed per row).
      cost += $(this).data("disk_snapshot_total_cost");
    });
    $(".cost_value", context).text(cost.toFixed(6));
    $("#quotas-disks", context).text(totalSize.toFixed(2));
    if(callback != undefined){
      callback();
    }
  }

  /**
   * @param {Object} opts - template_json: extended info template (with DISK/SIZE)
   *                      - disksContext: jquery selector, where to place the html
   *                      - force_persistent {bool}: mark all disks as if they
   *                        were persistent, disabling resize inputs
   *                      - cost_callback: function to call when the cost changes
   */
  function _insert(opts) {
    var disksContext = opts.disksContext;
    var template_disk = opts.template_json.VMTEMPLATE.TEMPLATE.DISK;
    // DISK may be a single object or an array; normalize to an array.
    var disks = [];
    if ($.isArray(template_disk)) {
      disks = template_disk;
    } else if (!$.isEmptyObject(template_disk)) {
      disks = [template_disk];
    }

    // Optional base template: its disks are kept as the rows' backing data.
    if (opts.template_base_json && opts.template_base_json.VMTEMPLATE) {
      var template_base_disk = opts.template_base_json.VMTEMPLATE.TEMPLATE.DISK;
      var disks_base = [];
      if ($.isArray(template_base_disk)) {
        disks_base = template_base_disk;
      } else if (!$.isEmptyObject(template_base_disk)) {
        disks_base = [template_base_disk];
      }
    }

    if (disks.length > 0) {
      disksContext.html(DisksResizeTemplate());

      var disk_cost = opts.template_json.VMTEMPLATE.TEMPLATE.DISK_COST;
      if (disk_cost == undefined) {
        disk_cost = Config.onedConf.DEFAULT_COST.DISK_COST;
      }

      // Re-bind the change handler from scratch.
      disksContext.off("change", "input");
      if (Config.isFeatureEnabled("showback")) {
        if(disk_cost != 0){
          $(".provision_create_template_disk_cost_div", disksContext).show();
        }
        disksContext.on("change", "input", function(){
          _calculateCost(disksContext, disk_cost, opts.cost_callback);
        });
        _calculateCost(disksContext, disk_cost, opts.cost_callback);
      } else {
        $(".provision_create_template_disk_cost_div", disksContext).hide();
      }

      var diskContext;
      $(".disksContainer", disksContext).html("");
      $.each(disks, function(disk_id, disk) {
        // One row per disk: label + slider placeholder.
        diskContext = $(
          "<div class=\"row diskContainer\">"+
            "<div class=\"small-12 columns\">"+
              "<label></label>"+
            "</div>"+
            "<div class=\"large-12 columns diskSlider\">" +
            "</div>" +
          "</div>").appendTo($(".disksContainer", disksContext));

        if (disks_base) {
          // Mirror the extended sizes onto the base disk and keep it as the
          // row's template so _retrieve can compare against ORIGINAL_SIZE.
          disks_base[disk_id].SIZE = disk.SIZE;
          disks_base[disk_id].ORIGINAL_SIZE = disk.ORIGINAL_SIZE;
          diskContext.data("template_disk", disks_base[disk_id]);
        }

        var disk_snapshot_total_size = 0;
        if (disk.DISK_SNAPSHOT_TOTAL_SIZE != undefined) {
          disk_snapshot_total_size = parseInt(disk.DISK_SNAPSHOT_TOTAL_SIZE);
        }
        diskContext.data("disk_snapshot_total_size", disk_snapshot_total_size);
        diskContext.data("disk_snapshot_total_cost", disk_snapshot_total_size * disk_cost);

        // A disk without an image reference is a volatile disk.
        var volatile = (disk.IMAGE == undefined && disk.IMAGE_ID == undefined);
        var label;
        if (volatile){
          label = Locale.tr("Volatile Disk");
        } else {
          label = disk.IMAGE ? disk.IMAGE : Locale.tr("Image was not found");
        }
        $("label", diskContext).text(Locale.tr("DISK") + " " + disk_id + ": " + label);

        var persistent = (
          opts.force_persistent ||
          (disk.PERSISTENT && disk.PERSISTENT.toUpperCase() == "YES"));

        // Persistent disks and CDROMs cannot be resized.
        var disabled = (
          persistent ||
          (disk.TYPE && OpenNebulaImage.TYPES[disk.TYPE] == OpenNebulaImage.TYPES.CDROM));

        if (persistent){
          $("label", diskContext).append("<i class=\"disk-resize-icon has-tip left fas fa-lg fa-floppy-o\" title=\"" +
            Locale.tr("Persistent image. The changes will be saved back to the datastore after the VM is shut down") +
            "\"></i>");
        }else{
          $("label", diskContext).append("<i class=\"disk-resize-icon has-tip left fas fa-lg fa-recycle\" title=\"" +
            Locale.tr("Non-persistent disk. The changes will be lost once the VM is shut down") +
            "\"></i>");
        }

        // Status square: image state color, or red when the image is missing.
        if (disk.IMAGE_STATE){
          var color_class = OpenNebulaImage.stateColor(disk.IMAGE_STATE) + "-color";
          $("label", diskContext).append("<i class=\""+color_class+" disk-resize-icon has-tip left fas fa-square\" title=\"" +
            Locale.tr("Image state: ") + OpenNebulaImage.stateStr(disk.IMAGE_STATE) +
            "\"></i>");
        } else if (disk.IMAGE || disk.IMAGE_ID) {
          var color_class = "error-color";
          $("label", diskContext).append("<i class=\""+color_class+" disk-resize-icon has-tip left fas fa-square\" title=\"" +
            Locale.tr("Image was not found") +
            "\"></i>");
        } // else is volatile, does not have state

        var attr;
        if (disabled){
          // Fixed (read-only) size display.
          if (disk.SIZE){
            attr = UserInputs.parse("SIZE","O|fixed|"+label+"||"+disk.SIZE);
          } else {
            attr = UserInputs.parse("SIZE","O|fixed|"+label+"||-");
          }
        } else {
          if (disk.SIZE != undefined){
            // Range from original size to size + 500GB
            // NOTE(review): comment says 500GB but the code adds 1024GB.
            var min = parseInt(disk.SIZE);
            var max = min + Humanize.sizeToMB("1024GB");
            attr = UserInputs.parse(
              "SIZE",
              "O|range|"+label+"|"+min+".."+max+"|"+min);
          } else {
            attr = UserInputs.parse(
              "SIZE",
              "M|number|"+label+"||");
          }
        }

        attr.max_value = "";
        if(!opts.uinput_mb){
          $(".diskSlider", diskContext).html(UserInputs.attributeInput(attr));
        } else {
          UserInputs.insertAttributeInputMB(attr, $(".diskSlider", diskContext));
        }
      });
    } else {
      disksContext.html("");
    }
  }

  /**
   * Reads the disk rows back from the form.
   * @param context jQuery element previously filled by _insert
   * @return array of disk objects; SIZE is dropped for unchanged image-backed
   *         disks, ORIGINAL_SIZE is always stripped.
   */
  function _retrieve(context) {
    var disks = [];
    var disk;
    $(".diskContainer", context).each(function(){
      if ($(this).data("template_disk")) {
        disk = $(this).data("template_disk");
        var fields = WizardFields.retrieve(this);
        if (disk["SIZE"] && fields["SIZE"] &&
            disk["ORIGINAL_SIZE"] === fields["SIZE"] &&
            fields["SIZE"] === disk["SIZE"]){
          // Size untouched: let the image's own size apply.
          if (disk["IMAGE_ID"] || disk["IMAGE_NAME"]){
            delete disk["SIZE"];
          }
          delete disk["ORIGINAL_SIZE"];
          disks.push(disk);
        } else {
          // Size changed: deep-copy the disk and keep the new SIZE.
          var diskAux = $.extend(true, {}, disk);
          diskAux["SIZE"] = fields["SIZE"];
          delete diskAux["ORIGINAL_SIZE"];
          disks.push(diskAux);
        }
      }
    });
    return disks;
  }
});
function getPrevalenceScore(ocurrency, damage) { var json_matrix = { "-101,-101": { "-101,-101": -100, "-76,-100": -90, "-51,-75": -80, "-26,-50": -70, "-1,-25": -60, "0,0": -50, "1,25": -40, "26,50": -30, "51,75": -20, "76,100": -10, "101,101": 0 }, "-76,-100": { "-101,-101": -90, "-76,-100": -80, "-51,-75": -70, "-26,-50": -60, "-1,-25": -50, "0,0": -40, "1,25": -30, "26,50": -20, "51,75": -10, "76,100": 0, "101,101": 10 }, "-51,-75": { "-101,-101": -80, "-76,-100": -70, "-51,-75": -60, "-26,-50": -50, "-1,-25": -40, "0,0": -30, "1,25": -20, "26,50": -10, "51,75": 0, "76,100": 10, "101,101": 20 }, "-26,-50": { "-101,-101": -70, "-76,-100": -60, "-51,-75": -50, "-26,-50": -40, "-1,-25": -30, "0,0": -20, "1,25": -10, "26,50": 0, "51,75": 10, "76,100": 20, "101,101": 30 }, "-1,-25": { "-101,-101": -60, "-76,-100": -50, "-51,-75": -40, "-26,-50": -30, "-1,-25": -20, "0,0": -10, "1,25": 0, "26,50": 10, "51,75": 20, "76,100": 30, "101,101": 40 }, "0,0": { "-101,-101": -50, "-76,-100": -40, "-51,-75": -30, "-26,-50": -20, "-1,-25": -10, "0,0": 0, "1,25": 10, "26,50": 20, "51,75": 30, "76,100": 40, "101,101": 50 }, "1,25": { "-101,-101": -40, "-76,-100": -30, "-51,-75": -20, "-26,-50": -10, "-1,-25": 0, "0,0": 10, "1,25": 20, "26,50": 30, "51,75": 40, "76,100": 50, "101,101": 60 }, "26,50": { "-101,-101": -30, "-76,-100": -20, "-51,-75": -10, "-26,-50": 0, "-1,-25": 10, "0,0": 20, "1,25": 30, "26,50": 40, "51,75": 50, "76,100": 60, "101,101": 70 }, "51,75": { "-101,-101": -20, "-76,-100": -10, "-51,-75": 0, "-26,-50": 10, "-1,-25": 20, "0,0": 30, "1,25": 40, "26,50": 50, "51,75": 60, "76,100": 70, "101,101": 80 }, "76,100": { "-101,-101": -10, "-76,-100": 0, "-51,-75": 10, "-26,-50": 20, "-1,-25": 30, "0,0": 40, "1,25": 50, "26,50": 60, "51,75": 70, "76,100": 80, "101,101": 90 }, "101,101": { "-101,-101": 0, "-76,-100": 10, "-51,-75": 20, "-26,-50": 30, "-1,-25": 40, "0,0": 50, "1,25": 60, "26,50": 70, "51,75": 80, "76,100": 90, "101,101": 100 } } var $score = null; var 
$resDamage = function(ocurrencyKey) { let $result = null; for (damageKey in json_matrix[ocurrencyKey]) { let damageLimit = damageKey.split(','); if ((damage == 0) && (damage == damageLimit[0])) { $result = json_matrix[ocurrencyKey][damageKey]; } else if ((damage >= 101) && (damage >= damageLimit[0])) { $result = json_matrix[ocurrencyKey][damageKey]; } else if ((damage <= -101) && (damage <= damageLimit[0])) { $result = json_matrix[ocurrencyKey][damageKey]; } else if (((damage >= 1) && (damage <= 100)) && ((damage >= damageLimit[0]) && (damage <= damageLimit[1]))) { $result = json_matrix[ocurrencyKey][damageKey]; } else if (((damage >= -100) && (damage <= -1)) && ((damage >= damageLimit[1]) && (damage <= damageLimit[0]))) { $result = json_matrix[ocurrencyKey][damageKey]; } } return $result; }; for (ocurrencyKey in json_matrix) { let ocurrencyLimit = ocurrencyKey.split(','); if ((ocurrency == 0) && (ocurrency == ocurrencyLimit[0])) { $score = $resDamage(ocurrencyKey); } else if ((ocurrency >= 101) && (ocurrency >= ocurrencyLimit[0])) { $score = $resDamage(ocurrencyKey); } else if ((ocurrency <= -101) && (ocurrency <= ocurrencyLimit[0])) { $score = $resDamage(ocurrencyKey); } else if (((ocurrency >= 1) && (ocurrency <= 100)) && ((ocurrency >= ocurrencyLimit[0]) && (ocurrency <= ocurrencyLimit[1]))) { $score = $resDamage(ocurrencyKey); } else if (((ocurrency >= -100) && (ocurrency <= -1)) && ((ocurrency >= ocurrencyLimit[1]) && (ocurrency <= ocurrencyLimit[0]))) { $score = $resDamage(ocurrencyKey); } } return $score; } function getSlopeScore($slope) { let $score = null; if ($slope <= 3) $score = 0; else if (($slope >= 4) & ($slope <= 12)) $score = 33; else if (($slope >= 13) & ($slope <= 35)) $score = 66; else if ($slope >= 35) $score = 100; return $score; } function getDensityScore($density) { let $score = null; if (($density >= 0) && ($density <= 100)) $score = 0; else if (($density >= 101) & ($density <= 300)) $score = 50; else if ($density >= 301) $score = 100; 
return $score; } function checkIntegerOnDecimal(value) { if (value % 1 == 0) value = Math.round(value); else value = value.toFixed(2); return value; } $(window).on('load', function() { /** calculates riesgo of comuna */ $.ajax({ url: window.location.origin + '/admin/reportes/comunales/riesgo', type: 'GET', success: function(response) { let riesgo = 0; let amenaza = 0; let vulnerabilidad = 0; /** variables to prevalencia */ let inc_quin_mean = 0; let inc_actual = 0; let inc_years = 0; let inc_percentage_dif = 0; let sup_quin_mean = 0; let sup_actual = 0; let sup_years = 0; let sup_percentage_dif = 0; let res_prevalence = 0; /** variables to pendiente */ let pen_total = 0; let pen_percentage_clase_1 = 0; let pen_percentage_clase_2 = 0; let pen_percentage_clase_3 = 0; let pen_average = 0; let res_pen_average = 0; /** variables to vegetación combustible */ let total_combustible = response['combustible_total']['total']; let res_combustible = 0; /** variables to densidad poblacional */ let pbl_density = 0; let res_pbl_density = 0; /** variable to estructura techos viviendas */ let techo_tipo_1 = 0; let techo_tipo_1a = 0; let techo_tipo_1b = 0; let techo_tipo_2 = 0; let techo_tipo_3 = 0; let techo_tipo_4 = 0; let techo_total = 0; let res_techo = 0; let colors = ['#44bd00', '#ffbb00', '#ff7300', '#ff1b19']; /** calculates prevalencia */ for (let key in response['incendios']) { if (key != 'inc_2020') { inc_quin_mean += parseInt(response['incendios'][key]); inc_years += 1; } else { inc_actual += parseInt(response['incendios'][key]); } } for (let key in response['superficie']) { if (key != 'sup_2020') { sup_quin_mean += parseFloat(response['superficie'][key]); sup_years += 1; } else { sup_actual += parseFloat(response['superficie'][key]); } } inc_quin_mean = inc_quin_mean / inc_years; if (inc_quin_mean == 0) inc_percentage_dif = 0; else inc_percentage_dif = Math.round(((inc_actual - inc_quin_mean) / inc_quin_mean) * 100); sup_quin_mean = sup_quin_mean / sup_years; if 
(sup_quin_mean == 0) sup_percentage_dif = 0; else sup_percentage_dif = Math.round(((sup_actual - sup_quin_mean) / sup_quin_mean) * 100); res_prevalence = getPrevalenceScore(inc_percentage_dif, sup_percentage_dif); /** calculates pendiente */ /*for (let key in response['pendiente']) { if (response['pendiente'][key]['gridcode'] == 1) pen_percentage_clase_1 += ((parseInt(response['pendiente'][key]['gridcode']) * parseFloat(response['pendiente'][key]['pc_superficie'])) / 100); else if (response['pendiente'][key]['gridcode'] == 2) pen_percentage_clase_2 += ((parseInt(response['pendiente'][key]['gridcode']) * parseFloat(response['pendiente'][key]['pc_superficie'])) / 100); else if (response['pendiente'][key]['gridcode'] == 3) pen_percentage_clase_3 += ((parseInt(response['pendiente'][key]['gridcode']) * parseFloat(response['pendiente'][key]['pc_superficie'])) / 100); pen_total = pen_total + 1; } pen_average = Math.round((pen_percentage_clase_1 + pen_percentage_clase_2 + pen_percentage_clase_3) / pen_total); res_pen_average = getSlopeScore(pen_average);*/ /** calculates vegetación combustible */ for (let key in response['combustible']) { res_combustible = res_combustible + (parseInt(response['combustible'][key]['puntaje']) * (parseFloat(response['combustible'][key]['hectareas'])/total_combustible)); } res_combustible = Math.round(res_combustible); /** calculates densidad poblacional */ pbl_density = Math.round(parseFloat(response['poblacion']['sum_densidad']) / parseInt(response['poblacion']['cantidad'])); res_pbl_density = getDensityScore(pbl_density); /** calculation of the materialidad de techos viviendas */ for (let key in response['vivienda_techos']) { if (key == 'techo_tipo_1a') techo_tipo_1a += parseInt(response['vivienda_techos'][key]); else if (key == 'techo_tipo_1b') techo_tipo_1b += parseInt(response['vivienda_techos'][key]); else if (key == 'techo_tipo_2') techo_tipo_2 += parseInt(response['vivienda_techos'][key]); else if (key == 'techo_tipo_3') techo_tipo_3 
+= parseInt(response['vivienda_techos'][key]); else if (key == 'techo_tipo_4') techo_tipo_4 += parseInt(response['vivienda_techos'][key]); techo_total += parseInt(response['vivienda_techos'][key]); } if (techo_total == 0) { res_techo = 0; } else { techo_tipo_1 = ((techo_tipo_1a + techo_tipo_1b) / techo_total) * 100; techo_tipo_2 = (techo_tipo_2 / techo_total) * 75; techo_tipo_3 = (techo_tipo_3 / techo_total) * 50; techo_tipo_4 = (techo_tipo_4 / techo_total) * 25; res_techo = Math.round(techo_tipo_1 + techo_tipo_2 + techo_tipo_3 + techo_tipo_4); } if (res_pen_average == null) res_pen_average = 0; if (res_combustible == null) res_combustible = 0; if (res_pbl_density == null) res_pbl_density = 0; if ((res_prevalence != null) && (res_pen_average != null) && (res_combustible != null) && (res_pbl_density != null) && (!isNaN(res_techo))) { amenaza = Math.round(0.5*res_prevalence + 0.25*res_pen_average + 0.25*res_combustible); vulnerabilidad = Math.round(0.2*res_pbl_density + 0.1*res_techo); if (amenaza < 0) amenaza = 0; if (vulnerabilidad < 0) vulnerabilidad = 0; riesgo = Math.round(0.5*amenaza + 0.5*vulnerabilidad); if (riesgo < 0) riesgo = 0; $('#val-amenaza-comuna').html(amenaza); $('#val-vulnerabilidad-comuna').html(vulnerabilidad); if (riesgo <= 25) { $('#val-riesgo-comuna').trigger('configure', { 'fgColor': colors[0], 'min': 0, 'max': 100 }).val(riesgo).trigger('change'); $('#val-riesgo-comuna').css({ 'color': colors[0] }); } else if (riesgo > 25 && riesgo <= 50) { $('#val-riesgo-comuna').trigger('configure', { 'fgColor': colors[1], 'min': 0, 'max': 100 }).val(riesgo).trigger('change'); $('#val-riesgo-comuna').css({ 'color': colors[1] }); } else if (riesgo > 50 && riesgo <= 75) { $('#val-riesgo-comuna').trigger('configure', { 'fgColor': colors[2], 'min': 0, 'max': 100 }).val(riesgo).trigger('change'); $('#val-riesgo-comuna').css({ 'color': colors[2] }); } else if (riesgo > 75) { $('#val-riesgo-comuna').trigger('configure', { 'fgColor': colors[3], 'min': 0, 'max': 
100 }).val(riesgo).trigger('change'); $('#val-riesgo-comuna').css({ 'color': colors[3] }); } } }, error: function(xhr, textStatus, error){ console.log(xhr.statusText); console.log(textStatus); console.log(error); } }); /** get data of Prevalencia */ $.ajax({ url: window.location.origin + '/admin/reportes/comunales/riesgo/prevalencia', type: 'GET', success: function(response) { let yearsQuinquenio = 5; let ocurrenciaQuinquenio = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let ocurrenciaUltima = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let danoQuinquenio = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let danoUltima = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; let months = ['Julio', 'Agosto', 'Septiembre', 'Octubre', 'Noviembre', 'Diciembre', 'Enero', 'Febrero', 'Marzo', 'Abril', 'Mayo', 'Junio']; let sumOcurrenciaQuinquenio = 0; let sumOcurrenciaUltima = 0; var sumDanoQuinquenio = 0; let sumDanoUltima = 0; let percentageOcurrencia = 0; let percentageDano = 0; let valuePrevalencia = 0; response.quinquenio.forEach(element => { for (let index = 0; index < months.length; index++) { if (element.mes_ocurre.match(months[index])) ocurrenciaQuinquenio[index] = ocurrenciaQuinquenio[index] + 1; } }); response.ultima_temporada.forEach(element => { for (let index = 0; index < months.length; index++) { if (element.mes_ocurre.match(months[index])) ocurrenciaUltima[index] = ocurrenciaUltima[index] + 1; } }); response.quinquenio.forEach(element => { for (let index = 0; index < months.length; index++) { if (element.mes_ocurre.match(months[index])) danoQuinquenio[index] = danoQuinquenio[index] + parseFloat(element.sup_total); } }); response.ultima_temporada.forEach(element => { for (let index = 0; index < months.length; index++) { if (element.mes_ocurre.match(months[index])) danoUltima[index] = danoUltima[index] + parseFloat(element.sup_total); } }); for (let index = 0; index < months.length; index++) { ocurrenciaQuinquenio[index] = parseFloat((ocurrenciaQuinquenio[index] / yearsQuinquenio).toFixed(2)); 
danoQuinquenio[index] = parseFloat((danoQuinquenio[index] / yearsQuinquenio).toFixed(2)); danoUltima[index] = parseFloat(danoUltima[index].toFixed(2)); } sumOcurrenciaQuinquenio = ocurrenciaQuinquenio.reduce(function(pv, cv) { if (isNaN(cv)) return pv + 0; else return pv + parseFloat(cv); }, 0); sumOcurrenciaUltima = ocurrenciaUltima.reduce(function(pv, cv) { if (isNaN(cv)) return pv + 0; else return pv + parseInt(cv); }, 0); sumDanoQuinquenio = danoQuinquenio.reduce(function(pv, cv) { if (isNaN(cv)) return pv + 0; else return pv + parseFloat(cv); }, 0); sumDanoUltima = danoUltima.reduce(function(pv, cv) { if (isNaN(cv)) return pv + 0; else return pv + parseFloat(cv); }, 0); if (sumOcurrenciaQuinquenio == 0) percentageOcurrencia = 0; else percentageOcurrencia = Math.round(((sumOcurrenciaUltima - sumOcurrenciaQuinquenio) / sumOcurrenciaQuinquenio)*100); if (sumDanoQuinquenio == 0) percentageDano = 0; else percentageDano = Math.round(((sumDanoUltima - sumDanoQuinquenio) / sumDanoQuinquenio)*100); sumOcurrenciaQuinquenio = checkIntegerOnDecimal(sumOcurrenciaQuinquenio); sumDanoQuinquenio = checkIntegerOnDecimal(sumDanoQuinquenio); sumDanoUltima = checkIntegerOnDecimal(sumDanoUltima); valuePrevalencia = getPrevalenceScore(percentageOcurrencia, percentageDano); $('#val-ocurrencia-quinquenio').text(sumOcurrenciaQuinquenio); $('#val-ocurrencia-ultima').text(sumOcurrenciaUltima); $('#val-dano-quinquenio').text(sumDanoQuinquenio + ' ha'); $('#val-dano-ultima').text(sumDanoUltima + ' ha'); percentageOcurrencia = Math.round(percentageOcurrencia); percentageDano = Math.round(percentageDano); let colors = ['#44bd00', '#ffbb00', '#ff7300', '#ff1b19'] /** sets value of prevalencia and complete knob object with colors */ if (valuePrevalencia <= -50) { $('#val-prevalencia').trigger('configure', { 'fgColor': colors[0], 'min': -100, 'max': 100 }).val(valuePrevalencia).trigger('change'); $('#val-prevalencia').css({ 'color': colors[0] }); } else if (valuePrevalencia > -50 && 
valuePrevalencia <= 0) { $('#val-prevalencia').trigger('configure', { 'fgColor': colors[1], 'min': -100, 'max': 100 }).val(valuePrevalencia).trigger('change'); $('#val-prevalencia').css({ 'color': colors[1] }); } else if (valuePrevalencia > 0 && valuePrevalencia <= 50) { $('#val-prevalencia').trigger('configure', { 'fgColor': colors[2], 'min': -100, 'max': 100 }).val(valuePrevalencia).trigger('change'); $('#val-prevalencia').css({ 'color': colors[2] }); } else if (valuePrevalencia > 50) { $('#val-prevalencia').trigger('configure', { 'fgColor': colors[3], 'min': -100, 'max': 100 }).val(valuePrevalencia).trigger('change'); $('#val-prevalencia').css({ 'color': colors[3] }); } /** sets value of ocurrencia and complete knob object with colors */ $('#val-ocurrencia').val(percentageOcurrencia); if (percentageOcurrencia <= 25) $('#val-ocurrencia').css({ 'color': colors[0] }); else if (percentageOcurrencia > 25 && percentageOcurrencia <= 50) $('#val-ocurrencia').css({ 'color': colors[1] }); else if (percentageOcurrencia > 50 && percentageOcurrencia <= 75) $('#val-ocurrencia').css({ 'color': colors[2] }); else if (percentageOcurrencia > 75) $('#val-ocurrencia').css({ 'color': colors[3] }); /** sets value of daño and complete knob object with colors */ $('#val-dano').val(percentageDano); if (percentageDano <= 25) $('#val-dano').css({ 'color': colors[0] }); else if (percentageDano > 25 && percentageDano <= 50) $('#val-dano').css({ 'color': colors[1] }); else if (percentageDano > 50 && percentageDano <= 75) $('#val-dano').css({ 'color': colors[2] }); else if (percentageDano > 75) $('#val-dano').css({ 'color': colors[3] }); ocurrenciaQuinquenio.unshift('data1'); ocurrenciaUltima.unshift('data2'); danoQuinquenio.unshift('data1'); danoUltima.unshift('data2'); c3.generate({ bindto: '#chart-ocurrencia', // id of chart wrapper data: { columns: [ // each columns data ocurrenciaQuinquenio, ocurrenciaUltima ], type: 'area', // default type of chart colors: { 'data1': Aero.colors["red"], 
'data2': Aero.colors["green"] }, names: { // name of each serie 'data1': 'Promedio quinquenio', 'data2': 'Última temporada' } }, axis: { x: { type: 'category', // name of each category categories: ['Jul', 'Ago', 'Sep', 'Oct', 'Nov', 'Dic', 'Ene', 'Feb', 'Mar', 'Abr', 'May', 'Jun'] }, }, legend: { show: true, //hide legend }, padding: { bottom: 0, top: 0 }, }); c3.generate({ bindto: '#chart-dano', // id of chart wrapper data: { columns: [ // each columns data danoQuinquenio, danoUltima ], type: 'area', // default type of chart colors: { 'data1': Aero.colors["orange"], 'data2': Aero.colors["blue"] }, names: { // name of each serie 'data1': 'Promedio quinquenio', 'data2': 'Última temporada' } }, axis: { x: { type: 'category', // name of each category categories: ['Jul', 'Ago', 'Sep', 'Oct', 'Nov', 'Dic', 'Ene', 'Feb', 'Mar', 'Abr', 'May', 'Jun'] }, y: { 'label': { 'text': 'hectáreas' } } }, legend: { show: true, //hide legend }, padding: { bottom: 0, top: 0 }, }); }, error: function(xhr, textStatus, error){ console.log(xhr.statusText); console.log(textStatus); console.log(error); } }); }); function getDetailsGrupoCausas(btn) { let data = btn.value.split(','); let grupoCausa = data[0]; let grupoEspecifica = data[1]; $.ajax({ url: window.location.origin + '/admin/reportes/comunales/causas/grupos', type: 'POST', data: { 'grupo-causa': grupoCausa, 'grupo-especifica': grupoEspecifica }, success: function(response) { let totalIncendios = 0; $('#div-content-causas-especificas').empty(); response.forEach(element => { totalIncendios = totalIncendios + parseInt(element.cantidad); }); response.forEach(element => { let content = '<div class="col-lg-4 col-md-6 col-sm-6 col-6 text-center">' + '<div class="card">' + '<div class="body" style="border: 1px solid green;">' + '<div class="d-flex bd-highlight text-left mt-0">' + '<div class="flex-fill bd-highlight">' + '<small class="text-muted">Causa específica</small>' + '<p>' + element.causa_especifica + '</p>' + '</div>' + '</div>' + 
'<div class="d-flex bd-highlight text-left">' + '<div class="flex-fill bd-highlight">' + '<small class="text-muted">N° incendios</small>' + '<p>' + element.cantidad + '</p>' + '</div>' + '</div>' + '<div class="d-flex bd-highlight text-left">' + '<div class="flex-fill bd-highlight">' + '<small class="text-muted">% incendios</small>' + '<p>' + ((element.cantidad/totalIncendios)*100).toFixed(1) + '</p>' + '</div>' + '</div>' + '</div>' + '</div>' + '</div>'; $('#div-content-causas-especificas').append(content); }); $('#modal-nombre-grupo-causa').text(grupoEspecifica); $('#modal-causas-especificas').modal('show'); }, error: function(xhr, textStatus, error){ console.log(xhr.statusText); console.log(textStatus); console.log(error); } }); }
from __future__ import absolute_import from __future__ import print_function import collections import datetime import re import os.path from .shims import subprocess_check_output MANUAL_EDIT_WARNING = """This file is generated using the %s script. DO NOT MANUALLY EDIT!!!! Last Modified: %s """ % (os.path.basename(__file__), datetime.datetime.now().strftime("%Y-%m-%d %H:%M")) email_mappings = { # Firas AlShafei "firas.alshafei@gmail.com": "firas.alshafei@us.abb.com", # Jeff Blank "jeff@t440.local": "blank@eclipse.ncsc.mil", # Frank Caviggia "fcaviggia@users.noreply.github.com": "fcaviggi@ra.iad.redhat.com", # Jean-Baptiste Donnette "donnet_j@epita.fr": "jean-baptiste.donnette@epita.fr", # Greg Elin "greg@fotonotes.net": "gregelin@gitmachines.com", # Andrew Gilmore "agilmore@ecahdb2.bor.doi.net": "agilmore2@gmail.com", # Marek Haicman "dahaic@users.noreply.github.com": "mhaicman@redhat.com", # John Hooks "hooksie11@gmail.com": "jhooks@starscream.pa.jhbcomputers.com", # Simon Lukasik "isimluk@fedoraproject.org": "slukasik@redhat.com", # Milan Lysonek "milan.lysonek@gmail.com": "mlysonek@redhat.com", # Zbynek Moravec "ybznek@users.noreply.github.com": "zmoravec@redhat.com", "moraveczbynek@gmail.com": "zmoravec@redhat.com", # Nathan Peters "nathan@nathanpeters.com": "Nathaniel.Peters@ca.com", "petna01@ca.com": "Nathaniel.Peters@ca.com", # Martin Preisler "martin@preisler.me": "mpreisle@redhat.com", # Robin Price II "rprice@users.noreply.github.com": "robin@redhat.com", "rprice@redhat.com": "robin@redhat.com", # Dave / David Smith "dsmith@secure-innovations.net": "dsmith@eclipse.ncsc.mil", "dsmith@fornax.eclipse.ncsc.mil": "dsmith@eclipse.ncsc.mil", # Philippe Thierry "phil@internal.reseau-libre.net": "phil@reseau-libre.net", "philippe.thierry@reseau-libre.net": "phil@reseau-libre.net", "philippe.thierry@thalesgroup.com": "phil@reseau-libre.net", # Shawn Wells "shawn@localhost.localdomain": "shawn@redhat.com", "shawnw@localhost.localdomain": "shawn@redhat.com", # No 
idea / ignore "lyd@chippy.(none)": "", "nick@null.net": "", "root@localhost.localdomain": "", "root@rhel6.(none)": "", } name_mappings = { "Gabe": "Gabe Alford", "Olivier": "Olivier Bonhomme", "OnceUponALoop": "Firas AlShafei", } def _get_contributions_by_canonical_email(output): contributions_by_email = collections.defaultdict(list) for line in output.split("\n"): match = re.match(r"[\s]*([0-9]+)\s+(.+)\s+\<(.+)\>", line) if match is None: continue commits_count, author_name, email = match.groups() canonical_email = email_mappings.get(email, email) if canonical_email == "": continue # ignored contributions_by_email[canonical_email].append((int(commits_count), author_name)) return contributions_by_email def _get_name_used_most_in_contributions(contribution_sets): _, name_used_most = sorted(contribution_sets, reverse=True)[0] return name_used_most def _get_contributor_email_mapping(contributions_by_email): contributors = {} for email in contributions_by_email: name_used_most = _get_name_used_most_in_contributions(contributions_by_email[email]) canonical_name_used_most = name_mappings.get(name_used_most, name_used_most) contributors[canonical_name_used_most] = email return contributors def _names_sorted_by_last_name(names): return sorted(names, key=lambda x: tuple(n.upper() for n in x.split(" "))[::-1]) def generate(): output = subprocess_check_output(["git", "shortlog", "-se"]).decode("utf-8") contributions_by_email = _get_contributions_by_canonical_email(output) contributors = _get_contributor_email_mapping(contributions_by_email) contributors_md = "<!---%s--->\n\n" % MANUAL_EDIT_WARNING contributors_md += \ "The following people have contributed to the SCAP Security Guide project\n" contributors_md += "(listed in alphabetical order):\n\n" contributors_xml = "<!--%s-->\n\n" % MANUAL_EDIT_WARNING contributors_xml += "<text>\n" for name in _names_sorted_by_last_name(list(contributors.keys())): email = contributors[name] contributors_md += "* %s <%s>\n" % (name, 
email) contributors_xml += "<contributor>%s &lt;%s&gt;</contributor>\n" % (name, email) contributors_xml += "</text>\n" return contributors_md, contributors_xml
import React from "react" import Layout from "../components/layout" import Seo from "../components/seo" const NotFoundPage = () => ( <Layout> <Seo title="404: Not found" /> <div className="about_inner"> <h1>NOT FOUND</h1> <p>You just hit a route that doesn&#39;t exist... the sadness.</p> </div> </Layout> ) export default NotFoundPage
/* This file must live in the root folder of the site.
 *
 * Any change to the content of this file causes the service worker to be
 * reinstalled.
 *
 * Conventionally the number in the cache name is bumped whenever the content
 * of the cached files changes.
 *
 * When using GitHub Pages, wait ~11 minutes after publishing changes to the
 * site before updating this file. */

// NOTE(review): the cache name starts with a stray leading space. It still
// works (the name is only a storage key), but it looks like a typo — confirm
// before removing it, since renaming the cache triggers a fresh install.
const CACHE = " iotadangomez-2.03";

/** Files required for the application to work offline. */
const ARCHIVOS = [
 "historial.html",
 "dispositivo.html",
 ".vscode/settings.json",
 "favicon.ico",
 "index.html",
 "LICENSE",
 "site.webmanifest",
 "cmp/mi-footer.js",
 "css/colores.css",
 "css/estilos.css",
 "img/icono1024.png",
 "img/icono2048.png",
 "img/icono256.png",
 "disp/ProxyEntrada.js",
 "disp/CtrlDispositivo.js",
 "disp/ProxyHistorial.js",
 "disp/ProxySalida.js",
 // FIX: several entries below previously carried trailing spaces
 // ("disp/ResInt.js " etc.). Those resolve to URLs ending in "%20" that do
 // not exist, so cache.addAll() rejected and the whole install failed.
 "disp/ResInt.js",
 "disp/utilIoT.js",
 "js/config.js",
 "js/CtrlDivide.js",
 "js/regSw.js",
 "js/init.js",
 "js/CtrlMovil.js",
 "js/CtrlHistorial.js",
 "js/tipos.js",
 "lib/campo-dinamico.js",
 "lib/campos.css",
 "lib/icono.css",
 "lib/material-icons.css",
 "lib/MaterialIcons-Regular.codepoints",
 "lib/MaterialIcons-Regular.ttf",
 "lib/mi-nav.css",
 "lib/movil.js",
 "lib/principal.css",
 "lib/roboto-v20-latin-700.woff",
 "lib/roboto-v20-latin-700.woff2",
 "lib/roboto-v20-latin-regular.woff",
 "lib/roboto-v20-latin-regular.woff2",
 "lib/roboto.css",
 "lib/util.js",
 "lib/tiposFire.js",
 "lib/fabrica.js",
 "/"
];

self.addEventListener("install", evt => {
 console.log("sw instalado.");
 /* Perform the installation: preload the
  * required files into the cache. */
 // @ts-ignore
 evt.waitUntil(cargaCache());
});

/* Serve requested files from the cache;
 * fall back to the network when they are
 * not cached. */
self.addEventListener("fetch", evt => {
 // @ts-ignore
 if (evt.request.method === "GET") {
  // @ts-ignore
  evt.respondWith(usaCache(evt));
 }
});

// NOTE(review): caches from previous versions are never deleted. Consider
// cleaning them up here (caches.keys() + caches.delete for names != CACHE) —
// confirm against the deployment strategy first.
self.addEventListener("activate",
 () => console.log("sw activo."));

/** Opens the versioned cache and preloads every file listed in ARCHIVOS.
 * Rejects (failing the install) if any single file cannot be fetched. */
async function cargaCache() {
 console.log(
  "Intentando cargar cache", CACHE);
 const cache =
  await caches.open(CACHE);
 await cache.addAll(ARCHIVOS);
 console.log("Cache", CACHE,
  "cargado");
}

/** Cache-first lookup for a fetch event: returns the cached response
 * (ignoring the query string) or falls through to the network. */
async function usaCache(evt) {
 const cache =
  await caches.open(CACHE);
 const response =
  await cache.match(evt.request,
   { ignoreSearch: true });
 if (response) {
  return response;
 } else {
  return fetch(evt.request);
 }
}
(function() {
  define(['oraculum'], function(Oraculum) {
    'use strict';

    /* SortableColumn.ModelMixin
       =========================
       Provides a sorting interface for a "column" model. The configured
       `sortCollection` must expose:

         * getAttributeDirection(attribute)
         * addAttributeDirection(attribute, direction)
         * removeAttributeDirection(attribute)
         * unsort()

       These may be hand-written on the collection, or supplied by one of
       the provided sorting mixins:
       @see models/mixins/sort-by-attribute-direction.coffee
       @see models/mixins/sort-by-multi-attribute-direction.coffee
       @see models/mixins/sort-by-attribute-direction-interface.coffee
       @see models/mixins/sort-by-multi-attribute-direction-interface.coffee
     */
    return Oraculum.defineMixin('SortableColumn.ModelMixin', {

      mixinOptions: {
        sortableColumn: {
          collection: null,          // collection (or name / factory fn) to sort
          directions: [1, 0, -1]     // cycle of sort directions
        }
      },

      // Copy construction-time options onto mixinOptions and validate that a
      // collection was provided. (`attrs` is part of the mixconfig signature
      // but is not consumed here.)
      mixconfig: function(mixinOptions, attrs, options) {
        var config = mixinOptions.sortableColumn;
        if (options == null) {
          options = {};
        }
        if (options.sortCollection != null) {
          config.collection = options.sortCollection;
        }
        if (options.sortDirections != null) {
          config.directions = options.sortDirections;
        }
        if (!config.collection) {
          throw new Error('SortableColumn.ModelMixin requires a sortCollection');
        }
      },

      // Resolve the configured collection (by factory name or factory
      // function if necessary), subscribe to its 'sort' event, and seed the
      // model's sortDirection from the collection's current state.
      mixinitialize: function() {
        var target = this.mixinOptions.sortableColumn.collection;
        this._sortableCollection = target;
        if (_.isString(this._sortableCollection)) {
          this._sortableCollection = this.__factory().get(this._sortableCollection);
        }
        if (_.isFunction(this._sortableCollection)) {
          this._sortableCollection = this._sortableCollection.call(this);
        }
        this.listenTo(this._sortableCollection, 'sort', this._collectionSorted);
        return this._collectionSorted();
      },

      // Mirror the collection's direction for this column's attribute onto
      // the model's 'sortDirection' attribute (unset when not sorted).
      _collectionSorted: function() {
        var direction = this._sortableCollection.getAttributeDirection(this.get('attribute'));
        if (!direction) {
          return this.unset('sortDirection');
        }
        return this.set('sortDirection', direction);
      },

      // Advance this column's sort to the next direction in the cycle.
      nextDirection: function() {
        this._sortableCollection.addAttributeDirection(this.get('attribute'), this.getNextDirection());
        return this._collectionSorted();
      },

      // Compute the direction that follows the current one, wrapping back to
      // the first entry at the end of the cycle (and starting from it when
      // the column is currently unsorted).
      getNextDirection: function() {
        var directions = this.mixinOptions.sortableColumn.directions;
        var current = this._sortableCollection.getAttributeDirection(this.get('attribute'));
        var next = directions[directions.indexOf(current) + 1];
        return next == null ? directions[0] : next;
      },

      // True when the collection currently sorts by this column's attribute.
      isSorted: function() {
        return this.has('sortDirection');
      }

    });
  });
}).call(this);
// Auto-generated file. Do not edit!
//   Template: src/f16-dwconv/up-neonfp16arith.c.in
//   Generator: tools/xngen
//
// Copyright 2020 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <assert.h>

#include <arm_neon.h>

#include <xnnpack/dwconv.h>


// Half-precision (f16) depthwise-convolution micro-kernel: 9 taps per output
// pixel (presumably a 3x3 window — confirm against the caller), up to 32
// channels per main-loop iteration, NEON fp16 arithmetic, with TWO partial
// accumulators per channel group ("acc2") to shorten the FMA dependency chain.
//
// Weight layout implied by the offsets below: for each 32-channel group, 32
// bias values followed by 32 kernel values for each of the 9 taps.
// `zero` points at a shared zero buffer substituted for out-of-bounds input
// rows; rows equal to `zero` are deliberately NOT shifted by `input_offset`.
// Output is clamped into [params->neon.min, params->neon.max].
void xnn_f16_dwconv_minmax_ukernel_up32x9__neonfp16arith_acc2(
    size_t channels,
    size_t output_width,
    const void** input,
    const void* weights,
    void* output_ptr,
    size_t input_stride,
    size_t output_increment,
    size_t input_offset,
    const void* zero,
    const union xnn_f16_minmax_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_OOB_READS
{
  assert(channels != 0);
  assert(output_width != 0);

  __fp16* output = (__fp16*) output_ptr;
  // Broadcast the clamping bounds once per call.
  const float16x8_t vmax = vreinterpretq_f16_u16(vld1q_dup_u16(&params->neon.max));
  const float16x8_t vmin = vreinterpretq_f16_u16(vld1q_dup_u16(&params->neon.min));
  do {
    // Set up the 9 per-tap input-row pointers for this output pixel.
    // A pointer equal to `zero` refers to the shared zero buffer and must
    // not be offset.
    const __fp16* i0 = (const __fp16*) input[0];
    assert(i0 != NULL);
    if XNN_UNPREDICTABLE(i0 != (const __fp16*) zero) {
      i0 = (const __fp16*) ((uintptr_t) i0 + input_offset);
    }
    const __fp16* i1 = (const __fp16*) input[1];
    assert(i1 != NULL);
    if XNN_UNPREDICTABLE(i1 != (const __fp16*) zero) {
      i1 = (const __fp16*) ((uintptr_t) i1 + input_offset);
    }
    const __fp16* i2 = (const __fp16*) input[2];
    assert(i2 != NULL);
    if XNN_UNPREDICTABLE(i2 != (const __fp16*) zero) {
      i2 = (const __fp16*) ((uintptr_t) i2 + input_offset);
    }
    const __fp16* i3 = (const __fp16*) input[3];
    assert(i3 != NULL);
    if XNN_UNPREDICTABLE(i3 != (const __fp16*) zero) {
      i3 = (const __fp16*) ((uintptr_t) i3 + input_offset);
    }
    const __fp16* i4 = (const __fp16*) input[4];
    assert(i4 != NULL);
    if XNN_UNPREDICTABLE(i4 != (const __fp16*) zero) {
      i4 = (const __fp16*) ((uintptr_t) i4 + input_offset);
    }
    const __fp16* i5 = (const __fp16*) input[5];
    assert(i5 != NULL);
    if XNN_UNPREDICTABLE(i5 != (const __fp16*) zero) {
      i5 = (const __fp16*) ((uintptr_t) i5 + input_offset);
    }
    const __fp16* i6 = (const __fp16*) input[6];
    assert(i6 != NULL);
    if XNN_UNPREDICTABLE(i6 != (const __fp16*) zero) {
      i6 = (const __fp16*) ((uintptr_t) i6 + input_offset);
    }
    const __fp16* i7 = (const __fp16*) input[7];
    assert(i7 != NULL);
    if XNN_UNPREDICTABLE(i7 != (const __fp16*) zero) {
      i7 = (const __fp16*) ((uintptr_t) i7 + input_offset);
    }
    const __fp16* i8 = (const __fp16*) input[8];
    assert(i8 != NULL);
    if XNN_UNPREDICTABLE(i8 != (const __fp16*) zero) {
      i8 = (const __fp16*) ((uintptr_t) i8 + input_offset);
    }
    // Advance to the next pixel's pointer set.
    input = (const void**) ((uintptr_t) input + input_stride);

    size_t c = channels;
    const __fp16* w = (const __fp16*) weights;
    // Main loop: 32 channels (4 vectors of 8 halves) per iteration.
    // Even taps accumulate into the p0 set, odd taps into the p1 set.
    for (; c >= 32; c -= 32) {
      // Initialize accumulator set 0 with the bias.
      float16x8_t vacc01234567p0 = vld1q_f16(w); w += 8;
      float16x8_t vacc89ABCDEFp0 = vld1q_f16(w); w += 8;
      float16x8_t vaccGHIJKLMNp0 = vld1q_f16(w); w += 8;
      float16x8_t vaccOPQRSTUVp0 = vld1q_f16(w); w += 8;

      // Tap 0 -> accumulator set 0.
      const float16x8_t vi0x01234567 = vld1q_f16(i0); i0 += 8;
      const float16x8_t vi0x89ABCDEF = vld1q_f16(i0); i0 += 8;
      const float16x8_t vi0xGHIJKLMN = vld1q_f16(i0); i0 += 8;
      const float16x8_t vi0xOPQRSTUV = vld1q_f16(i0); i0 += 8;
      const float16x8_t vk0x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk0x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk0xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk0xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi0x01234567, vk0x01234567);
      vacc89ABCDEFp0 = vfmaq_f16(vacc89ABCDEFp0, vi0x89ABCDEF, vk0x89ABCDEF);
      vaccGHIJKLMNp0 = vfmaq_f16(vaccGHIJKLMNp0, vi0xGHIJKLMN, vk0xGHIJKLMN);
      vaccOPQRSTUVp0 = vfmaq_f16(vaccOPQRSTUVp0, vi0xOPQRSTUV, vk0xOPQRSTUV);

      // Tap 1 -> initializes accumulator set 1 (vmul, not fma).
      const float16x8_t vi1x01234567 = vld1q_f16(i1); i1 += 8;
      const float16x8_t vi1x89ABCDEF = vld1q_f16(i1); i1 += 8;
      const float16x8_t vi1xGHIJKLMN = vld1q_f16(i1); i1 += 8;
      const float16x8_t vi1xOPQRSTUV = vld1q_f16(i1); i1 += 8;
      const float16x8_t vk1x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk1x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk1xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk1xOPQRSTUV = vld1q_f16(w); w += 8;
      float16x8_t vacc01234567p1 = vmulq_f16(vi1x01234567, vk1x01234567);
      float16x8_t vacc89ABCDEFp1 = vmulq_f16(vi1x89ABCDEF, vk1x89ABCDEF);
      float16x8_t vaccGHIJKLMNp1 = vmulq_f16(vi1xGHIJKLMN, vk1xGHIJKLMN);
      float16x8_t vaccOPQRSTUVp1 = vmulq_f16(vi1xOPQRSTUV, vk1xOPQRSTUV);

      // Tap 2 -> accumulator set 0.
      const float16x8_t vi2x01234567 = vld1q_f16(i2); i2 += 8;
      const float16x8_t vi2x89ABCDEF = vld1q_f16(i2); i2 += 8;
      const float16x8_t vi2xGHIJKLMN = vld1q_f16(i2); i2 += 8;
      const float16x8_t vi2xOPQRSTUV = vld1q_f16(i2); i2 += 8;
      const float16x8_t vk2x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk2x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk2xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk2xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi2x01234567, vk2x01234567);
      vacc89ABCDEFp0 = vfmaq_f16(vacc89ABCDEFp0, vi2x89ABCDEF, vk2x89ABCDEF);
      vaccGHIJKLMNp0 = vfmaq_f16(vaccGHIJKLMNp0, vi2xGHIJKLMN, vk2xGHIJKLMN);
      vaccOPQRSTUVp0 = vfmaq_f16(vaccOPQRSTUVp0, vi2xOPQRSTUV, vk2xOPQRSTUV);

      // Tap 3 -> accumulator set 1.
      const float16x8_t vi3x01234567 = vld1q_f16(i3); i3 += 8;
      const float16x8_t vi3x89ABCDEF = vld1q_f16(i3); i3 += 8;
      const float16x8_t vi3xGHIJKLMN = vld1q_f16(i3); i3 += 8;
      const float16x8_t vi3xOPQRSTUV = vld1q_f16(i3); i3 += 8;
      const float16x8_t vk3x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk3x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk3xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk3xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi3x01234567, vk3x01234567);
      vacc89ABCDEFp1 = vfmaq_f16(vacc89ABCDEFp1, vi3x89ABCDEF, vk3x89ABCDEF);
      vaccGHIJKLMNp1 = vfmaq_f16(vaccGHIJKLMNp1, vi3xGHIJKLMN, vk3xGHIJKLMN);
      vaccOPQRSTUVp1 = vfmaq_f16(vaccOPQRSTUVp1, vi3xOPQRSTUV, vk3xOPQRSTUV);

      // Tap 4 -> accumulator set 0.
      const float16x8_t vi4x01234567 = vld1q_f16(i4); i4 += 8;
      const float16x8_t vi4x89ABCDEF = vld1q_f16(i4); i4 += 8;
      const float16x8_t vi4xGHIJKLMN = vld1q_f16(i4); i4 += 8;
      const float16x8_t vi4xOPQRSTUV = vld1q_f16(i4); i4 += 8;
      const float16x8_t vk4x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk4x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk4xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk4xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi4x01234567, vk4x01234567);
      vacc89ABCDEFp0 = vfmaq_f16(vacc89ABCDEFp0, vi4x89ABCDEF, vk4x89ABCDEF);
      vaccGHIJKLMNp0 = vfmaq_f16(vaccGHIJKLMNp0, vi4xGHIJKLMN, vk4xGHIJKLMN);
      vaccOPQRSTUVp0 = vfmaq_f16(vaccOPQRSTUVp0, vi4xOPQRSTUV, vk4xOPQRSTUV);

      // Tap 5 -> accumulator set 1.
      const float16x8_t vi5x01234567 = vld1q_f16(i5); i5 += 8;
      const float16x8_t vi5x89ABCDEF = vld1q_f16(i5); i5 += 8;
      const float16x8_t vi5xGHIJKLMN = vld1q_f16(i5); i5 += 8;
      const float16x8_t vi5xOPQRSTUV = vld1q_f16(i5); i5 += 8;
      const float16x8_t vk5x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk5x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk5xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk5xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi5x01234567, vk5x01234567);
      vacc89ABCDEFp1 = vfmaq_f16(vacc89ABCDEFp1, vi5x89ABCDEF, vk5x89ABCDEF);
      vaccGHIJKLMNp1 = vfmaq_f16(vaccGHIJKLMNp1, vi5xGHIJKLMN, vk5xGHIJKLMN);
      vaccOPQRSTUVp1 = vfmaq_f16(vaccOPQRSTUVp1, vi5xOPQRSTUV, vk5xOPQRSTUV);

      // Tap 6 -> accumulator set 0.
      const float16x8_t vi6x01234567 = vld1q_f16(i6); i6 += 8;
      const float16x8_t vi6x89ABCDEF = vld1q_f16(i6); i6 += 8;
      const float16x8_t vi6xGHIJKLMN = vld1q_f16(i6); i6 += 8;
      const float16x8_t vi6xOPQRSTUV = vld1q_f16(i6); i6 += 8;
      const float16x8_t vk6x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk6x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk6xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk6xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi6x01234567, vk6x01234567);
      vacc89ABCDEFp0 = vfmaq_f16(vacc89ABCDEFp0, vi6x89ABCDEF, vk6x89ABCDEF);
      vaccGHIJKLMNp0 = vfmaq_f16(vaccGHIJKLMNp0, vi6xGHIJKLMN, vk6xGHIJKLMN);
      vaccOPQRSTUVp0 = vfmaq_f16(vaccOPQRSTUVp0, vi6xOPQRSTUV, vk6xOPQRSTUV);

      // Tap 7 -> accumulator set 1.
      const float16x8_t vi7x01234567 = vld1q_f16(i7); i7 += 8;
      const float16x8_t vi7x89ABCDEF = vld1q_f16(i7); i7 += 8;
      const float16x8_t vi7xGHIJKLMN = vld1q_f16(i7); i7 += 8;
      const float16x8_t vi7xOPQRSTUV = vld1q_f16(i7); i7 += 8;
      const float16x8_t vk7x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk7x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk7xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk7xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi7x01234567, vk7x01234567);
      vacc89ABCDEFp1 = vfmaq_f16(vacc89ABCDEFp1, vi7x89ABCDEF, vk7x89ABCDEF);
      vaccGHIJKLMNp1 = vfmaq_f16(vaccGHIJKLMNp1, vi7xGHIJKLMN, vk7xGHIJKLMN);
      vaccOPQRSTUVp1 = vfmaq_f16(vaccOPQRSTUVp1, vi7xOPQRSTUV, vk7xOPQRSTUV);

      // Tap 8 -> accumulator set 0.
      const float16x8_t vi8x01234567 = vld1q_f16(i8); i8 += 8;
      const float16x8_t vi8x89ABCDEF = vld1q_f16(i8); i8 += 8;
      const float16x8_t vi8xGHIJKLMN = vld1q_f16(i8); i8 += 8;
      const float16x8_t vi8xOPQRSTUV = vld1q_f16(i8); i8 += 8;
      const float16x8_t vk8x01234567 = vld1q_f16(w); w += 8;
      const float16x8_t vk8x89ABCDEF = vld1q_f16(w); w += 8;
      const float16x8_t vk8xGHIJKLMN = vld1q_f16(w); w += 8;
      const float16x8_t vk8xOPQRSTUV = vld1q_f16(w); w += 8;
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi8x01234567, vk8x01234567);
      vacc89ABCDEFp0 = vfmaq_f16(vacc89ABCDEFp0, vi8x89ABCDEF, vk8x89ABCDEF);
      vaccGHIJKLMNp0 = vfmaq_f16(vaccGHIJKLMNp0, vi8xGHIJKLMN, vk8xGHIJKLMN);
      vaccOPQRSTUVp0 = vfmaq_f16(vaccOPQRSTUVp0, vi8xOPQRSTUV, vk8xOPQRSTUV);

      // Add up all accumulators to vacc0123456789ABCDEFGHIJKLMNOPQRSTUVp0
      vacc01234567p0 = vaddq_f16(vacc01234567p0, vacc01234567p1);
      vacc89ABCDEFp0 = vaddq_f16(vacc89ABCDEFp0, vacc89ABCDEFp1);
      vaccGHIJKLMNp0 = vaddq_f16(vaccGHIJKLMNp0, vaccGHIJKLMNp1);
      vaccOPQRSTUVp0 = vaddq_f16(vaccOPQRSTUVp0, vaccOPQRSTUVp1);

      // Clamp to [min, max] and store 32 outputs.
      float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin);
      float16x8_t vacc89ABCDEF = vmaxq_f16(vacc89ABCDEFp0, vmin);
      float16x8_t vaccGHIJKLMN = vmaxq_f16(vaccGHIJKLMNp0, vmin);
      float16x8_t vaccOPQRSTUV = vmaxq_f16(vaccOPQRSTUVp0, vmin);
      vacc01234567 = vminq_f16(vacc01234567, vmax);
      vacc89ABCDEF = vminq_f16(vacc89ABCDEF, vmax);
      vaccGHIJKLMN = vminq_f16(vaccGHIJKLMN, vmax);
      vaccOPQRSTUV = vminq_f16(vaccOPQRSTUV, vmax);

      vst1q_f16(output, vacc01234567); output += 8;
      vst1q_f16(output, vacc89ABCDEF); output += 8;
      vst1q_f16(output, vaccGHIJKLMN); output += 8;
      vst1q_f16(output, vaccOPQRSTUV); output += 8;
    }
    // Remainder loop: 8 channels per iteration. `w` only advances past the
    // bias here; the per-tap kernel vectors are addressed at fixed offsets
    // (w + 24, w + 56, ...) that account for the 32-wide packed layout.
    for (; c >= 8; c -= 8) {
      float16x8_t vacc01234567p0 = vld1q_f16(w); w += 8;

      const float16x8_t vi0x01234567 = vld1q_f16(i0); i0 += 8;
      const float16x8_t vk0x01234567 = vld1q_f16(w + 24);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi0x01234567, vk0x01234567);
      const float16x8_t vi1x01234567 = vld1q_f16(i1); i1 += 8;
      const float16x8_t vk1x01234567 = vld1q_f16(w + 56);
      float16x8_t vacc01234567p1 = vmulq_f16(vi1x01234567, vk1x01234567);
      const float16x8_t vi2x01234567 = vld1q_f16(i2); i2 += 8;
      const float16x8_t vk2x01234567 = vld1q_f16(w + 88);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi2x01234567, vk2x01234567);
      const float16x8_t vi3x01234567 = vld1q_f16(i3); i3 += 8;
      const float16x8_t vk3x01234567 = vld1q_f16(w + 120);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi3x01234567, vk3x01234567);
      const float16x8_t vi4x01234567 = vld1q_f16(i4); i4 += 8;
      const float16x8_t vk4x01234567 = vld1q_f16(w + 152);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi4x01234567, vk4x01234567);
      const float16x8_t vi5x01234567 = vld1q_f16(i5); i5 += 8;
      const float16x8_t vk5x01234567 = vld1q_f16(w + 184);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi5x01234567, vk5x01234567);
      const float16x8_t vi6x01234567 = vld1q_f16(i6); i6 += 8;
      const float16x8_t vk6x01234567 = vld1q_f16(w + 216);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi6x01234567, vk6x01234567);
      const float16x8_t vi7x01234567 = vld1q_f16(i7); i7 += 8;
      const float16x8_t vk7x01234567 = vld1q_f16(w + 248);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi7x01234567, vk7x01234567);
      const float16x8_t vi8x01234567 = vld1q_f16(i8); i8 += 8;
      const float16x8_t vk8x01234567 = vld1q_f16(w + 280);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi8x01234567, vk8x01234567);

      // Add up all accumulators to vacc01234567p0
      vacc01234567p0 = vaddq_f16(vacc01234567p0, vacc01234567p1);

      float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin);
      vacc01234567 = vminq_f16(vacc01234567, vmax);

      vst1q_f16(output, vacc01234567); output += 8;
    }
    // Tail: 1..7 leftover channels. Loads a full vector (XNN_OOB_READS
    // permits reading past the valid channels) but stores only `c` outputs.
    if XNN_UNLIKELY(c != 0) {
      float16x8_t vacc01234567p0 = vld1q_f16(w);

      const float16x8_t vi0x01234567 = vld1q_f16(i0);
      const float16x8_t vk0x01234567 = vld1q_f16(w + 32);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi0x01234567, vk0x01234567);
      const float16x8_t vi1x01234567 = vld1q_f16(i1);
      const float16x8_t vk1x01234567 = vld1q_f16(w + 64);
      float16x8_t vacc01234567p1 = vmulq_f16(vi1x01234567, vk1x01234567);
      const float16x8_t vi2x01234567 = vld1q_f16(i2);
      const float16x8_t vk2x01234567 = vld1q_f16(w + 96);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi2x01234567, vk2x01234567);
      const float16x8_t vi3x01234567 = vld1q_f16(i3);
      const float16x8_t vk3x01234567 = vld1q_f16(w + 128);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi3x01234567, vk3x01234567);
      const float16x8_t vi4x01234567 = vld1q_f16(i4);
      const float16x8_t vk4x01234567 = vld1q_f16(w + 160);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi4x01234567, vk4x01234567);
      const float16x8_t vi5x01234567 = vld1q_f16(i5);
      const float16x8_t vk5x01234567 = vld1q_f16(w + 192);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi5x01234567, vk5x01234567);
      const float16x8_t vi6x01234567 = vld1q_f16(i6);
      const float16x8_t vk6x01234567 = vld1q_f16(w + 224);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi6x01234567, vk6x01234567);
      const float16x8_t vi7x01234567 = vld1q_f16(i7);
      const float16x8_t vk7x01234567 = vld1q_f16(w + 256);
      vacc01234567p1 = vfmaq_f16(vacc01234567p1, vi7x01234567, vk7x01234567);
      const float16x8_t vi8x01234567 = vld1q_f16(i8);
      const float16x8_t vk8x01234567 = vld1q_f16(w + 288);
      vacc01234567p0 = vfmaq_f16(vacc01234567p0, vi8x01234567, vk8x01234567);

      // Add up all accumulators to vacc01234567p0
      vacc01234567p0 = vaddq_f16(vacc01234567p0, vacc01234567p1);

      float16x8_t vacc01234567 = vmaxq_f16(vacc01234567p0, vmin);
      vacc01234567 = vminq_f16(vacc01234567, vmax);

      // Store the remaining 1..7 outputs 4/2/1 lanes at a time.
      float16x4_t vacc0123 = vget_low_f16(vacc01234567);
      if (c & 4) {
        vst1_f16(output, vacc0123); output += 4;
        vacc0123 = vget_high_f16(vacc01234567);
      }
      if (c & 2) {
        vst1_lane_u32((void*) output, vreinterpret_u32_f16(vacc0123), 0); output += 2;
        vacc0123 = vext_f16(vacc0123, vacc0123, 2);
      }
      if (c & 1) {
        vst1_lane_f16(output, vacc0123, 0); output += 1;
      }
    }

    // Hop over the row padding to the next output pixel.
    output = (__fp16*) ((uintptr_t) output + output_increment);
  } while (--output_width != 0);
}
// This file is generated automatically by `scripts/build/fp.js`. Please, don't change it. import fn from "../../addYears/index.js"; import convertToFP from "../_lib/convertToFP/index.js"; export default convertToFP(fn, 2);
// Copyright (c) 2012, Jason Davies // All rights reserved. // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // * Redistributions of source code must retain the above copyright notice, this // list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // * The name Jason Davies may not be used to endorse or promote products // derived from this software without specific prior written permission. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND // ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED // WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE // DISCLAIMED. IN NO EVENT SHALL JASON DAVIES BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE // OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
(function() { // d3.parsets = function() { parsets = function() { var event = d3.dispatch("sortDimensions", "sortCategories"), // event is a d3.dispatch object dimensions_ = autoDimensions, dimensionFormat = String, tooltip_ = defaultTooltip, categoryTooltip = defaultCategoryTooltip, value_, spacing = 10, width = 500, // change this for height height = 680, tension = 0.6, tension0, duration = 500; function parsets(selection) { selection.each(function(data, i) { var g = d3.select(this), ordinal = d3.scale.ordinal(), dragging = false, dimensionNames = dimensions_.call(this, data, i), dimensions = [], tree = {children: {}, count: 0}, nodes, total, ribbon; d3.select(window).on("mousemove.parsets." + ++parsetsId, unhighlight); if (tension0 == null) tension0 = tension; g.selectAll(".ribbon, .ribbon-mouse") .data(["ribbon", "ribbon-mouse"], String) .enter().append("g") .attr("class", String); updateDimensions(); if (tension != tension0) { var t = d3.transition(g); if (t.tween) t.tween("ribbon", tensionTween); else tensionTween()(1); } function tensionTween() { var i = d3.interpolateNumber(tension0, tension); return function(t) { tension0 = i(t); ribbon.attr("d", ribbonPath); }; } function updateDimensions() { // Cache existing bound dimensions to preserve sort order. var dimension = g.selectAll("g.dimension"), cache = {}; dimension.each(function(d) { cache[d.name] = d; }); dimensionNames.forEach(function(d) { if (!cache.hasOwnProperty(d)) { cache[d] = {name: d, categories: []}; } dimensions.push(cache[d]); }); dimensions.sort(compareY); // Populate tree with existing nodes. g.select(".ribbon").selectAll("path") .each(function(d) { // console.log(d); // console.log(d.path); var path = d.path.split("\0"), node = tree, n = path.length - 1; // console.log(path); for (var i = 0; i < n; i++) { var p = path[i]; node = node.children.hasOwnProperty(p) ? 
node.children[p] : node.children[p] = {children: {}, count: 0}; } node.children[d.name] = d; // console.log(node); }); tree = buildTree(tree, data, dimensions.map(dimensionName), value_); // console.log(tree); cache = dimensions.map(function(d) { var t = {}; d.categories.forEach(function(c) { t[c.name] = c; }); return t; }); // console.log(cache); (function categories(d, i) { if (!d.children) return; var dim = dimensions[i], t = cache[i]; for (var k in d.children) { if (!t.hasOwnProperty(k)) { dim.categories.push(t[k] = {name: k}); } categories(d.children[k], i + 1); } dim.categories.sort(function(a, b) { return b.count - a.count;}); // added to move "None" to the rightmost position for (var inc=0; inc<dim.categories.length; inc++) { var cat_name = dim.categories[inc]['name'] if ((cat_name == "None" || cat_name == "N/A") && inc < dim.categories.length-1) { // swap "None" with the element at the end of the array // dim.categories[inc] = dim.categories.splice(dim.categories.length-1, 1, dim.categories[inc])[0]; var removed_item = dim.categories.splice(inc,1)[0]; dim.categories.splice(dim.categories.length,0,removed_item); } } // then reverse order of the array dim.categories.reverse(); })(tree, 0); ordinal.domain([]).range(d3.range(dimensions[0].categories.length)); nodes = layout(tree, dimensions, ordinal); total = getTotal(dimensions); dimensions.forEach(function(d) { d.count = total; }); dimension = dimension.data(dimensions, dimensionName); var dEnter = dimension.enter().append("g") .attr("class", "dimension") .attr("transform", translateY) //function(d) { return "translate(0," + d.y + ")"; }) .on("mousedown.parsets", cancelEvent); dimension.each(function(d) { d.y0 = d.y; d.categories.forEach(function(d) { d.x0 = d.x; }); }); dEnter.append("rect") .attr("width", width) .attr("y", -45) .attr("height", 45); var textEnter = dEnter.append("text") .attr("class", "dimension") // .attr("transform", "translate(0,-25)"); // ended here .attr("transform", "translate(" + 
width + ",0)rotate(90)") .attr("text-anchor", "middle") .attr("dy","-.6em"); // changed from "dx", ".6em" textEnter.append("tspan") .attr("class", "name") .text(function(d) { return d.name; }); // textEnter.append("tspan") // .attr("class", "sort alpha") // .attr("dx", "2em") // .text("alpha »") // .on("mousedown.parsets", cancelEvent); // textEnter.append("tspan") // .attr("class", "sort size") // .attr("dx", "2em") // .text("size »") // .on("mousedown.parsets", cancelEvent); dimension .call(d3.behavior.drag() .origin(identity) .on("dragstart", function(d) { dragging = true; d.y0 = d.y; }) .on("drag", function(d) { d.y0 = d.y = (window.event.x - 300);// d.y0 = d.y = d3.event.y; for (var i = 1; i < dimensions.length; i++) { if (height * dimensions[i].y < height * dimensions[i - 1].y) { dimensions.sort(compareY); dimensionNames = dimensions.map(dimensionName); ordinal.domain([]).range(d3.range(dimensions[0].categories.length)); nodes = layout(tree = buildTree({children: {}, count: 0}, data, dimensionNames, value_), dimensions, ordinal); total = getTotal(dimensions); g.selectAll(".ribbon, .ribbon-mouse").selectAll("path").remove(); updateRibbons(); updateCategories(dimension); dimension.transition().duration(duration) .attr("transform", translateY)//function(d) { return "translate(" + d.y + ",960)rotate(270)"; })//translateY) .tween("ribbon", ribbonTweenY); event.sortDimensions(); break; } } d3.select(this) .attr("transform", translateY) .transition(); ribbon.filter(function(r) { return r.source.dimension === d || r.target.dimension === d; }) .attr("d", ribbonPath); }) .on("dragend", function(d) { dragging = false; unhighlight(); var y0 = 45, dy = (height - y0 - 2) / (dimensions.length - 1); dimensions.forEach(function(d, i) { d.y = y0 + i * dy; }); transition(d3.select(this)) .attr("transform", translateY) .tween("ribbon", ribbonTweenY); })); dimension.select("text").select("tspan.sort.alpha") .on("click.parsets", sortBy("alpha", function(a, b) { return a.name < 
b.name ? 1 : -1; }, dimension)); dimension.select("text").select("tspan.sort.size") .on("click.parsets", sortBy("size", function(a, b) { return a.count - b.count; }, dimension)); dimension.transition().duration(duration) .attr("transform", translateY) .tween("ribbon", ribbonTweenY); dimension.exit().remove(); updateCategories(dimension); updateRibbons(); } function sortBy(type, f, dimension) { return function(d) { var direction = this.__direction = -(this.__direction || 1); d3.select(this).text(direction > 0 ? type + " »" : "« " + type); d.categories.sort(function() { return direction * f.apply(this, arguments); }); nodes = layout(tree, dimensions, ordinal); updateCategories(dimension); updateRibbons(); event.sortCategories(); }; } function updateRibbons() { // console.log("updating ribbons"); ribbon = g.select(".ribbon").selectAll("path") .data(nodes, function(d) { return d.path; }); ribbon.enter().append("path") .each(function(d) { // console.log(d); // object w keys: children, count, dimension, major, name, node, parent, source, target // d.source {dimension: {name: , y: , y0: }, dx: , node: {}, x: , x0: } // d.source and d.target have dx, x and x0 // d.source.dimension and d.target.dimension have y and y0 d.source.x0 = d.source.x; d.target.x0 = d.target.x; // console.log(d); }) .attr("class", function(d) { return "category-" + d.major; }) .attr("d", ribbonPath); ribbon.sort(function(a, b) { return b.count - a.count; }); ribbon.exit().remove(); var mouse = g.select(".ribbon-mouse").selectAll("path") .data(nodes, function(d) { return d.path; }); mouse.enter().append("path") .on("mousemove.parsets", function(d) { ribbon.classed("active", false); if (dragging) return; highlight(d = d.node, true); showTooltip(tooltip_.call(this, d)); // console.log('path tooltip d is '); // console.log(d); d3.event.stopPropagation(); }); mouse .sort(function(a, b) { return b.count - a.count; }) .attr("d", ribbonPathStatic); mouse.on('click', function(d) { var parent = d.parent; var 
item; while (parent.parent) { item = $('#' + parent.dimension + '-' + parent.name); if (item.length > 0) { item[0].checked = 'checked'; } parent = parent.parent; } item = $('#' + d.dimension + '-' + d.name); if (item.length > 0) { item[0].checked = 'checked'; $(item[0]).trigger('change'); } }); mouse.exit().remove(); } // Animates the x-coordinates only of the relevant ribbon paths. function ribbonTweenX(d) { var nodes = [d], r = ribbon.filter(function(r) { var s, t; if (r.source.node === d) nodes.push(s = r.source); if (r.target.node === d) nodes.push(t = r.target); return s || t; }), i = nodes.map(function(d) { return d3.interpolateNumber(d.x0, d.x); }), n = nodes.length; return function(t) { for (var j = 0; j < n; j++) nodes[j].x0 = i[j](t); r.attr("d", ribbonPath); }; } // Animates the y-coordinates only of the relevant ribbon paths. function ribbonTweenY(d) { var r = ribbon.filter(function(r) { return r.source.dimension.name == d.name || r.target.dimension.name == d.name; }), i = d3.interpolateNumber(d.y0, d.y); return function(t) { d.y0 = i(t); r.attr("d", ribbonPath); }; } // Highlight a node and its descendants, and optionally its ancestors. function highlight(d, ancestors) { if (dragging) return; var highlight = []; (function recurse(d) { highlight.push(d); for (var k in d.children) recurse(d.children[k]); })(d); highlight.shift(); if (ancestors) while (d) highlight.push(d), d = d.parent; ribbon.filter(function(d) { var active = highlight.indexOf(d.node) >= 0; if (active) this.parentNode.appendChild(this); return active; }).classed("active", true); } // Unhighlight all nodes. 
function unhighlight() { if (dragging) return; ribbon.classed("active", false); hideTooltip(); } function updateCategories(g) { var category = g.selectAll("g.category") .data(function(d) { return d.categories; }, function(d) { return d.name; }); var categoryEnter = category.enter().append("g") .attr("class", "category") .attr("transform", function(d) { return "translate(" + d.x + ")"; }); category.exit().remove(); category .on("mousemove.parsets", function(d) { ribbon.classed("active", false); if (dragging) return; d.nodes.forEach(function(d) { highlight(d); }); showTooltip(categoryTooltip.call(this, d)); // console.log('d is'); // console.log(d); // console.log('parameter is'); // console.log(categoryTooltip.call(this, d)); d3.event.stopPropagation(); }) .on("mouseout.parsets", unhighlight) .on("mousedown.parsets", cancelEvent) .call(d3.behavior.drag() .origin(identity) .on("dragstart", function(d) { dragging = true; d.x0 = d.x; }) .on("drag", function(d) { // console.log('d3.event.x'); // console.log(d3.event.x); d.x = d3.event.x; var categories = d.dimension.categories; for (var i = 0, c = categories[0]; ++i < categories.length;) { if (c.x + c.dx / 2 > (c = categories[i]).x + c.dx / 2) { categories.sort(function(a, b) { return a.x + a.dx / 2 - b.x - b.dx / 2; }); nodes = layout(tree, dimensions, ordinal); updateRibbons(); updateCategories(g); highlight(d.node); event.sortCategories(); break; } } var x = 0, p = spacing / (categories.length - 1); categories.forEach(function(e) { if (d === e) e.x0 = d3.event.x; e.x = x; x += e.count / total * (width - spacing) + p; }); d3.select(this) .attr("transform", function(d) { return "translate(" + d.x0 + ")"; }) .transition(); ribbon.filter(function(r) { return r.source.node === d || r.target.node === d; }) .attr("d", ribbonPath); }) .on("dragend", function(d) { dragging = false; unhighlight(); updateRibbons(); transition(d3.select(this)) .attr("transform", "translate(" + d.x + ")") .tween("ribbon", ribbonTweenX); })); 
category.transition().duration(duration) .attr("transform", function(d) { return "translate(" + d.x + ")"; }) .tween("ribbon", ribbonTweenX); categoryEnter.append("rect") .attr("width", function(d) { return d.dx; }) .attr("y", -20) .attr("height", 20); categoryEnter.append("line") .style("stroke-width", 2); categoryEnter.append("text") .attr("dx", "-.3em")//.attr("dy", "-.3em"); .attr("text-anchor", "end") // new .attr("transform", function(d) { return "translate(" + (d.dx/2) + ",0)rotate(90)"; }); //new category.select("rect") .attr("width", function(d) { return d.dx; }) .attr("class", function(d) { return "category-" + (d.dimension === dimensions[0] ? ordinal(d.name) : "background"); }); category.select("line") .attr("x2", function(d) { return d.dx; }); category.select("text") .text(function(d) { return d.name; });//truncateText(function(d) { return d.name; }, function(d) { return d.dx; })); } }); } // end of function parsets(selection) parsets.dimensionFormat = function(_) { if (!arguments.length) return dimensionFormat; dimensionFormat = _; return parsets; }; parsets.dimensions = function(_) { if (!arguments.length) return dimensions_; dimensions_ = d3.functor(_); return parsets; }; parsets.value = function(_) { if (!arguments.length) return value_; value_ = d3.functor(_); return parsets; }; parsets.width = function(_) { if (!arguments.length) return width; width = +_; return parsets; }; parsets.height = function(_) { if (!arguments.length) return height; height = +_; return parsets; }; parsets.spacing = function(_) { if (!arguments.length) return spacing; spacing = +_; return parsets; }; parsets.tension = function(_) { if (!arguments.length) return tension; tension = +_; return parsets; }; parsets.duration = function(_) { if (!arguments.length) return duration; duration = +_; return parsets; }; parsets.tooltip = function(_) { if (!arguments.length) return tooltip; tooltip_ = _ == null ? 
defaultTooltip : _; return parsets; }; parsets.categoryTooltip = function(_) { if (!arguments.length) return categoryTooltip; categoryTooltip = _ == null ? defaultCategoryTooltip : _; // console.log('categoryTooltip arguments are:') // console.log(_); // console.log('defaultCategoryTooltip is ' + defaultCategoryTooltip); // console.log('categoryTooltip is ' + categoryTooltip); return parsets; }; var body = d3.select("body"); // s-paquette@ISB: Don't remake the tooltip DOM object if it's already there var tooltip = (!d3.select(".parsets-tooltip").empty() ? d3.select(".parsets-tooltip") : body.append("div") .style("display", "none") .attr("class", "parsets-tooltip")); // copies event.on to parsets.on and returns parsets return d3.rebind(parsets, event, "on").value(1).width(width).height(height);//height was once 600); function dimensionFormatName(d, i) { return dimensionFormat.call(this, d.name, i); } function showTooltip(html) { // console.log('showing '+html); var m = d3.mouse(body.node()); tooltip .style("display", null) .style("left", m[0] + 30 + "px") .style("top", m[1] - 20 + "px") .html(html); } function hideTooltip() { tooltip.style("display", "none"); } function transition(g) { return duration ? g.transition().duration(duration).ease(parsetsEase) : g; } function layout(tree, dimensions, ordinal) { var nodes = [], nd = dimensions.length, y0 = 45, dy = (height - y0 - 2) / (nd - 1); dimensions.forEach(function(d, i) { d.categories.forEach(function(c) { c.dimension = d; c.count = 0; c.nodes = []; }); d.y = y0 + i * dy; }); // Compute per-category counts. var total = (function rollup(d, i) { if (!d.children) return d.count; var dim = dimensions[i], total = 0; dim.categories.forEach(function(c) { var child = d.children[c.name]; if (!child) return; c.nodes.push(child); var count = rollup(child, i + 1); c.count += count; total += count; }); return total; })(tree, 0); // Stack the counts. 
dimensions.forEach(function(d) { d.categories = d.categories.filter(function(d) { return d.count; }); var x = 0, p = spacing / (d.categories.length - 1); d.categories.forEach(function(c) { c.x = x; c.dx = c.count / total * (width - spacing); c.in = {dx: 0}; c.out = {dx: 0}; x += c.dx + p; }); }); var dim = dimensions[0]; dim.categories.forEach(function(c) { var k = c.name; if (tree.children.hasOwnProperty(k)) { recurse(c, {node: tree.children[k], path: k}, 1, ordinal(k)); } }); function recurse(p, d, depth, major) { var node = d.node, dimension = dimensions[depth]; dimension.categories.forEach(function(c) { var k = c.name; if (!node.children.hasOwnProperty(k)) return; var child = node.children[k]; child.path = d.path + "\0" + k; var target = child.target || {node: c, dimension: dimension}; target.x = c.in.dx; target.dx = child.count / total * (width - spacing); c.in.dx += target.dx; var source = child.source || {node: p, dimension: dimensions[depth - 1]}; source.x = p.out.dx; source.dx = target.dx; p.out.dx += source.dx; // // trying to flip axes // target.y = c.in.dy; // target.dy = child.count / total * (width - spacing); // c.in.dy += target.dy; // var source = child.source || {node: p, dimension: dimensions[depth - 1]}; // source.y = p.out.dy; // source.dy = target.dy; // p.out.dy += source.dy; child.node = child; child.source = source; child.target = target; child.major = major; nodes.push(child); if (depth + 1 < dimensions.length) recurse(c, child, depth + 1, major); }); } return nodes; } // Dynamic path string for transitions. function ribbonPath(d) { var s = d.source, t = d.target; return ribbonPathString(s.node.x0 + s.x0, s.dimension.y0, s.dx, t.node.x0 + t.x0, t.dimension.y0, t.dx, tension0); } // Static path string for mouse handlers. 
function ribbonPathStatic(d) { var s = d.source, t = d.target; return ribbonPathString(s.node.x + s.x, s.dimension.y, s.dx, t.node.x + t.x, t.dimension.y, t.dx, tension); } function ribbonPathString(sx, sy, sdx, tx, ty, tdx, tension) { var m0, m1; return (tension === 1 ? [ "M", [sx, sy], "L", [tx, ty], "h", tdx, "L", [sx + sdx, sy], "Z"] : ["M", [sx, sy], "C", [sx, m0 = tension * sy + (1 - tension) * ty], " ", [tx, m1 = tension * ty + (1 - tension) * sy], " ", [tx, ty], "h", tdx, "C", [tx + tdx, m1], " ", [sx + sdx, m0], " ", [sx + sdx, sy], "Z"]).join(""); } function compareY(a, b) { a = height * a.y, b = height * b.y; return a < b ? -1 : a > b ? 1 : a >= b ? 0 : a <= a ? -1 : b <= b ? 1 : NaN; } }; // end of parsets = function() (formerly d3.parsets = function()) //d3.parsets.tree = buildTree; parsets.tree = buildTree; // first step: define tree function in parsets as buildTree function function autoDimensions(d) { return d.length ? d3.keys(d[0]).sort() : []; } function cancelEvent() { d3.event.stopPropagation(); d3.event.preventDefault(); } function dimensionName(d) { return d.name; } function getTotal(dimensions) { return dimensions[0].categories.reduce(function(a, d) { return a + d.count; }, 0); } // Given a text function and width function, truncates the text if necessary to // fit within the given width. function truncateText(text, width) { return function(d, i) { var t = this.textContent = text(d, i), w = width(d, i); if (this.getComputedTextLength() < w) return t; this.textContent = "…" + t; var lo = 0, hi = t.length + 1, x; while (lo < hi) { var mid = lo + hi >> 1; if ((x = this.getSubStringLength(0, mid)) < w) lo = mid + 1; else hi = mid; } return lo > 1 ? t.substr(0, lo - 2) + "…" : ""; }; } var percent = d3.format("%"), comma = d3.format(",f"), parsetsEase = "elastic", parsetsId = 0; // Construct tree of all category counts for a given ordered list of // dimensions. Similar to d3.nest, except we also set the parent. 
function buildTree(root, data, dimensions, value) { zeroCounts(root); var n = data.length, nd = dimensions.length; for (var i = 0; i < n; i++) { var d = data[i], v = value(d, i), node = root; for (var j = 0; j < nd; j++) { var dimension = dimensions[j], category = d[dimension], children = node.children; // console.log(children); // children are ribbons node.count += v; node = children.hasOwnProperty(category) ? children[category] : children[category] = { children: j === nd - 1 ? null : {}, count: 0, parent: node, dimension: dimension, name: category }; // console.log('node'); // console.log(node); // nodes have count, dimension, name, node{}, parent{}, source{}, target{} // node.source and node.target have dx (also x, x0 which are 0 at this time) // node.source.dimension and node.target.dimension have y and y0 } node.count += v; } return root; } function zeroCounts(d) { d.count = 0; if (d.children) { for (var k in d.children) zeroCounts(d.children[k]); } } function identity(d) { return d; } function translateY(d) { return "translate(" + (d.y+0) + ",550)rotate(270)";} function defaultTooltip(d) { var count = d.count, path = []; while (d.parent) { if (d.name) path.unshift(d.dimension + ': ' + d.name);//d.name); d = d.parent; } return path.join(" → ") + "<br>" + comma(count) + " (" + percent(count / d.count) + ")"; } function defaultCategoryTooltip(d) { return d.dimension.name + ": " + d.name + "<br>" + comma(d.count) + " (" + percent(d.count / d.dimension.count) + ")"; } })();
import validateHttpStatus from '../validate-http-status';

/**
 * @description
 * Validate HTTP Status code 417 type CLIENT ERROR
 *
 * @param {Integer} statusCode - The HTTP Status code
 * @return {Boolean}
 * @throws {HTTPStatusError} When the statusCode is different than 417
 */
const isExpectationFailed = (statusCode) => validateHttpStatus(statusCode, 417);

export default isExpectationFailed;
#!/usr/bin/python import os, sys, string from low import * from orthomcl import OrthoMCLCluster # ============================================================================= def usage(): print >> sys.stderr, "prints a mapping between each gene id and its cluster from orthomcl output\n" print >> sys.stderr, "usage: " + sys.argv[0] + " orthomcl.out" sys.exit(1) def plausi(): if len(sys.argv) != 2: usage() inFile = sys.argv[1] return inFile def main(): inFile = plausi() fo = open(inFile) for line in fo: o = OrthoMCLCluster(line.rstrip()) name = o.get_name() geneHash = o.get_gene_hash() for geneid, species in geneHash.iteritems(): print geneid + "\t" + name main()
from verify_email import verify_email


def main():
    """Prompt for an email address and report whether it is valid."""
    address = input("Enter the email to verify: ")
    # verify_email() may return True, False or None (when the check is
    # inconclusive).  The original compared `== False`, so an inconclusive
    # None was reported as valid; only an explicit True counts as valid now.
    if verify_email(address) is True:
        print("Valid email")
    else:
        print("Invalid Email")


if __name__ == "__main__":
    main()
import argparse import datetime import numpy as np import time import torch import torch.backends.cudnn as cudnn import json from pathlib import Path from timm.data import Mixup from timm.models import create_model from timm.loss import LabelSmoothingCrossEntropy, SoftTargetCrossEntropy from timm.scheduler import create_scheduler from timm.optim import create_optimizer from timm.utils import NativeScaler, get_state_dict, ModelEma from datasets import build_dataset from engine import train_one_epoch, evaluate from samplers import RASampler import utils import models import ipdb # from fvcore.nn import FlopCountAnalysis def get_args_parser(): parser = argparse.ArgumentParser('DeiT training and evaluation script', add_help=False) parser.add_argument('--batch-size', default=2, type=int) parser.add_argument('--epochs', default=300, type=int) # Model parameters parser.add_argument('--model', default='Conformer_tiny_patch16', type=str, metavar='MODEL', help='Name of model to train') parser.add_argument('--input-size', default=224, type=int, help='images input size') parser.add_argument('--drop', type=float, default=0.0, metavar='PCT', help='Dropout rate (default: 0.)') parser.add_argument('--drop-path', type=float, default=0.1, metavar='PCT', help='Drop path rate (default: 0.1)') parser.add_argument('--drop-block', type=float, default=None, metavar='PCT', help='Drop block rate (default: None)') parser.add_argument('--model-ema', action='store_true') parser.add_argument('--no-model-ema', action='store_false', dest='model_ema') parser.set_defaults(model_ema=True) parser.add_argument('--model-ema-decay', type=float, default=0.99996, help='') parser.add_argument('--model-ema-force-cpu', action='store_true', default=False, help='') # Optimizer parameters parser.add_argument('--opt', default='adamw', type=str, metavar='OPTIMIZER', help='Optimizer (default: "adamw"') parser.add_argument('--opt-eps', default=1e-8, type=float, metavar='EPSILON', help='Optimizer Epsilon (default: 
1e-8)') parser.add_argument('--opt-betas', default=None, type=float, nargs='+', metavar='BETA', help='Optimizer Betas (default: None, use opt default)') parser.add_argument('--clip-grad', type=float, default=None, metavar='NORM', help='Clip gradient norm (default: None, no clipping)') parser.add_argument('--momentum', type=float, default=0.9, metavar='M', help='SGD momentum (default: 0.9)') parser.add_argument('--weight-decay', type=float, default=0.05, help='weight decay (default: 0.05)') # Learning rate schedule parameters parser.add_argument('--sched', default='cosine', type=str, metavar='SCHEDULER', help='LR scheduler (default: "cosine"') parser.add_argument('--lr', type=float, default=5e-4, metavar='LR', help='learning rate (default: 5e-4)') parser.add_argument('--lr-noise', type=float, nargs='+', default=None, metavar='pct, pct', help='learning rate noise on/off epoch percentages') parser.add_argument('--lr-noise-pct', type=float, default=0.67, metavar='PERCENT', help='learning rate noise limit percent (default: 0.67)') parser.add_argument('--lr-noise-std', type=float, default=1.0, metavar='STDDEV', help='learning rate noise std-dev (default: 1.0)') parser.add_argument('--warmup-lr', type=float, default=1e-6, metavar='LR', help='warmup learning rate (default: 1e-6)') parser.add_argument('--min-lr', type=float, default=1e-5, metavar='LR', help='lower lr bound for cyclic schedulers that hit 0 (1e-5)') parser.add_argument('--decay-epochs', type=float, default=30, metavar='N', help='epoch interval to decay LR') parser.add_argument('--warmup-epochs', type=int, default=5, metavar='N', help='epochs to warmup LR, if scheduler supports') parser.add_argument('--cooldown-epochs', type=int, default=10, metavar='N', help='epochs to cooldown LR at min_lr, after cyclic schedule ends') parser.add_argument('--patience-epochs', type=int, default=10, metavar='N', help='patience epochs for Plateau LR scheduler (default: 10') parser.add_argument('--decay-rate', '--dr', 
type=float, default=0.1, metavar='RATE', help='LR decay rate (default: 0.1)') # Augmentation parameters parser.add_argument('--color-jitter', type=float, default=0.4, metavar='PCT', help='Color jitter factor (default: 0.4)') parser.add_argument('--aa', type=str, default='rand-m9-mstd0.5-inc1', metavar='NAME', help='Use AutoAugment policy. "v0" or "original". " + \ "(default: rand-m9-mstd0.5-inc1)'), parser.add_argument('--smoothing', type=float, default=0.1, help='Label smoothing (default: 0.1)') parser.add_argument('--train-interpolation', type=str, default='bicubic', help='Training interpolation (random, bilinear, bicubic default: "bicubic")') parser.add_argument('--repeated-aug', action='store_true') parser.add_argument('--no-repeated-aug', action='store_false', dest='repeated_aug') parser.set_defaults(repeated_aug=True) # * Random Erase params parser.add_argument('--reprob', type=float, default=0.25, metavar='PCT', help='Random erase prob (default: 0.25)') parser.add_argument('--remode', type=str, default='pixel', help='Random erase mode (default: "pixel")') parser.add_argument('--recount', type=int, default=1, help='Random erase count (default: 1)') parser.add_argument('--resplit', action='store_true', default=False, help='Do not random erase first (clean) augmentation split') # * Mixup params parser.add_argument('--mixup', type=float, default=0.8, help='mixup alpha, mixup enabled if > 0. (default: 0.8)') parser.add_argument('--cutmix', type=float, default=1.0, help='cutmix alpha, cutmix enabled if > 0. 
(default: 1.0)') parser.add_argument('--cutmix-minmax', type=float, nargs='+', default=None, help='cutmix min/max ratio, overrides alpha and enables cutmix if set (default: None)') parser.add_argument('--mixup-prob', type=float, default=1.0, help='Probability of performing mixup or cutmix when either/both is enabled') parser.add_argument('--mixup-switch-prob', type=float, default=0.5, help='Probability of switching to cutmix when both mixup and cutmix enabled') parser.add_argument('--mixup-mode', type=str, default='batch', help='How to apply mixup/cutmix params. Per "batch", "pair", or "elem"') # Dataset parameters parser.add_argument('--data-path', default='D:/AI/AIData/cat-dog-panda/', type=str, help='dataset path') parser.add_argument('--data-set', default='cat-dog-panda', choices=['CIFAR', 'CIFAR10', 'IMNET', 'INAT', 'INAT19', 'cat-dog-panda'], type=str, help='Image Net dataset path') parser.add_argument('--inat-category', default='name', choices=['kingdom', 'phylum', 'class', 'order', 'supercategory', 'family', 'genus', 'name'], type=str, help='semantic granularity') # * Finetuning params parser.add_argument('--finetune', default='./weights/Conformer_tiny_patch16.pth', help='finetune from checkpoint') parser.add_argument('--evaluate-freq', type=int, default=1, help='frequency of perform evaluation (default: 5)') parser.add_argument('--output_dir', default='./output/Conformer_tiny_patch16_batch_32_lr1e-3_300epochs', help='path where to save, empty for no saving') parser.add_argument('--device', default='cuda', help='device to use for training / testing') parser.add_argument('--seed', default=0, type=int) parser.add_argument('--resume', default='', help='resume from checkpoint') parser.add_argument('--start_epoch', default=0, type=int, metavar='N', help='start epoch') parser.add_argument('--eval', action='store_true', help='Perform evaluation only') parser.add_argument('--num_workers', default=0, type=int) parser.add_argument('--pin-mem', action='store_true', 
help='Pin CPU memory in DataLoader for more efficient (sometimes) transfer to GPU.') parser.add_argument('--no-pin-mem', action='store_false', dest='pin_mem', help='') parser.set_defaults(pin_mem=True) # distributed training parameters parser.add_argument('--world_size', default=1, type=int, help='number of distributed processes') parser.add_argument('--dist_url', default='env://', help='url used to set up distributed training') return parser def main(args): utils.init_distributed_mode(args) print(args) device = torch.device(args.device) # fix the seed for reproducibility seed = args.seed + utils.get_rank() torch.manual_seed(seed) np.random.seed(seed) # random.seed(seed) cudnn.benchmark = True dataset_train, args.nb_classes = build_dataset(is_train=True, args=args) dataset_val, _ = build_dataset(is_train=False, args=args) if True: # args.distributed: num_tasks = utils.get_world_size() global_rank = utils.get_rank() if args.repeated_aug: sampler_train = RASampler( dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True ) else: sampler_train = torch.utils.data.DistributedSampler( dataset_train, num_replicas=num_tasks, rank=global_rank, shuffle=True ) else: sampler_train = torch.utils.data.RandomSampler(dataset_train) data_loader_train = torch.utils.data.DataLoader( dataset_train, sampler=sampler_train, batch_size=args.batch_size, num_workers=args.num_workers, pin_memory=args.pin_mem, drop_last=True, ) data_loader_val = torch.utils.data.DataLoader( dataset_val, batch_size=int(3.0 * args.batch_size), shuffle=False, num_workers=args.num_workers, pin_memory=args.pin_mem, drop_last=False ) mixup_fn = None mixup_active = args.mixup > 0 or args.cutmix > 0. 
or args.cutmix_minmax is not None if mixup_active: mixup_fn = Mixup( mixup_alpha=args.mixup, cutmix_alpha=args.cutmix, cutmix_minmax=args.cutmix_minmax, prob=args.mixup_prob, switch_prob=args.mixup_switch_prob, mode=args.mixup_mode, label_smoothing=args.smoothing, num_classes=args.nb_classes) print(f"Creating model: {args.model}") model = create_model( args.model, pretrained=False, num_classes=args.nb_classes, drop_rate=args.drop, drop_path_rate=args.drop_path, drop_block_rate=args.drop_block, ) if args.finetune: if args.finetune.startswith('https'): checkpoint = torch.hub.load_state_dict_from_url( args.finetune, map_location='cpu', check_hash=True) else: checkpoint = torch.load(args.finetune, map_location='cpu') if 'model' in checkpoint.keys(): checkpoint_model = checkpoint['model'] else: checkpoint_model = checkpoint state_dict = model.state_dict() # ipdb.set_trace() for k in ['head.weight', 'head.bias', 'head_dist.weight', 'head_dist.bias', 'trans_cls_head.weight', 'trans_cls_head.bias', 'conv_cls_head.weight', 'conv_cls_head.bias']: if k in checkpoint_model and checkpoint_model[k].shape != state_dict[k].shape: print(f"Removing key {k} from pretrained checkpoint") del checkpoint_model[k] if 'pos_embed' in checkpoint_model.keys(): # interpolate position embedding pos_embed_checkpoint = checkpoint_model['pos_embed'] embedding_size = pos_embed_checkpoint.shape[-1] num_patches = model.patch_embed.num_patches num_extra_tokens = model.pos_embed.shape[-2] - num_patches # height (== width) for the checkpoint position embedding orig_size = int((pos_embed_checkpoint.shape[-2] - num_extra_tokens) ** 0.5) # height (== width) for the new position embedding new_size = int(num_patches ** 0.5) # class_token and dist_token are kept unchanged extra_tokens = pos_embed_checkpoint[:, :num_extra_tokens] # only the position tokens are interpolated pos_tokens = pos_embed_checkpoint[:, num_extra_tokens:] pos_tokens = pos_tokens.reshape(-1, orig_size, orig_size, 
embedding_size).permute(0, 3, 1, 2) pos_tokens = torch.nn.functional.interpolate( pos_tokens, size=(new_size, new_size), mode='bicubic', align_corners=False) pos_tokens = pos_tokens.permute(0, 2, 3, 1).flatten(1, 2) new_pos_embed = torch.cat((extra_tokens, pos_tokens), dim=1) checkpoint_model['pos_embed'] = new_pos_embed model.load_state_dict(checkpoint_model, strict=False) model.to(device) model_ema = None if args.model_ema: # Important to create EMA model after cuda(), DP wrapper, and AMP but before SyncBN and DDP wrapper model_ema = ModelEma( model, decay=args.model_ema_decay, device='cpu' if args.model_ema_force_cpu else '', resume='') model_without_ddp = model if args.distributed: model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu]) model_without_ddp = model.module n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad) print('number of params:', n_parameters) # linear_scaled_lr = args.lr * args.batch_size * utils.get_world_size() / 512.0 # args.lr = linear_scaled_lr optimizer = create_optimizer(args, model) loss_scaler = NativeScaler() lr_scheduler, _ = create_scheduler(args, optimizer) criterion = LabelSmoothingCrossEntropy() if args.mixup > 0.: # smoothing is handled with mixup label transform criterion = SoftTargetCrossEntropy() elif args.smoothing: criterion = LabelSmoothingCrossEntropy(smoothing=args.smoothing) else: criterion = torch.nn.CrossEntropyLoss() output_dir = Path(args.output_dir) if args.resume: if args.resume.startswith('https'): checkpoint = torch.hub.load_state_dict_from_url( args.resume, map_location='cpu', check_hash=True) else: checkpoint = torch.load(args.resume, map_location='cpu') # ipdb.set_trace() if 'model' in checkpoint.keys(): model_without_ddp.load_state_dict(checkpoint['model']) else: model_without_ddp.load_state_dict(checkpoint) if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint: 
optimizer.load_state_dict(checkpoint['optimizer']) lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) args.start_epoch = checkpoint['epoch'] + 1 if args.model_ema: utils._load_checkpoint_for_ema(model_ema, checkpoint['model_ema']) if args.eval: test_stats = evaluate(data_loader_val, model, device) print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%") return print("Start training") start_time = time.time() max_accuracy = 0.0 for epoch in range(args.start_epoch, args.epochs): if args.distributed: data_loader_train.sampler.set_epoch(epoch) train_stats = train_one_epoch( model, criterion, data_loader_train, optimizer, device, epoch, loss_scaler, args.clip_grad, model_ema, mixup_fn, set_training_mode=args.finetune == '' # keep in eval mode during finetuning ) lr_scheduler.step(epoch) if args.output_dir: checkpoint_paths = [output_dir / 'checkpoint.pth'] for checkpoint_path in checkpoint_paths: utils.save_on_master({ 'model': model_without_ddp.state_dict(), 'optimizer': optimizer.state_dict(), 'lr_scheduler': lr_scheduler.state_dict(), 'epoch': epoch, 'model_ema': get_state_dict(model_ema), 'args': args, }, checkpoint_path) if epoch % args.evaluate_freq == 0: test_stats = evaluate(data_loader_val, model, device) print(f"Accuracy of the network on the {len(dataset_val)} test images: {test_stats['acc1']:.1f}%") max_accuracy = max(max_accuracy, test_stats["acc1"]) print(f'Max accuracy: {max_accuracy:.2f}%') log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, **{f'test_{k}': v for k, v in test_stats.items()}, 'epoch': epoch, 'n_parameters': n_parameters} if args.output_dir and utils.is_main_process(): with (output_dir / "log.txt").open("a") as f: f.write(json.dumps(log_stats) + "\n") total_time = time.time() - start_time total_time_str = str(datetime.timedelta(seconds=int(total_time))) print('Training time {}'.format(total_time_str)) if __name__ == '__main__': parser = argparse.ArgumentParser('DeiT training and 
evaluation script', parents=[get_args_parser()]) args = parser.parse_args() if args.output_dir: Path(args.output_dir).mkdir(parents=True, exist_ok=True) main(args)
import * as i0 from '@angular/core'; import { Component, ChangeDetectionStrategy, ViewEncapsulation, Input, NgModule } from '@angular/core'; import * as i1 from '@angular/common'; import { CommonModule } from '@angular/common'; class ProgressSpinner { constructor() { this.strokeWidth = "2"; this.fill = "none"; this.animationDuration = "2s"; } } ProgressSpinner.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "12.0.5", ngImport: i0, type: ProgressSpinner, deps: [], target: i0.ɵɵFactoryTarget.Component }); ProgressSpinner.ɵcmp = i0.ɵɵngDeclareComponent({ minVersion: "12.0.0", version: "12.0.5", type: ProgressSpinner, selector: "p-progressSpinner", inputs: { style: "style", styleClass: "styleClass", strokeWidth: "strokeWidth", fill: "fill", animationDuration: "animationDuration" }, host: { classAttribute: "p-element" }, ngImport: i0, template: ` <div class="p-progress-spinner" [ngStyle]="style" [ngClass]="styleClass" role="alert" aria-busy="true"> <svg class="p-progress-spinner-svg" viewBox="25 25 50 50" [style.animation-duration]="animationDuration"> <circle class="p-progress-spinner-circle" cx="50" cy="50" r="20" [attr.fill]="fill" [attr.stroke-width]="strokeWidth" stroke-miterlimit="10"/> </svg> </div> `, isInline: true, styles: [".p-progress-spinner{position:relative;margin:0 auto;width:100px;height:100px;display:inline-block}.p-progress-spinner:before{content:\"\";display:block;padding-top:100%}.p-progress-spinner-svg{animation:p-progress-spinner-rotate 2s linear infinite;height:100%;transform-origin:center center;width:100%;position:absolute;top:0;bottom:0;left:0;right:0;margin:auto}.p-progress-spinner-circle{stroke-dasharray:89,200;stroke-dashoffset:0;stroke:#d62d20;animation:p-progress-spinner-dash 1.5s ease-in-out infinite,p-progress-spinner-color 6s ease-in-out infinite;stroke-linecap:round}@keyframes p-progress-spinner-rotate{to{transform:rotate(1turn)}}@keyframes 
p-progress-spinner-dash{0%{stroke-dasharray:1,200;stroke-dashoffset:0}50%{stroke-dasharray:89,200;stroke-dashoffset:-35px}to{stroke-dasharray:89,200;stroke-dashoffset:-124px}}@keyframes p-progress-spinner-color{0%,to{stroke:#d62d20}40%{stroke:#0057e7}66%{stroke:#008744}80%,90%{stroke:#ffa700}}"], directives: [{ type: i1.NgStyle, selector: "[ngStyle]", inputs: ["ngStyle"] }, { type: i1.NgClass, selector: "[ngClass]", inputs: ["class", "ngClass"] }], changeDetection: i0.ChangeDetectionStrategy.OnPush, encapsulation: i0.ViewEncapsulation.None }); i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "12.0.5", ngImport: i0, type: ProgressSpinner, decorators: [{ type: Component, args: [{ selector: 'p-progressSpinner', template: ` <div class="p-progress-spinner" [ngStyle]="style" [ngClass]="styleClass" role="alert" aria-busy="true"> <svg class="p-progress-spinner-svg" viewBox="25 25 50 50" [style.animation-duration]="animationDuration"> <circle class="p-progress-spinner-circle" cx="50" cy="50" r="20" [attr.fill]="fill" [attr.stroke-width]="strokeWidth" stroke-miterlimit="10"/> </svg> </div> `, changeDetection: ChangeDetectionStrategy.OnPush, encapsulation: ViewEncapsulation.None, styleUrls: ['./progressspinner.css'], host: { 'class': 'p-element' } }] }], propDecorators: { style: [{ type: Input }], styleClass: [{ type: Input }], strokeWidth: [{ type: Input }], fill: [{ type: Input }], animationDuration: [{ type: Input }] } }); class ProgressSpinnerModule { } ProgressSpinnerModule.ɵfac = i0.ɵɵngDeclareFactory({ minVersion: "12.0.0", version: "12.0.5", ngImport: i0, type: ProgressSpinnerModule, deps: [], target: i0.ɵɵFactoryTarget.NgModule }); ProgressSpinnerModule.ɵmod = i0.ɵɵngDeclareNgModule({ minVersion: "12.0.0", version: "12.0.5", ngImport: i0, type: ProgressSpinnerModule, declarations: [ProgressSpinner], imports: [CommonModule], exports: [ProgressSpinner] }); ProgressSpinnerModule.ɵinj = i0.ɵɵngDeclareInjector({ minVersion: "12.0.0", version: "12.0.5", 
ngImport: i0, type: ProgressSpinnerModule, imports: [[CommonModule]] }); i0.ɵɵngDeclareClassMetadata({ minVersion: "12.0.0", version: "12.0.5", ngImport: i0, type: ProgressSpinnerModule, decorators: [{ type: NgModule, args: [{ imports: [CommonModule], exports: [ProgressSpinner], declarations: [ProgressSpinner] }] }] }); /** * Generated bundle index. Do not edit. */ export { ProgressSpinner, ProgressSpinnerModule }; //# sourceMappingURL=primeng-progressspinner.js.map
from time import sleep
from abc import ABCMeta, abstractmethod
from typing import Any, List


class ClientBaseClass(metaclass=ABCMeta):
    """Abstract client for a query engine."""

    @abstractmethod
    def cursor(self):
        """Return Something that inherits CursorBaseClass
        """
        pass


class CursorBaseClass(metaclass=ABCMeta):
    """Abstract cursor: engine-specific subclasses implement the abstract
    methods; the concrete helpers below build row fetching / polling on top."""

    @abstractmethod
    def run(self, query):
        """Submit `query` to the engine."""
        pass

    @abstractmethod
    def poll(self) -> bool:
        """Update the internal states of the cursor. And checks if the query
           is completed

        Returns:
            bool -- True if the query is completed
        """
        pass

    @abstractmethod
    def cancel(self):
        """Cancel the running query."""
        pass

    @abstractmethod
    def get_one_row(self) -> List[Any]:
        """Fetch the next row, or None when exhausted."""
        pass

    @abstractmethod
    def get_columns(self) -> List[str]:
        """Return the column names of the result set."""
        pass

    # The follow functions are optional overrides
    def get_n_rows(self, n: int):
        """ Creates a generator which fetches n rows one by one,
            consider override this for more efficiency

        Arguments:
            n {int} -- max number of rows to fetch

        Returns:
            row -- [description]
        """
        for _ in range(n):
            row = self.get_one_row()
            if row is None:
                break
            yield row

    @property
    def tracking_url(self) -> str:
        """Some query engine provides an url for other

        Returns:
            [type] -- [description]
        """
        return None

    @property
    def percent_complete(self) -> float:
        """Get a percentage estimate of query completion
           For Presto, it is #completed_split/#total_split

        Returns:
            float -- the percentage completion between [0, 100]
        """
        return 0

    def get_logs(self) -> str:
        """Fetch the logs from the engine.
           Note that every time this function is called,
           it should return the logs after last call

        Returns:
            str -- The logs
        """
        return ""

    # These functions are intended to use as is
    def get_rows_iter(self, chunk_size: int = 10000):
        """Yield every remaining row, fetching in chunks of `chunk_size`."""
        while True:
            # BUG FIX: the default get_n_rows() is a generator, and the old code
            # called len() on it (TypeError). Materialize the chunk first; the
            # `or []` also tolerates overrides that return None.
            rows = list(self.get_n_rows(chunk_size) or [])
            if not rows:
                break
            for row in rows:
                yield row

    def get_rows(self) -> List:
        """Return all rows as a list."""
        return [row for row in self.get_rows_iter()]

    def get(self) -> List:
        """Return all columns + rows

        Returns:
            List -- The tabular data, first row is columns,
                    followed by the return results
        """
        columns = self.get_columns()
        if columns is None:
            return None
        else:
            rows = self.get_rows()
            return [columns] + rows

    def poll_until_finish(self, poll_interval=5):
        """Calling this function will continuously poll and sleep until finish

        Keyword Arguments:
            poll_interval {int} -- The sleep interval (default: {5})
        """
        while True:
            # Poll returns true when finished
            if self.poll():
                break
            sleep(poll_interval)
/** * [denomination description] * Editor : Iwan Gunawan * Email : iwan.gunawan81@gmail.com * @param {[type]} amount [description] * @return {[type]} [description] */ module.exports = function denomination(notes, amount) { //const notes = [1e5, 5e4, 2e4, 1e4, 5e3, 2e3, 1e3, 500, 100, 50]; if (typeof notes === "undefined" || typeof notes === "string" || typeof notes === "number" ) { console.error('Your first argument must be array'); return 0; } const noteCounter = []; let result = []; notes.map((f, i) => { noteCounter.push(0); if (amount >= f) { noteCounter[i] = parseInt(amount / f); amount = amount - noteCounter[i] * f; } if (noteCounter[i] != 0) result.push({ notes: f, counter: noteCounter[i] }); }); return result; };
# Concatenate a numbered sequence of frames ("<prefix>_<i>.jpg") from a source
# directory into a single horizontal strip image.
# Usage: python script.py <srcDir> <outputImage>
import os
import sys
import numpy as np

srcDir = sys.argv[1]   # directory containing the numbered .jpg frames
output = sys.argv[2]   # path of the strip image to write
interval = 1           # sample every `interval`-th frame
files = os.listdir(srcDir)
files = list(filter(lambda x: ".jpg" in x, files))
numImages = 20  # len(files)  -- NOTE(review): hard-coded to 20 frames; confirm intent
# Frame names share a common prefix before the trailing "_<index>.jpg".
prefix = "_".join(files[0].split(".jpg")[0].split("_")[:-1])
files = [prefix + "_" + str(i) + ".jpg" for i in range(0, numImages, interval)]
inds = range(0, numImages, interval)
# Always include the final frame, even if the sampling interval skipped it.
if inds[-1] != numImages - 1:
    files += [prefix + "_" + str(numImages - 1) + ".jpg"]
print(files)
from PIL import Image
imageList = []
for f in files:
    im = np.asarray(Image.open(os.path.join(srcDir, f)))
    imageList.append(im)
# Stack along axis 1 (width) -> horizontal strip; assumes all frames share the
# same height and channel count.
imageList = np.concatenate(imageList, 1)
im = Image.fromarray(imageList)
im.save(output)
from time import time
import sys
import copy
import logging


class SodukuSolver:
    """Sudoku solver with two strategies: plain backtracking and a CSP search
    using MRV (minimum remaining values) cell ordering plus forward checking."""

    def __init__(self, dim, fileDir):
        """Load a puzzle file: one row per line, '0' marks an empty cell.

        Arguments:
            dim {int} -- board dimension (9)
            fileDir {str} -- path to the puzzle file
        """
        self.dim = dim
        self.expandedNodes = 0  # number of search nodes expanded
        with open(fileDir) as f:
            content = f.readlines()
        self.board = [list(x.strip()) for x in content]
        self.rv = self.getRemainingValues()

    def __str__(self):
        string = '\n'
        for row in self.board:
            for x in row:
                string += x + " "
            string += '\n'
        return string

    def isSafe(self, row, col, choice):
        """Return True if `choice` can go at (row, col) without a row/col/box conflict."""
        choiceStr = str(choice)
        for i in range(self.dim):
            if self.board[row][i] == choiceStr or self.board[i][col] == choiceStr:
                return False
        boxR = row - (row % 3)
        boxV = col - (col % 3)
        for i in range(3):
            for j in range(3):
                if self.board[boxR + i][boxV + j] == choiceStr:
                    return False
        return True

    def getNextLocation(self):
        """First empty cell in row-major order, or (-1, -1) when the board is full."""
        for i in range(self.dim):
            for j in range(self.dim):
                if self.board[i][j] == '0':
                    return (i, j)
        return (-1, -1)

    def getDomainLength(self, lst):
        # 'x' represents fixed value(already filled)
        if 'x' in lst or lst == []:
            return 10  # to prevent the agent from choosing an empty domain as MRV cell
        else:
            return len(lst)

    # returning the next smallest domain
    # filling the cells with smaller domain size further reduces the search space in next steps
    def getNextMRVRowCol(self):
        # stores lengths of all lists(domains) from RV
        rvMap = list(map(self.getDomainLength, self.rv))
        minimum = min(rvMap)
        if minimum == 10:
            return (-1, -1)
        index = rvMap.index(minimum)
        # as domains are stored linearly
        return (index // 9, index % 9)

    # a function to find a truncated domain which reduces our search space
    def getDomain(self, row, col):
        """Candidate digits for (row, col) after removing row/column/box values."""
        # creating a list of numbers (1-9)
        RVCell = [str(i) for i in range(1, self.dim + 1)]
        # removing elements from the RVCell which are found in the row
        for i in range(self.dim):
            if self.board[row][i] != '0':
                if self.board[row][i] in RVCell:
                    RVCell.remove(self.board[row][i])
        # removing elements from the RVCell which are found in the column
        for i in range(self.dim):
            if self.board[i][col] != '0':
                if self.board[i][col] in RVCell:
                    RVCell.remove(self.board[i][col])
        # removing elements from the RVCell which are found in the sector
        boxRow = row - row % 3
        boxCol = col - col % 3
        for i in range(3):
            for j in range(3):
                # BUG FIX: board cells are strings, so the empty check must
                # compare against '0'. The original compared against the int 0,
                # which was always True and only worked because of the inner
                # membership guard.
                if self.board[boxRow + i][boxCol + j] != '0':
                    if self.board[boxRow + i][boxCol + j] in RVCell:
                        RVCell.remove(self.board[boxRow + i][boxCol + j])
        return RVCell

    # finding possible values for each cell
    # already filled cells are represented as ['x']
    def getRemainingValues(self):
        RV = []
        for row in range(self.dim):
            for col in range(self.dim):
                if self.board[row][col] != '0':
                    RV.append(['x'])
                else:
                    RV.append(self.getDomain(row, col))
        return RV

    '''Solving methods'''

    def solveBacktracking(self):
        """Depth-first backtracking over empty cells; True when solved."""
        location = self.getNextLocation()
        if location[0] == -1:
            return True
        else:
            self.expandedNodes += 1
            for choice in range(1, self.dim + 1):
                if self.isSafe(location[0], location[1], choice):
                    self.board[location[0]][location[1]] = str(choice)
                    if self.solveBacktracking():
                        return True
                    self.board[location[0]][location[1]] = '0'
            return False

    # forward checking
    def isEmptyDomainProduced(self, row, col, choice):
        """True if the latest assignment wiped out some other cell's domain."""
        element = self.rv.pop(row * 9 + col)
        if [] in self.rv:
            self.rv.insert(row * 9 + col, element)
            return True
        else:
            self.rv.insert(row * 9 + col, element)
            return False

    def solveCSP(self):
        """MRV-ordered search with forward checking; True when solved."""
        location = self.getNextMRVRowCol()
        if location[0] == -1:
            return True
        else:
            self.expandedNodes += 1
            row = location[0]
            col = location[1]
            for choice in self.rv[row * 9 + col]:
                choice_str = str(choice)
                self.board[row][col] = choice_str
                cpy = copy.deepcopy(self.rv)
                self.rv = self.getRemainingValues()
                if not self.isEmptyDomainProduced(row, col, choice_str):
                    if self.solveCSP():
                        return True
                # undo the assignment and restore the saved domains
                self.board[row][col] = '0'
                self.rv = cpy
            return False


def main():
    # taking filename of puzzle and algorithm to be used as command line input
    # to run : python3 solver.py [filename] [algo]
    file = sys.argv[1]
    algo = sys.argv[2]

    # create and configure the logger
    logging.basicConfig(filename="sudoku.log",
                        format='[%(asctime)s]: %(message)s',
                        level=logging.INFO)
    try:
        s = SodukuSolver(9, 'problems/{}.txt'.format(file))
        start = time()
        time_elapsed = 0.0
        if (algo == "CSP"):
            print(s)
            s.solveCSP()
            print(s)
            end = time()
            time_elapsed = round((end - start), 3)
            logging.info(f"{file}.txt {algo} {s.expandedNodes} -- {time_elapsed}")
        elif (algo == "backtracking"):
            print(s)
            s.solveBacktracking()
            print(s)
            end = time()
            time_elapsed = round((end - start), 3)
            logging.info(f"{file}.txt {algo} {s.expandedNodes} -- {time_elapsed}")
        else:
            print("\nAlgorithm not implemented")
            logging.info(f"\"wrong algorithm specified\"")
        print(f"Time Elapsed: {time_elapsed}")
        print(f"Nodes Expanded: {s.expandedNodes}")
    except FileNotFoundError:
        logging.info(f"\"{file}.txt not found\"")
        print(f"{file}.txt not found")


if __name__ == "__main__":
    main()
from __future__ import annotations
from abc import ABC, abstractmethod


class Strategy(ABC):
    """Abstract marker base for interchangeable strategy objects."""
    pass


class StrategyImplementor(ABC):
    """Interface for objects that execute a given Strategy."""

    @abstractmethod
    def perform_action(self, *, strategy: Strategy):
        """Carry out the action described by `strategy` (keyword-only)."""
        raise NotImplementedError()
/* * Copyright 2015-2017 WorldWind Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @exports BMNGLandsatLayer */ define([ '../geom/Location', '../geom/Sector', '../layer/TiledImageLayer', '../util/WmsUrlBuilder' ], function (Location, Sector, TiledImageLayer, WmsUrlBuilder) { "use strict"; /** * Constructs a combined Blue Marble and Landsat image layer. * @alias BMNGLandsatLayer * @constructor * @augments TiledImageLayer * @classdesc Displays a combined Blue Marble and Landsat image layer that spans the entire globe. */ var BMNGLandsatLayer = function () { TiledImageLayer.call(this, Sector.FULL_SPHERE, new Location(45, 45), 10, "image/jpeg", "BMNGLandsat256", 256, 256); this.displayName = "Blue Marble & Landsat"; this.pickEnabled = false; this.urlBuilder = new WmsUrlBuilder("https://cors.aworldbridgelabs.com/https://worldwind25.arc.nasa.gov/wms", "BlueMarble-200405,esat", "", "1.3.0"); }; BMNGLandsatLayer.prototype = Object.create(TiledImageLayer.prototype); return BMNGLandsatLayer; });
from django.conf import settings
from django.db import models


class Profile(models.Model):
    """A PDS user, identified by an externally assigned UUID."""
    uuid = models.CharField(max_length=36, unique=True, blank=False, null=False, db_index=True)

    def getDBName(self):
        """Per-user database name derived from the UUID (dashes -> underscores)."""
        return "User_" + str(self.uuid).replace("-", "_")

    def __unicode__(self):
        return self.uuid


class AuditEntry(models.Model):
    '''
    Represents an audit of a request against the PDS
    Given that there will be many entries (one for each request), we are
    strictly limiting the size of data entered for each row
    The assumption is that script names and symbolic user ids will be
    under 64 characters
    '''
    datastore_owner = models.ForeignKey(Profile, blank=False, null=False, related_name="auditentry_owner", db_index=True)
    requester = models.ForeignKey(Profile, blank=False, null=False, related_name="auditentry_requester", db_index=True)
    method = models.CharField(max_length=10)
    scopes = models.CharField(max_length=1024)  # actually storing csv of valid scopes
    purpose = models.CharField(max_length=64, blank=True, null=True)
    script = models.CharField(max_length=64)
    token = models.CharField(max_length=64)
    system_entity_toggle = models.BooleanField()
    trustwrapper_result = models.CharField(max_length=64)
    timestamp = models.DateTimeField(auto_now_add=True, db_index=True)

    def __unicode__(self):
        # BUG FIX: the original body evaluated self.pk without returning it,
        # so the method always returned None.
        return str(self.pk)


class Notification(models.Model):
    '''
    Represents a notification about a user's data.
    This can be filled in while constructing answers
    '''
    datastore_owner = models.ForeignKey(Profile, blank=False, null=False, related_name="notification_owner")
    title = models.CharField(max_length=64, blank=False, null=False)
    content = models.CharField(max_length=1024, blank=False, null=False)
    type = models.IntegerField(blank=False, null=False)
    timestamp = models.DateTimeField(auto_now_add=True)
    uri = models.URLField(blank=True, null=True)

    def __unicode__(self):
        # BUG FIX: same missing-return defect as AuditEntry.__unicode__.
        return str(self.pk)


class Device(models.Model):
    """A GCM-registered device belonging to a datastore owner."""
    datastore_owner = models.ForeignKey(Profile, blank=False, null=False, related_name="device_owner", db_index=True)
    gcm_reg_id = models.CharField(max_length=1024, blank=False, null=False)
from django.apps import AppConfig


class AnalyticsConfig(AppConfig):
    """Django application configuration for the analytics app."""

    # Full dotted path of the application package.
    name = 'v1.analytics'
import pygame
import logging
import os
from game import Game
from transition import Transition
from text import Text

WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
LIMEGREEN = (50, 205, 50)
YELLOW = (255, 255, 0)
RED = (255, 0, 0)
SLIVER = (192, 192, 192)
FLAGS = pygame.FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF


class GameMenu(object):
    """Main menu for the Snake game: renders menu items, handles keyboard and
    mouse selection, and dispatches to the game / instructions screens."""

    # Button background colours, indexed by menu-item position.
    colorButtonList = [LIMEGREEN, YELLOW, RED, SLIVER]

    def __init__(self, screen, items):
        self.screenSize = screen.get_size()
        self.__FPS = 30
        self.__exit = False
        self.__action = ""
        backgroundImageFile = os.path.abspath("../Data/images/MainMenuBackground.png")
        self.mainMenuMusicFile = os.path.abspath("../Data/Music/MainMenuMusic/SuperMonkeyBall2SelectMode.wav")
        movementEffectFile = os.path.abspath("../Data/Music/SoundEffects/MenuMovement.wav")
        selectionEffectFile = os.path.abspath("../Data/Music/SoundEffects/MenuSelection.wav")
        try:
            self.__backgroundImage = pygame.image.load(backgroundImageFile).convert()
            pygame.mixer.music.load(self.mainMenuMusicFile)
            self.__movementSoundEffect = pygame.mixer.Sound(movementEffectFile)
            self.__selectionSoundEffect = pygame.mixer.Sound(selectionEffectFile)
        except pygame.error as errorMessage:
            # Log the asset-loading failure, then propagate — the menu cannot
            # run without its resources.
            logging.exception(errorMessage)
            raise
        self.__backgroundImage = pygame.transform.scale(self.__backgroundImage, screen.get_size())
        self.__backgroundRect = self.__backgroundImage.get_rect()
        pygame.mixer.music.set_volume(0.75)
        pygame.mixer.music.play(-1, 0.0)
        self.__items = []
        self.__addMainMenuItems(items)
        self.__totalItems = len(self.__items)
        self.__BackToMenuText = Text("Back", "../Data/Fonts/", "SpicyRice-Regular.otf", 35, BLACK)
        self.__BackToMenuText.renderText()
        posY = self.screenSize[1] - self.__BackToMenuText.textSize[1]
        self.__BackToMenuText.setPosition(0, posY)
        self.__mouseVisible = True
        self.__currentMenuItem = None
        self.__transition = Transition(screen, "Fade Out", 255, BLACK)

    def __addMainMenuItems(self, items):
        """Create the title label and one centred Text entry per menu item."""
        self.__titleMenuItem = Text("Welcome to Snake", "../Data/Fonts/", "SpicyRice-Regular.otf", 50, BLACK)
        self.__titleMenuItem.renderText()
        posX = (self.screenSize[0] / 2) - (self.__titleMenuItem.textSize[0] / 2)
        posY = (self.screenSize[1] / 4) - (self.__titleMenuItem.textSize[1] / 4)
        self.__titleMenuItem.setPosition(posX, posY)
        for index, item in enumerate(items):
            menuItem = Text(item, "../Data/Fonts/", "SpicyRice-Regular.otf", 25, BLACK)
            menuItem.renderText()
            posX = (self.screenSize[0] / 2) - (menuItem.textSize[0] / 2)
            # Stack the items vertically with a 15px gap between them.
            posY = (self.screenSize[1] / 2) - (menuItem.textSize[1] / 2) + ((index * 15) + (index * menuItem.textSize[1]))
            menuItem.setPosition(posX, posY)
            self.__items.append(menuItem)

    def __setItemSelection(self, keys):
        """Move the keyboard highlight up/down, wrapping at either end."""
        for item in self.__items:
            item.font.set_italic(False)
            item.setFontColor(BLACK)
        if self.__currentMenuItem is None:
            self.__currentMenuItem = 0
        else:
            if keys[pygame.K_UP] and self.__currentMenuItem > 0:
                self.__currentMenuItem -= 1
            elif keys[pygame.K_UP] and self.__currentMenuItem == 0:
                self.__currentMenuItem = self.__totalItems - 1
            elif keys[pygame.K_DOWN] and self.__currentMenuItem < self.__totalItems - 1:
                self.__currentMenuItem += 1
            elif keys[pygame.K_DOWN] and self.__currentMenuItem == self.__totalItems - 1:
                self.__currentMenuItem = 0
        self.__items[self.__currentMenuItem].font.set_italic(True)
        self.__items[self.__currentMenuItem].setFontColor(WHITE)
        self.__movementSoundEffect.play()

    def __setMouseSelection(self, item, mousePosition):
        """Highlight `item` when the mouse hovers over it."""
        if item.isMouseOverText(mousePosition):
            item.font.set_italic(True)
            item.setFontColor(WHITE)
        else:
            item.font.set_italic(False)
            item.setFontColor(BLACK)

    def __handleEvents(self):
        for event in pygame.event.get():
            keys = pygame.key.get_pressed()
            if event.type == pygame.QUIT or (keys[pygame.K_F4] and keys[pygame.K_LALT]):
                self.__exit = True
            if keys[pygame.K_UP] or keys[pygame.K_DOWN]:
                self.__mouseVisible = False
                self.__setItemSelection(keys)
            if keys[pygame.K_RETURN] and self.__currentMenuItem is not None:
                self.__action = self.__items[self.__currentMenuItem].getText
            if event.type == pygame.MOUSEBUTTONDOWN:
                mousePosition = pygame.mouse.get_pos()
                for item in self.__items:
                    if item.isMouseOverText(mousePosition):
                        self.__action = item.getText
            # BUG FIX: get_rel() returns a (dx, dy) that can be negative;
            # the original `> (0, 0)` tuple comparison missed leftward/upward
            # movement, so the cursor would not reappear for those motions.
            if pygame.mouse.get_rel() != (0, 0):
                self.__mouseVisible = True
                self.__currentMenuItem = None
        if self.__mouseVisible:
            pygame.mouse.set_visible(True)
        else:
            pygame.mouse.set_visible(False)

    def __draw(self, screen):
        screen.fill(WHITE)
        screen.blit(self.__backgroundImage, self.__backgroundRect)
        screen.blit(self.__titleMenuItem.label, self.__titleMenuItem.labelRect)
        for index, item in enumerate(self.__items):
            if self.__mouseVisible:
                mousePosition = pygame.mouse.get_pos()
                self.__setMouseSelection(item, mousePosition)
            pygame.draw.rect(screen, GameMenu.colorButtonList[index], item.labelRect)
            screen.blit(item.label, item.labelRect)
        if not self.__transition.isTransitionDone:
            self.__transition.performTransition(screen)
        pygame.display.update()

    def __executeAction(self, screen):
        """Dispatch the pending menu action, then clear it."""
        if self.__action:
            if self.__action == 'Start':
                self.__selectionSoundEffect.play()
                self.__startGame(screen)
            elif self.__action == 'How to Play':
                self.__selectionSoundEffect.play()
                self.__showInstructions(screen)
            elif self.__action == 'Quit':
                self.__selectionSoundEffect.play()
                self.__exit = True
            self.__action = ""

    def __startGame(self, screen):
        """Fade in, run the game, then restore the menu music and fade out."""
        pygame.mixer.music.stop()
        self.__transition.setNewTransition("Fade In", 0, BLACK)
        while not self.__transition.isTransitionDone and not self.__exit:
            for event in pygame.event.get():
                keys = pygame.key.get_pressed()
                if event.type == pygame.QUIT or (keys[pygame.K_F4] and keys[pygame.K_LALT]):
                    self.__exit = True
            self.__draw(screen)
        if not self.__exit:
            game = Game(screen)
            self.__exit = game.playGame(screen)
        if not self.__exit:
            pygame.mixer.music.load(self.mainMenuMusicFile)
            pygame.mixer.music.set_volume(0.75)
            pygame.mixer.music.play(-1, 0.0)
            self.__transition.setNewTransition("Fade Out", 255, BLACK)

    def __showInstructions(self, screen):
        """Show the instructions screen until 'Back' is clicked or the app quits."""
        if not self.__mouseVisible:
            pygame.mouse.set_visible(True)
            self.__mouseVisible = True
        while self.__action != "Back" and not self.__exit:
            mousePosition = pygame.mouse.get_pos()
            for event in pygame.event.get():
                keys = pygame.key.get_pressed()
                if event.type == pygame.QUIT or (keys[pygame.K_F4] and keys[pygame.K_LALT]):
                    self.__exit = True
                if event.type == pygame.MOUSEBUTTONDOWN:
                    if self.__BackToMenuText.isMouseOverText(mousePosition):
                        self.__action = self.__BackToMenuText.getText
            screen.fill(WHITE)
            screen.blit(self.__backgroundImage, self.__backgroundRect)
            pygame.draw.rect(screen, GameMenu.colorButtonList[3], self.__BackToMenuText.labelRect)
            screen.blit(self.__BackToMenuText.label, self.__BackToMenuText.labelRect)
            self.__setMouseSelection(self.__BackToMenuText, mousePosition)
            if not self.__transition.isTransitionDone:
                self.__transition.performTransition(screen)
            pygame.display.update()

    def run(self, screen):
        """Main menu loop: events -> draw -> action, capped at __FPS."""
        clock = pygame.time.Clock()
        pygame.event.set_allowed(None)  # initially blocked all the events.
        pygame.event.set_allowed([pygame.QUIT, pygame.KEYDOWN, pygame.MOUSEBUTTONDOWN])  # initialize the events that we are looking for
        while not self.__exit:
            self.__handleEvents()
            self.__draw(screen)
            self.__executeAction(screen)
            clock.tick(self.__FPS)


def initPygame():
    """Initialise only the pygame subsystems the menu needs."""
    pygame.display.init()
    pygame.mixer.pre_init(22050, -16, 2, 4096)
    pygame.mixer.init()
    pygame.font.init()


def quitPygame():
    """Shut down the pygame subsystems started by initPygame()."""
    pygame.mixer.quit()
    pygame.display.quit()
    pygame.font.quit()


def main():
    initPygame()
    screen = pygame.display.set_mode((0, 0), FLAGS)
    menuItems = ('Start', 'How to Play', 'Quit')
    pygame.display.set_caption('Snake')
    gameMenu = GameMenu(screen, menuItems)
    gameMenu.run(screen)
    quitPygame()


if __name__ == "__main__":
    main()
# Training script for a single-branch saliency model on the DR(eye)VE dataset:
# each epoch saves a visualization, trains, and checkpoints the weights.
import os.path
from argparse import ArgumentParser

import skimage.io as io
import torch
from torch.optim import Adam
from torch.utils.data import DataLoader

from datasets.dreyeve import DREYEVE
from model.objective import KLD
from model.single_branch import SingleBranchModel
from utils import device
from utils import set_random_seed
from utils import to_image

# Ensure the checkpoint and visualization output directories exist.
for d in ['ckps', 'out']:
    if not os.path.exists(d):
        os.makedirs(d)


def parse_args():
    """Parse command-line options for DR(eye)VE training."""
    parser = ArgumentParser('DR(eye)VE dataset training')
    parser.add_argument('--batch_size', type=int, default=16)
    parser.add_argument('--lr', type=float, default=1e-4)
    parser.add_argument('--num_workers', type=int, default=8)
    parser.add_argument('--n_epochs', type=int, default=20)
    return parser.parse_args()


def train_epoch(ds, model, kld, optim, epoch, args):
    """Run one optimization pass over the training set.

    Minimizes the sum of two KLD losses: one on the cropped branch output
    and one on the resized branch output.
    """
    ds.train()
    model.train()

    dl = DataLoader(ds, args.batch_size, num_workers=args.num_workers,
                    worker_init_fn=set_random_seed)

    for batch_idx, data in enumerate(dl):
        # NOTE(review): batch layout assumed to be (resized clip, cropped clip,
        # full frame, resized GT map, cropped GT map) — confirm against DREYEVE.
        x_res, x_crp, x_ff, y, y_crp = (d.to(device) for d in data)

        # DO STUFF HERE - perform a training update
        optim.zero_grad()
        p_crp, p_res = model(x_res, x_crp, x_ff)
        loss_crp = kld(p_crp, y_crp)
        loss_res = kld(p_res, y)
        loss_tot = loss_crp + loss_res
        loss_tot.backward()
        optim.step()

        print(f'Train epoch: {epoch} '
              f'[{batch_idx * len(x_res)}/{len(ds)} '
              f'({100. * batch_idx / len(dl):.0f}%)]\t'
              f'Loss crop: {loss_crp.item():.4f}\t'
              f'Loss resize: {loss_res.item():.4f}\t')


def visualize(ds, model, args, epoch):
    """Save a qualitative prediction image for the current epoch to out/."""
    # NOTE(review): this intentionally(?) samples from the training split
    # (ds.train()) — confirm a validation split isn't intended here.
    ds.train()
    model.eval()

    dl = DataLoader(ds, args.batch_size, num_workers=args.num_workers,
                    worker_init_fn=set_random_seed)

    with torch.no_grad():
        x_res, x_crp, x_ff, y_res, y_crp = (d.to(device) for d in next(iter(dl)))

        # DO STUFF HERE - get the network prediction
        p_crp, p_res = model(x_res, x_crp, x_ff)

        image = to_image(p_res, y_res, x_ff)
        io.imsave(f'out/{epoch:03d}.png', image)


def main():
    """Entry point: build dataset/model/optimizer and run the epoch loop."""
    args = parse_args()

    ds = DREYEVE()

    model = SingleBranchModel().to(device)
    loss = KLD().to(device)

    optimizer = Adam(model.parameters(), args.lr)

    for epoch in range(0, args.n_epochs):
        # Visualize before training so epoch 0 shows the untrained model.
        visualize(ds, model, args, epoch)
        train_epoch(ds, model, loss, optimizer, epoch, args)
        torch.save(model.state_dict(), f'ckps/{epoch:03d}.pt')


if __name__ == '__main__':
    main()
// Application entry point: mounts the root <App /> component into #root.
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
import * as serviceWorker from './serviceWorker';

ReactDOM.render(<App />, document.getElementById('root'));

// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.unregister();
"use strict"

/**
 * A 4x4 row-major matrix of numbers with factory methods for the usual
 * transform matrices (identity, perspective, translation, scale, rotation)
 * and the basic operations (multiplication, transpose).
 */
class Matrix4f {

    static MATRIX_SIZE = 4;

    /**
     * @param {Float[4][4]} matrixArray row-major 4x4 array backing this matrix
     */
    constructor( matrixArray ) {
        this.m = matrixArray;
    }

    /** @returns {Matrix4f} a freshly allocated identity matrix */
    static identity = () => {
        const rows = [];
        for (let r = 0; r < Matrix4f.MATRIX_SIZE; r++) {
            const row = [];
            for (let c = 0; c < Matrix4f.MATRIX_SIZE; c++) {
                row.push(r === c ? 1 : 0);
            }
            rows.push(row);
        }
        return new Matrix4f(rows);
    }

    /**
     * Build a perspective projection matrix.
     * @param {Number} fov vertical field of view, in degrees (converted via radians())
     * @param {Number} aspectRatio width / height
     * @param {Number} zNear near clip plane distance
     * @param {Number} zFar  far clip plane distance
     */
    static perspective = (fov, aspectRatio, zNear, zFar) => {
        const result = Matrix4f.identity();
        const invTanHalfFov = 1.0 / Math.tan( radians(fov) / 2 );
        const depth = zNear - zFar;

        result.m[ 0 ][ 0 ] = invTanHalfFov * ( 1.0 / aspectRatio );
        result.m[ 1 ][ 1 ] = invTanHalfFov;
        result.m[ 2 ][ 2 ] = ( -zNear - zFar ) / depth;
        result.m[ 2 ][ 3 ] = (2 * zFar * zNear) / depth;
        result.m[ 3 ][ 2 ] = 1;
        result.m[ 3 ][ 3 ] = 0;

        return result;
    }

    /** @param {Vector3f} vec translation offsets, written into the last column */
    static translation = ( vec ) => {
        const result = Matrix4f.identity();
        result.m[ 0 ][ 3 ] = vec.x;
        result.m[ 1 ][ 3 ] = vec.y;
        result.m[ 2 ][ 3 ] = vec.z;
        return result;
    }

    /** @param {Vector3f} vec per-axis scale factors, written onto the diagonal */
    static scale = ( vec ) => {
        const result = Matrix4f.identity();
        result.m[ 0 ][ 0 ] = vec.x;
        result.m[ 1 ][ 1 ] = vec.y;
        result.m[ 2 ][ 2 ] = vec.z;
        return result;
    }

    /**
     * Build a rotation matrix from an orthonormal basis.
     * @param {Vector3f} f forward vector
     * @param {Vector3f} u upward vector
     * @param {Vector3f} r right vector
     * @returns {Matrix4f} rotation matrix with r, u, f as its first three rows
     */
    static rotation = (f, u, r) => {
        const result = Matrix4f.identity();
        const basis = [r, u, f];
        for (let row = 0; row < basis.length; row++) {
            result.m[row][0] = basis[row].x;
            result.m[row][1] = basis[row].y;
            result.m[row][2] = basis[row].z;
        }
        return result;
    }

    /**
     * Matrix product this * other.
     * @param {Matrix4f} other right-hand operand
     * @returns {Matrix4f} new matrix; neither operand is modified
     */
    mul = ( other ) => {
        const product = Matrix4f.identity();
        product.m = this.m.map((rowVec) =>
            rowVec.map((_, col) =>
                rowVec.reduce((acc, value, k) => acc + value * other.m[k][col], 0)));
        return product;
    }

    /** @returns {Matrix4f} a new matrix with rows and columns swapped */
    transpose = () => {
        const flipped = Matrix4f.identity();
        flipped.m = this.m[0].map((_, col) => this.m.map((rowVec) => rowVec[col]));
        return flipped;
    }
}
#!/usr/bin/env python
# Copyright 2019 Paul Archer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE: this module is Python 2 only (urllib2, print statements).

from pvstats.pvinverter.base import BasePVInverter

from datetime import datetime
from decimal import *

import urllib2
import json

import logging
_logger = logging.getLogger(__name__)


class PVInverter_Fronius(BasePVInverter):
  """PV inverter driver that polls a Fronius Solar API v1 endpoint over HTTP."""

  def __init__(self, cfg, **kwargs):
    # Build the realtime-data URL from cfg["host"] / cfg["port"].
    # NOTE(review): BasePVInverter.__init__ is never called — confirm the base
    # class does not require initialization.
    self.url = "http://{}:{}/solar_api/v1/GetInverterRealtimeData.cgi?Scope=Device&DeviceID=1&DataCollection=CommonInverterData".format(cfg["host"],cfg["port"])

  def connect(self):
    # HTTP is connectionless here; nothing to set up.
    pass

  def close(self):
    # Nothing to tear down (see connect()).
    pass

  def read(self):
    """Reads the PV inverters status"""
    # Dummy data
    # NOTE(review): this hard-coded sample payload is dead code — it is
    # overwritten by the live HTTP fetch immediately below. Kept for reference.
    response = """
{
   "Body" : {
      "Data" : {
         "DAY_ENERGY" : {
            "Unit" : "Wh",
            "Value" : 55550
         },
         "DeviceStatus" : {
            "ErrorCode" : 567,
            "LEDColor" : 2,
            "LEDState" : 0,
            "MgmtTimerRemainingTime" : -1,
            "StateToReset" : true,
            "StatusCode" : 7
         },
         "FAC" : {
            "Unit" : "Hz",
            "Value" : 50.03
         },
         "IAC" : {
            "Unit" : "A",
            "Value" : 16.27
         },
         "IDC" : {
            "Unit" : "A",
            "Value" : 9.74
         },
         "PAC" : {
            "Unit" : "W",
            "Value" : 4051
         },
         "TOTAL_ENERGY" : {
            "Unit" : "Wh",
            "Value" : 14272110
         },
         "UAC" : {
            "Unit" : "V",
            "Value" : 245.6
         },
         "UDC" : {
            "Unit" : "V",
            "Value" : 407.8
         },
         "YEAR_ENERGY" : {
            "Unit" : "Wh",
            "Value" : 2512712
         }
      }
   },
   "Head" : {
      "RequestArguments" : {
         "DataCollection" : "CommonInverterData",
         "DeviceClass" : "Inverter",
         "DeviceId" : "1",
         "Scope" : "Device"
      },
      "Status" : {
         "Code" : 0,
         "Reason" : "",
         "UserMessage" : ""
      },
      "Timestamp" : "2019-02-10T17:23:28+11:00"
   }
}
"""
    # Fetch and parse the live inverter payload.
    response = urllib2.urlopen(self.url).read()
    data = json.loads(response)
    # Pretty-print for debugging; default=str stringifies non-JSON types.
    d = json.dumps(data, sort_keys=True, indent=2, separators=(',', ': '),default=str)
    print d

    # The [:-6] slice strips the "+11:00" timezone suffix before parsing,
    # so the stored timestamp is naive local time.
    self.registers = {'timestamp':      datetime.strptime(data['Head']['Timestamp'][:-6], "%Y-%m-%dT%H:%M:%S"),
                      'daily_pv_power': Decimal(data['Body']['Data']['DAY_ENERGY']['Value']),
                      # NOTE(review): 'total_pv_power' is filled from PAC
                      # (instantaneous W), not TOTAL_ENERGY — confirm intended.
                      'total_pv_power': Decimal(data['Body']['Data']['PAC']['Value']),
                      #'internal_temp': Decimal(data['Body']['Data']['T_AMBIENT']['Value']).quantize(Decimal('.1')),
                      'internal_temp':  Decimal(0),
                      'pv1_voltage':    Decimal(data['Body']['Data']['UDC']['Value']),
                      'pv2_voltage':    Decimal('0')}
    print self.registers

#-----------------
# Exported symbols
#-----------------
__all__ = [
  "PVInverter_Fronius"
]

# vim: set expandtab ts=2 sw=2:
class pid:
    """A simple discrete PID controller.

    Tracks the running integral and the previous error so that successive
    calls to :meth:`update` produce proportional, integral and derivative
    contributions toward the configured target.
    """

    # BUGFIX: the constructor was named `_init_` (single underscores), so it
    # was never invoked by `pid(...)`, and both methods were missing `self`
    # (the first positional argument was consumed as data), making every
    # `self.x` reference a NameError at runtime.
    def __init__(self, p, i, d, target):
        """Configure the controller.

        Args:
            p: proportional gain (kp).
            i: integral gain (ki).
            d: derivative gain (kd).
            target: setpoint the controller drives the input toward.
        """
        self.kp = p
        self.ki = i
        self.kd = d
        self.integrator = 0.0   # running sum of errors (for the I term)
        self.derivator = 0.0    # previous error (for the D term)
        self.err = 0.0
        self.target = target

    def update(self, data):
        """Return the PID output for the latest measurement `data`."""
        # Calculate error
        self.error = self.target - data

        # P part of PID
        p = self.kp * self.error

        # I part of PID (integrals)
        self.integrator = self.integrator + self.error
        i = self.ki * self.integrator

        # D part of PID (derivatives)
        d = self.kd * (self.error - self.derivator)
        self.derivator = self.error

        # Return PID output value
        return p + i + d
/** * @license * Copyright 2018 Google Inc. All Rights Reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ============================================================================= */ import { getGlslDifferences } from './glsl_version'; import { ENCODE_FLOAT_SNIPPET } from './shader_compiler_util'; import { TextureUsage } from './tex_util'; export class EncodeFloatPackedProgram { constructor(outputShape) { this.variableNames = ['A']; this.packedInputs = true; this.packedOutput = false; this.outTexUsage = TextureUsage.DOWNLOAD; const glsl = getGlslDifferences(); this.outputShape = outputShape; this.userCode = ` ${ENCODE_FLOAT_SNIPPET} void main() { ivec3 coords = getOutputCoords(); float x = getChannel(getAAtOutCoords(), vec2(coords.y, coords.z)); ${glsl.output} = encode_float(x); } `; } } //# sourceMappingURL=encode_float_packed_gpu.js.map
/**
 * FreeRDP: A Remote Desktop Protocol Implementation
 * Wayland Client Interface
 *
 * Copyright 2014 Manuel Bachmann <tarnyko@tarnyko.net>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <stdio.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/mman.h>

#include <freerdp/freerdp.h>
#include <freerdp/client/cmdline.h>
#include <freerdp/channels/channels.h>
#include <freerdp/gdi/gdi.h>

#include <wayland-client.h>

/* Global Wayland objects discovered through the registry. */
struct display
{
	struct wl_display* display;
	struct wl_registry* registry;
	struct wl_compositor* compositor;
	struct wl_shell* shell;
	struct wl_shm* shm;
};

/* One shm-backed frame buffer; 'busy' is set until the compositor releases it. */
struct buffer
{
	struct wl_buffer* buffer;
	void* shm_data;
	int busy;
};

/* The toplevel RDP window: double-buffered surface plus the raw frame data. */
struct window
{
	int width, height;
	struct wl_surface* surface;
	struct wl_shell_surface* shell_surface;
	struct wl_callback* callback;
	struct buffer buffers[2];
	struct display* display;
	void* data;
};

/* FreeRDP context extended with the Wayland state. */
struct wl_context
{
	rdpContext _p;

	struct display* display;
	struct window* window;
};

/* Compositor is done with the buffer; mark it reusable. */
static void wl_buffer_release(void* data, struct wl_buffer* wl_buffer)
{
	struct buffer* buffer = data;

	buffer->busy = 0;
}

static const struct wl_buffer_listener wl_buffer_listener =
{
	wl_buffer_release
};

static void window_redraw(void* data, struct wl_callback* callback, uint32_t time);

static const struct wl_callback_listener wl_callback_listener =
{
	window_redraw
};

/*
 * Frame callback: copy window->data into a fresh shm pool, attach it and
 * schedule the next frame.
 * NOTE(review): a brand-new wl_buffer is created on every frame and the
 * previous buffer->buffer is overwritten without wl_buffer_destroy() —
 * looks like a per-frame leak; confirm against the release handler.
 */
static void window_redraw(void* data, struct wl_callback* callback, uint32_t time)
{
	struct window* window = data;
	struct wl_shm_pool* shm_pool;
	struct buffer* buffer;
	int fd;
	int fdt;

	/* Pick whichever of the two buffers the compositor is not holding. */
	if (!window->buffers[0].busy)
		buffer = &window->buffers[0];
	else if (!window->buffers[1].busy)
		buffer = &window->buffers[1];
	else
		return;

	/* Back the buffer with a fresh shared-memory file (XRGB8888, 4 B/px). */
	fd = shm_open("wlfreerdp_shm", O_CREAT | O_TRUNC | O_RDWR, 0666);
	fdt = ftruncate(fd, window->width * window->height * 4);
	if (fdt != 0)
	{
		fprintf(stderr, "window_redraw: could not allocate memory\n");
		close(fd);
		return;
	}

	buffer->shm_data = mmap(0, window->width * window->height * 4, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
	if (buffer->shm_data == MAP_FAILED)
	{
		fprintf(stderr, "window_redraw: failed to memory map buffer\n");
		close(fd);
		return;
	}

	shm_pool = wl_shm_create_pool(window->display->shm, fd, window->width * window->height * 4);
	buffer->buffer = wl_shm_pool_create_buffer(shm_pool, 0, window->width, window->height, window->width* 4, WL_SHM_FORMAT_XRGB8888);
	wl_buffer_add_listener(buffer->buffer, &wl_buffer_listener, buffer);
	wl_shm_pool_destroy(shm_pool);
	shm_unlink("wlfreerdp_shm");
	close(fd);

	/* this is the real surface data */
	memcpy(buffer->shm_data, (void*) window->data, window->width * window->height * 4);
	wl_surface_attach(window->surface, buffer->buffer, 0, 0);
	wl_surface_damage(window->surface, 0, 0, window->width, window->height);

	if (callback)
		wl_callback_destroy(callback);

	/* Re-arm the frame callback so the compositor drives the redraw loop. */
	window->callback = wl_surface_frame(window->surface);
	wl_callback_add_listener(window->callback, &wl_callback_listener, window);
	wl_surface_commit(window->surface);
	buffer->busy = 1;
	/* Unmap our view; the pixels already live in the shm file the
	 * compositor reads from. */
	munmap(buffer->shm_data, window->width * window->height * 4);
}

/* Mandatory ping/pong so the compositor considers the client responsive. */
static void wl_shell_surface_handle_ping(void* data, struct wl_shell_surface* shell_surface, uint32_t serial)
{
	wl_shell_surface_pong(shell_surface, serial);
}

static const struct wl_shell_surface_listener wl_shell_surface_listener =
{
	wl_shell_surface_handle_ping
};

/* Bind the globals we need (compositor, shell, shm) as they are announced. */
static void wl_registry_handle_global(void* data, struct wl_registry* registry, uint32_t id, const char *interface, uint32_t version)
{
	struct display* display = data;

	if (strcmp(interface, "wl_compositor") == 0)
		display->compositor = wl_registry_bind(registry, id, &wl_compositor_interface, 1);
	else if (strcmp(interface, "wl_shell") == 0)
		display->shell = wl_registry_bind(registry, id, &wl_shell_interface, 1);
	else if (strcmp(interface, "wl_shm") == 0)
		display->shm = wl_registry_bind(registry, id, &wl_shm_interface, 1);
}

static void wl_registry_handle_global_remove(void* data, struct wl_registry* registry, uint32_t name)
{
}

static const struct wl_registry_listener wl_registry_listener =
{
	wl_registry_handle_global,
	wl_registry_handle_global_remove
};

/* FreeRDP context constructor: allocate the channel manager. */
int wl_context_new(freerdp* instance, rdpContext* context)
{
	context->channels = freerdp_channels_new();

	return 0;
}

void wl_context_free(freerdp* instance, rdpContext* context)
{
}

/* Reset the GDI invalid region before FreeRDP paints updates into it. */
void wl_begin_paint(rdpContext* context)
{
	rdpGdi* gdi;

	gdi = context->gdi;
	gdi->primary->hdc->hwnd->invalid->null = 1;
}

/* Copy the freshly painted GDI frame into window->data and pump Wayland. */
void wl_end_paint(rdpContext* context)
{
	rdpGdi* gdi;
	struct display* display;
	struct window* window;
	struct wl_context* context_w;

	gdi = context->gdi;
	if (gdi->primary->hdc->hwnd->invalid->null)
		return;

	context_w = (struct wl_context*) context;
	display = context_w->display;
	window = context_w->window;

	/* NOTE(review): the realloc() return value is discarded — if the block
	 * moves, window->data is left dangling and the new block leaks. The
	 * size also never changes after wl_post_connect, so the call appears
	 * to be either a bug or dead; needs fixing upstream. */
	realloc(window->data, gdi->width * gdi->height * 4);
	memcpy(window->data, (void*) gdi->primary_buffer, gdi->width * gdi->height * 4);

	wl_display_dispatch(display->display);
}

/* Connect to the compositor and bind globals before the RDP connection. */
BOOL wl_pre_connect(freerdp* instance)
{
	struct display* display;
	struct wl_context* context;

	freerdp_channels_pre_connect(instance->context->channels, instance);

	display = malloc(sizeof(*display));
	display->display = wl_display_connect(NULL);

	if (!display->display)
	{
		fprintf(stderr, "wl_pre_connect: failed to connect to Wayland compositor\n");
		fprintf(stderr, "Please check that the XDG_RUNTIME_DIR environment variable is properly set.\n");
		free(display);
		return FALSE;
	}

	display->registry = wl_display_get_registry(display->display);
	wl_registry_add_listener(display->registry, &wl_registry_listener, display);
	/* Roundtrip guarantees all registry announcements arrived. */
	wl_display_roundtrip(display->display);

	if (!display->compositor || !display->shell || !display->shm)
	{
		fprintf(stderr, "wl_pre_connect: failed to find needed compositor interfaces\n");
		free(display);
		return FALSE;
	}

	/* put Wayland data in the context here */
	context = (struct wl_context*) instance->context;
	context->display = display;

	return TRUE;
}

/* Create the toplevel surface, init GDI and show the first frame. */
BOOL wl_post_connect(freerdp* instance)
{
	struct window* window;
	struct wl_context* context;

	context = (struct wl_context*) instance->context;
	window = malloc(sizeof(*window));
	window->width = instance->settings->DesktopWidth;
	window->height = instance->settings->DesktopHeight;
	window->buffers[0].busy = 0;
	window->buffers[1].busy = 0;
	window->callback = NULL;
	window->display = context->display;
	window->surface = wl_compositor_create_surface(window->display->compositor);
	window->shell_surface = wl_shell_get_shell_surface(window->display->shell, window->surface);

	wl_shell_surface_add_listener(window->shell_surface, &wl_shell_surface_listener, NULL);
	wl_shell_surface_set_title(window->shell_surface, "FreeRDP");
	wl_shell_surface_set_toplevel(window->shell_surface);
	wl_surface_damage(window->surface, 0, 0, window->width, window->height);

	/* GC/GDI logic here */
	rdpGdi* gdi;

	gdi_init(instance, CLRCONV_ALPHA | CLRCONV_INVERT | CLRBUF_32BPP, NULL);
	gdi = instance->context->gdi;

	/* fill buffer with first image here */
	window->data = malloc (gdi->width * gdi->height *4);
	memcpy(window->data, (void*) gdi->primary_buffer, gdi->width * gdi->height * 4);

	instance->update->BeginPaint = wl_begin_paint;
	instance->update->EndPaint = wl_end_paint;

	/* put Wayland data in the context here */
	context->window = window;

	freerdp_channels_post_connect(instance->context->channels, instance);

	/* Kick off the compositor-driven redraw loop. */
	window_redraw(window, NULL, 0);

	return TRUE;
}

/*
 * Main event loop: multiplex FreeRDP and channel fds with select(),
 * then tear everything down when the connection ends.
 */
int wlfreerdp_run(freerdp* instance)
{
	int i;
	int fds;
	int max_fds;
	int rcount;
	int wcount;
	void* rfds[32];
	void* wfds[32];
	fd_set rfds_set;
	fd_set wfds_set;

	ZeroMemory(rfds, sizeof(rfds));
	ZeroMemory(wfds, sizeof(wfds));

	freerdp_connect(instance);

	while (1)
	{
		rcount = 0;
		wcount = 0;

		if (freerdp_get_fds(instance, rfds, &rcount, wfds, &wcount) != TRUE)
		{
			printf("Failed to get FreeRDP file descriptor");
			break;
		}

		if (freerdp_channels_get_fds(instance->context->channels, instance, rfds, &rcount, wfds, &wcount) != TRUE)
		{
			printf("Failed to get FreeRDP file descriptor");
			break;
		}

		max_fds = 0;
		FD_ZERO(&rfds_set);
		FD_ZERO(&wfds_set);

		/* NOTE(review): only the read fds are added; wfds/wcount are
		 * collected but never put into wfds_set before select(). */
		for (i = 0; i < rcount; i++)
		{
			fds = (int)(long)(rfds[i]);

			if (fds > max_fds)
				max_fds = fds;

			FD_SET(fds, &rfds_set);
		}

		if (max_fds == 0)
			break;

		if (select(max_fds + 1, &rfds_set, &wfds_set, NULL, NULL) == -1)
		{
			/* Transient errnos are tolerated; anything else ends the loop. */
			if (!((errno == EAGAIN) || (errno == EWOULDBLOCK) || (errno == EINPROGRESS) || (errno == EINTR)))
			{
				printf("wlfreerdp_run: select failed\n");
				break;
			}
		}

		if (freerdp_check_fds(instance) != TRUE)
		{
			printf("Failed to check FreeRDP file descriptor\n");
			break;
		}

		if (freerdp_channels_check_fds(instance->context->channels, instance) != TRUE)
		{
			printf("Failed to check channel manager file descriptor\n");
			break;
		}
	}

	/* Teardown: release Wayland resources, then the FreeRDP instance. */
	struct display* display;
	struct window* window;
	struct wl_context* context;

	context = (struct wl_context*) instance->context;
	display = context->display;
	window = context->window;
	free(window->buffers[0].shm_data);
	free(window->buffers[1].shm_data);
	free(window->data);

	wl_buffer_destroy(window->buffers[0].buffer);
	wl_buffer_destroy(window->buffers[1].buffer);
	wl_shell_surface_destroy(window->shell_surface);
	wl_surface_destroy(window->surface);
	wl_shm_destroy(display->shm);
	wl_shell_destroy(display->shell);
	wl_compositor_destroy(display->compositor);
	wl_registry_destroy(display->registry);
	wl_display_disconnect(display->display);

	freerdp_channels_close(instance->context->channels, instance);
	freerdp_channels_free(instance->context->channels);
	freerdp_free(instance);

	return 0;
}

/* Entry point: wire up callbacks, parse the command line, run the client. */
int main(int argc, char* argv[])
{
	int status;
	freerdp* instance;

	freerdp_channels_global_init();

	instance = freerdp_new();
	instance->PreConnect = wl_pre_connect;
	instance->PostConnect = wl_post_connect;
	instance->ContextSize = sizeof(struct wl_context);
	instance->ContextNew = wl_context_new;
	instance->ContextFree = wl_context_free;

	freerdp_context_new(instance);

	status = freerdp_client_parse_command_line_arguments(argc, argv, instance->settings);

	/* NOTE(review): exit(0) reports success even though parsing failed. */
	if (status < 0)
		exit(0);

	freerdp_client_load_addins(instance->context->channels, instance->settings);

	wlfreerdp_run(instance);

	freerdp_channels_global_uninit();

	return 0;
}
"""Threaded task queue: wraps parsed commands in Task objects and executes
them on worker threads, replying through a pluggable response channel."""
import copy
import json
import queue
import random
import traceback
from threading import Thread, get_ident

from compipe.runtime_env import Environment as env

from ..response.response import response_channel_handlers, RespChannel
from ..cmd_enroller import command_list
from ..exception.task_queue_error import GErrorDuplicateSingletonCMD
from ..response.command_result import CommandSingleResult, MSGStatusCodes
# NOTE(review): RespChannel is imported twice from the same module.
from ..response.response import RespChannel
from .hash_code_helper import encrypt_str
from .logging import logger
from .parameters import (ARG_ARGUMENT, ARG_CHANNEL, ARG_COMMAND,
                         ARG_RESPONSE, ARG_USER)

current_thread = None

# only support single task to avoid content conflict on a single vm
NUM_WORKERS = 1

GREETING_HEADER = 'Apollo 11, this is Houston!'

# AIR_TO_GROUND_VOICE — random flavour text appended to the greeting reply.
ATDV_TRANSCRIPTION = ['You are confirmed GO for orbit.',
                      'Through Canary. Over.',
                      'The booster has been configured for orbital coast.',
                      'Through Tananarive. Over.',
                      'We are receiving your FM downlink now.',
                      'We are seeing the pitch hot firing and it looks good.',
                      'Go ahead and we\'ll watch you on TM.',
                      'We\'ve completed the uplink.',
                      'You are GO for TLI. Over.',
                      'We\'re reading you readability about 3,'
                      'strength 5. Sounds pretty good. Over.']


class Task():
    """A queued command: an interpreter callable plus the kwargs that
    describe the command, its arguments and where to send responses."""

    def __init__(self, interpreter=None, arguments=None, kwargs=None):
        self.interpreter = interpreter
        self.arguments = arguments
        self.kwargs = kwargs
        self._response = None       # lazily-built response channel handler
        self.thread_ts = None       # chat-thread timestamp for threaded replies

    @property
    def hash(self):
        # Stable digest of the kwargs; used to spot duplicate singleton tasks.
        key_str = json.dumps(self.kwargs)
        return encrypt_str(key_str)

    @property
    def command(self):
        return self.kwargs[ARG_COMMAND]

    @property
    def args(self):
        return self.kwargs[ARG_ARGUMENT] or []

    @property
    def user(self):
        return self.kwargs.get(ARG_USER, 'com')

    @property
    def channel(self):
        return self.kwargs.get(ARG_CHANNEL, env.dev_channel)

    @property
    def response_channel(self):
        return self.kwargs.get(ARG_RESPONSE, RespChannel.console.value)

    @property
    def response(self):
        """Lazily resolve and instantiate the response-channel handler."""
        if not self._response:
            # parse the response channel class from the name
            # NOTE(review): the fallback is RespChannel.console.value (a
            # name string), not a handler class — confirm it is callable.
            resp_inst = response_channel_handlers.get(self.response_channel,
                                                      RespChannel.console.value)

            # trigger the corresponding channel to response messages
            self._response = resp_inst(channel=self.channel, user=self.user)

        return self._response

    @property
    def is_singleton(self):
        """ Singleton mode task, it means the command would not be triggered in
        multi-threads at the same time

        Returns:
            {bool} -- Represent the flag identifying singleton mode.
        """
        return self.command in command_list and command_list[self.command]['singleton']

    def pop(self):
        # Decompose into (interpreter, args, kwargs) with safe defaults.
        return self.interpreter, self.arguments or [], self.kwargs or {}

    def run(self):
        # Execute the command; exceptions propagate to the worker loop.
        self.interpreter(*self.arguments, **self.kwargs)
        return True

    def __str__(self):
        # filtered build-in kwargs
        # e.g. 'ARG_TASK_WORKER' flag would be added to the param
        # when trigging through task queue
        params = " ".join(self.args)
        return f'{self.command} -a {params}'


class TaskQueue(queue.Queue):
    """FIFO task queue with N daemon worker threads and duplicate-singleton
    protection. current_task_count tracks queued-plus-running tasks."""

    current_task_count = 0

    def __init__(self):
        queue.Queue.__init__(self)
        # get worker number (multi-thread support)
        # default value would be single thread.
        self.num_workers = int(env.worker_num)
        logger.debug(f'Start task queue service: thread number [{self.num_workers}]')
        self.start_workers()
        # thread-ident -> Task currently being processed by that worker
        self.current_task = {}

    def get_thread_task_hash(self):
        # Hashes of all tasks currently held by worker threads.
        return list(task.hash for task in self.current_task.values())

    def get(self):
        """Fetch the next task; requeue duplicate singleton commands."""
        task = super().get()

        if task.is_singleton and task.hash in self.get_thread_task_hash():
            # add back to the end of the task queue
            self.add_task(task)
            raise GErrorDuplicateSingletonCMD(f'Found duplicate singleton command.[{task.command}]')

        # record current task
        self.current_task.update({
            get_ident(): task
        })

        cmd_res = CommandSingleResult(
            message=f"{GREETING_HEADER} {random.choice(ATDV_TRANSCRIPTION)}",
            payload=f'[Command] `{str(task)}` \n[User] `{task.user}`')

        # retrieve the thread context, which is used to reply message in the
        # same threads (discord or slack channel)
        task.thread_ts = task.response.send(cmd_res)

        # blow codes were deprecated, leave the code snippet for further investigation
        # TODO: Clean up
        # if task.thread_ts:
        #     update message when grabbing task from queue
        #     team, channel = task.channel.split('#')
        #     payload = SlackChannel.resolve_payload(cmd_res)
        #     payload.update({
        #         'ts': task.thread_ts,
        #         'channel': channel,
        #         'team': team
        #     })
        #     chat_update(**payload)
        # else:
        #     task.thread_ts = task.response.send(cmd_res)

        return task

    def task_done(self):
        super().task_done()
        # remove finished task instance
        # NOTE(review): `del task_inst` only drops the local name — the entry
        # is never removed from self.current_task, so singleton detection may
        # keep matching finished tasks.
        task_inst = self.current_task.get(get_ident())
        del task_inst
        TaskQueue.current_task_count -= 1

    def add_task(self, task):
        """Enqueue a task and announce its queue position if workers are busy."""
        # Response greetings, ignore the cmd header when receving from compe
        if TaskQueue.current_task_count != 0:
            task.thread_ts = task.response.post(
                payload=f"Joined in task queue [{TaskQueue.current_task_count}] `{str(task)}`",
                msg_status=MSGStatusCodes.default)
        TaskQueue.current_task_count += 1
        self.put(task)

    def start_workers(self):
        # Workers are daemons except in console mode, so a console run waits
        # for them to finish.
        for _ in range(self.num_workers):
            t = Thread(target=self.worker)
            t.daemon = not env.console_mode
            t.start()

    def worker(self):
        """Worker loop: pull, run, and acknowledge tasks forever (or once,
        in console mode)."""
        while True:
            # NOTE(review): self.get() can raise GErrorDuplicateSingletonCMD
            # outside the try block, which would terminate this worker thread.
            task = self.get()
            try:
                task.run()
            except:
                logger.error(str(traceback.format_exc()))
            finally:
                logger.debug(f'Task\'s been Done! [{str(task)}]')
                self.task_done()
                # stop listenning task queue when trigging from consoles
                if env.console_mode:
                    logger.debug('Exit Task Queue : Current process '
                                 '[Environment.console_mode]')
                    break

    def get_queue_list(self):
        """Snapshot pending + current tasks as command/argument dicts."""
        # NOTE(review): this unpacks queue items as 3-tuples, but add_task
        # puts Task objects on the queue — verify what self.queue holds.
        queue_tasks = [{ARG_COMMAND: cmd[ARG_COMMAND],
                        ARG_ARGUMENT: cmd[ARG_ARGUMENT]} for _, _, cmd, in self.queue]
        if self.current_task:
            queue_tasks.append(copy.deepcopy(self.current_task))
        # reformat command context
        for item in queue_tasks:
            args = (arg for arg in item[ARG_ARGUMENT] if '=' in arg)
            for arg in args:
                pairs = arg.split('=')
                item.update({pairs[0]: pairs[1]})
        return queue_tasks
// $(document).ready(function(){ // // Menu Function // // Window scroll function // }); (function () { $('.nav-button').click(function(){ $('.nav-button, .side-nav, .nav-header, .nav-options').toggleClass('nav-open'); return false; }); // Food Menu function $('.nav-link.the-menu').click(function(){ $('.nav-button, .side-nav, .nav-header, .nav-options').removeClass('nav-open'); $('.menu-container').addClass('displayed'); return false; }); // closing the menu-overlay $('.close, .menu-container').click(function(){ $('.menu-container').removeClass('displayed'); return false;}); // preventing clicks on the menu closing it for now because there is nothing inside yet. $('.tab.menu-tab').click(function(){ return false; }); "use strict"; var car = function () { $(".owl-carousel1").owlCarousel({ loop: true, center: true, margin: 0, responsiveClass: true, nav: false, autoplay:true, autoplayTimeout:2800, autoplayHoverPause:true, responsive: { 0: { items: 1, nav: false }, 680: { items: 2, nav: false, loop: true, }, 1000: { items: 3, nav: true } } }); }; (function ($) { car(); })(jQuery); })(); jQuery(document).ready(function(){ $(".dropdown").hover( function() { $('.dropdown-menu', this).fadeIn("fast"); }, function() { $('.dropdown-menu', this).fadeOut("fast"); }); });
/*
  Copyright (c) 2012 Dirk Willrodt <willrodt@zbh.uni-hamburg.de>

  Permission to use, copy, modify, and distribute this software for any
  purpose with or without fee is hereby granted, provided that the above
  copyright notice and this permission notice appear in all copies.

  THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/

#ifndef S_SPLINT_S
#include <unistd.h>
#endif

#include "core/assert_api.h"
#include "core/log_api.h"
#include "core/ma_api.h"
#include "core/safearith.h"
#include "core/undef_api.h"
#include "core/xansi_api.h"
#include "core/unused_api.h"
#include "extended/sampling.h"

/* Two sampling strategies: every `rate` elements, or every `rate` pages. */
typedef enum {
  GT_SAMPLING_REGULAR,
  GT_SAMPLING_PAGES,
} GtSamplingMethod;

/* Maps sampled element numbers to file offsets (samplingtab); for page
   sampling, page_sampling additionally records which element starts each
   sample. current_* track the cursor used by the get_next_* functions. */
struct GtSampling
{
  GtSamplingMethod method;
  unsigned long numofsamples,
                sampling_rate,
                arraysize,
                *page_sampling,
                current_sample_num,
                current_sample_elementnum;
  size_t *samplingtab;
  long pagesize;
};

/* Common field initialisation shared by both constructors. */
static void sampling_init_sampling(GtSampling *sampling, unsigned long rate)
{
  sampling->numofsamples = 1UL;
  sampling->arraysize = 10UL;
  sampling->sampling_rate = rate;
  sampling->current_sample_elementnum = 0;
  sampling->current_sample_num = 0;
  sampling->pagesize = sysconf((int) _SC_PAGESIZE);
}

/* Create a regular sampler; first_offset must be page aligned. */
GtSampling *gt_sampling_new_regular(unsigned long rate, off_t first_offset)
{
  GtSampling *sampling = gt_malloc(sizeof (*sampling));
  sampling->method = GT_SAMPLING_REGULAR;
  gt_assert(rate != 0);
  sampling_init_sampling(sampling, rate);
  gt_assert(first_offset % sampling->pagesize == 0);
  sampling->page_sampling = NULL;
  sampling->samplingtab = gt_malloc((size_t) sampling->arraysize *
                                    sizeof (*sampling->samplingtab));
  gt_safe_assign(sampling->samplingtab[0], first_offset);
  return sampling;
}

/* Create a page-based sampler; reuses the regular constructor and adds
   the page_sampling element-number table. */
GtSampling *gt_sampling_new_page(unsigned long rate, off_t first_offset)
{
  GtSampling *sampling = gt_sampling_new_regular(rate, first_offset);
  sampling->method = GT_SAMPLING_PAGES;
  gt_assert(rate != 0);
  sampling->page_sampling = gt_malloc((size_t) sampling->arraysize *
                                      sizeof (*sampling->page_sampling));
  sampling->page_sampling[0] = 0;
  return sampling;
}

/* fwrite adapter matching the SamplingIOFunc signature. */
static inline void sampling_gt_xfwrite(void *ptr,
                                       size_t size,
                                       size_t nmemb,
                                       FILE *stream)
{
  gt_xfwrite((const void*) ptr, size, nmemb, stream);
}

/* fread adapter; asserts the full element count was read. */
static inline void sampling_gt_xfread(void *ptr,
                                      size_t size,
                                      size_t nmemb,
                                      FILE *stream)
{
  GT_UNUSED size_t read;
  read = gt_xfread(ptr, size, nmemb, stream);
  gt_assert(read == nmemb);
}

/* Function-pointer type so the same (de)serialisation code can either
   write or read, depending on which adapter is passed in. */
typedef void (*SamplingIOFunc)(void *ptr,
                               size_t size,
                               size_t nmemb,
                               FILE *stream);

/* Read or write a single scalar field. */
#define SAMPLING_IO_ONE(element, fp) \
    do { \
      io_func(&element, sizeof (element), (size_t) 1, fp);\
    } while (false)

/* (De)serialise the page_sampling table, allocating it first on read. */
static inline void sampling_io_page_sampling(GtSampling *sampling,
                                             FILE *fp,
                                             SamplingIOFunc io_func)
{
  if (sampling->page_sampling == NULL) {
    sampling->page_sampling =
      gt_malloc((size_t) sampling->arraysize *
                sizeof (*sampling->page_sampling));
  }
  io_func(sampling->page_sampling,
          sizeof (*sampling->page_sampling),
          (size_t) sampling->numofsamples,
          fp);
}

/* (De)serialise the offset table. */
static inline void sampling_io_samplingtab(GtSampling *sampling,
                                           FILE *fp,
                                           SamplingIOFunc io_func)
{
  io_func(sampling->samplingtab,
          sizeof (*sampling->samplingtab),
          (size_t) sampling->numofsamples,
          fp);
}

/* (De)serialise the header fields (count, method, rate). */
static inline void sampling_io_header(GtSampling *sampling,
                                      FILE *fp,
                                      SamplingIOFunc io_func)
{
  SAMPLING_IO_ONE(sampling->numofsamples, fp);
  gt_assert(sampling->numofsamples != 0);
  SAMPLING_IO_ONE(sampling->method, fp);
  SAMPLING_IO_ONE(sampling->sampling_rate, fp);
  gt_assert(sampling->sampling_rate != 0);
}

/* TODO: add checksums for data */
/* Header plus offset table; allocates samplingtab on read (NULL case). */
static void sampling_io_header_samplingtab(GtSampling *sampling,
                                           FILE *fp,
                                           SamplingIOFunc io_func)
{
  sampling_io_header(sampling, fp, io_func);
  gt_assert(sampling->method == GT_SAMPLING_REGULAR ||
            sampling->method == GT_SAMPLING_PAGES);
  if (sampling->samplingtab == NULL) {
    sampling->arraysize = sampling->numofsamples;
    sampling->samplingtab = gt_malloc((size_t) sampling->arraysize *
                                      sizeof (*sampling->samplingtab));
  }
  sampling_io_samplingtab(sampling, fp, io_func);
}

/* Serialise the sampler to fp. */
void gt_sampling_write(GtSampling *sampling, FILE *fp)
{
  gt_assert(sampling);
  gt_assert(fp);

  sampling_io_header_samplingtab(sampling, fp, sampling_gt_xfwrite);
  if (sampling->method == GT_SAMPLING_PAGES)
    sampling_io_page_sampling(sampling, fp, sampling_gt_xfwrite);
}

/* Deserialise a sampler from fp; tables are allocated on demand. */
GtSampling *gt_sampling_read(FILE *fp)
{
  GtSampling *sampling;
  gt_assert(fp);

  sampling = gt_malloc(sizeof (*sampling));
  sampling->samplingtab = NULL;
  sampling->page_sampling = NULL;
  sampling->current_sample_num = sampling->current_sample_elementnum = 0;
  sampling->pagesize = sysconf((int) _SC_PAGESIZE);

  sampling_io_header_samplingtab(sampling, fp, sampling_gt_xfread);
  if (sampling->method == GT_SAMPLING_PAGES)
    sampling_io_page_sampling(sampling, fp, sampling_gt_xfread);
  gt_assert(sampling->arraysize == sampling->numofsamples);
  return sampling;
}

/* Regular mode: the sample index is simply element_num / rate. */
static void get_regular_page(GtSampling *sampling,
                             unsigned long element_num,
                             unsigned long *sampled_element,
                             size_t *position)
{
  sampling->current_sample_num = element_num/sampling->sampling_rate;
  *sampled_element =
    sampling->current_sample_elementnum =
      sampling->current_sample_num * sampling->sampling_rate;
  *position = sampling->samplingtab[sampling->current_sample_num];
}

/* Page mode: binary-search page_sampling for the greatest sampled element
   <= element_num.
   NOTE(review): when page_sampling[middle] > element_num with middle == 0,
   `end = middle - 1` underflows (unsigned) — presumably unreachable because
   page_sampling[0] == 0, but worth confirming. */
static void get_pagewise_page(GtSampling *sampling,
                              unsigned long element_num,
                              unsigned long *sampled_element,
                              size_t *position)
{
  unsigned long start = 0,
                end,
                middle;
  gt_assert(sampling->numofsamples != 0);
  end = sampling->numofsamples - 1;
  middle = (end - start) >> 1;
  while (start < end) {
    if (sampling->page_sampling[middle] == element_num)
      break;
    else {
      if (sampling->page_sampling[middle] > element_num) {
        end = middle - 1;
        middle = start + ((end - start) >> 1);
      }
      else {
        /* Stop if the next sample already exceeds element_num: middle is
           the floor sample we want. */
        if (sampling->page_sampling[middle + 1] > element_num)
          break;
        else {
          start = middle + 1;
          middle = start + ((end - start) >> 1);
        }
      }
    }
  }
  *sampled_element =
    sampling->current_sample_elementnum =
      sampling->page_sampling[middle];
  sampling->current_sample_num = middle;
  *position = sampling->samplingtab[middle];
}

/* Look up the sample covering element_num; also moves the cursor. */
void gt_sampling_get_page(GtSampling *sampling,
                          unsigned long element_num,
                          unsigned long *sampled_element,
                          size_t *position)
{
  gt_assert(sampling != NULL);
  gt_assert(sampled_element != NULL);
  gt_assert(position != NULL);
  switch (sampling->method) {
    case GT_SAMPLING_REGULAR:
      get_regular_page(sampling,
                       element_num,
                       sampled_element,
                       position);
      break;
    case GT_SAMPLING_PAGES:
      get_pagewise_page(sampling,
                        element_num,
                        sampled_element,
                        position);
      break;
  }
}

unsigned long gt_sampling_get_current_elementnum(GtSampling *sampling)
{
  return sampling->current_sample_elementnum;
}

/* Element number of the sample after the cursor; 0 signals "no more"
   (note 0 is also a valid first-element number — sentinel overlap). */
unsigned long gt_sampling_get_next_elementnum(GtSampling *sampling)
{
  gt_assert(sampling->arraysize == sampling->numofsamples);
  gt_assert(sampling->current_sample_num < sampling->numofsamples);

  if (sampling->current_sample_num + 1 == sampling->numofsamples)
    return 0;
  gt_assert((sampling->current_sample_num + 1) < sampling->arraysize);
  switch (sampling->method) {
    case GT_SAMPLING_REGULAR:
      return sampling->current_sample_elementnum + sampling->sampling_rate;
    case GT_SAMPLING_PAGES:
      return sampling->page_sampling[sampling->current_sample_num + 1];
    default:
      return GT_UNDEF_ULONG;
  }
}

/* Advance the cursor. Returns SUCCESS (1) while samples remain, END (0)
   after wrapping back to the first sample, ERROR (-1) on unknown method. */
int gt_sampling_get_next_sample(GtSampling *sampling,
                                unsigned long *sampled_element,
                                size_t *position)
{
  enum state {
    ERROR = -1,
    END,
    SUCCESS
  };
  enum state status = END;
  if (sampling->current_sample_num + 1 == sampling->numofsamples) {
    /* Past the last sample: reset to the beginning. */
    sampling->current_sample_num = 0;
    *sampled_element = sampling->current_sample_elementnum = 0;
  }
  else {
    status = SUCCESS;
    sampling->current_sample_num++;
    switch (sampling->method) {
      case GT_SAMPLING_REGULAR:
        *sampled_element =
          sampling->current_sample_elementnum += sampling->sampling_rate;
        break;
      case GT_SAMPLING_PAGES:
        *sampled_element =
          sampling->current_sample_elementnum =
            sampling->page_sampling[sampling->current_sample_num];
        break;
      default:
        status = ERROR;
    }
  }
  if (status != ERROR)
    *position = sampling->samplingtab[sampling->current_sample_num];
  return (int) status;
}

bool gt_sampling_is_regular(GtSampling *sampling)
{
  gt_assert(sampling);
  return sampling->method == GT_SAMPLING_REGULAR;
}

unsigned long gt_sampling_get_rate(GtSampling *sampling)
{
  gt_assert(sampling);
  return sampling->sampling_rate;
}

/* Append a new sample (file offset + element number), growing the tables
   by ~1% + 10 entries when full. */
void gt_sampling_add_sample(GtSampling *sampling,
                            size_t position,
                            unsigned long element_num)
{
  gt_assert(sampling);
  gt_assert(sampling->samplingtab);

  sampling->numofsamples++;
  if (sampling->numofsamples == sampling->arraysize) {
    sampling->arraysize += sampling->arraysize/100 + 10;
    sampling->samplingtab = gt_realloc(sampling->samplingtab,
                                       (size_t) sampling->arraysize *
                                         sizeof (*sampling->samplingtab));
    if (sampling->method == GT_SAMPLING_PAGES) {
      gt_assert(sampling->page_sampling);
      sampling->page_sampling = gt_realloc(sampling->page_sampling,
                                           (size_t) sampling->arraysize *
                                             sizeof (*sampling->page_sampling));
    }
  }
  if (sampling->method == GT_SAMPLING_PAGES)
    sampling->page_sampling[sampling->numofsamples -1] = element_num;
  else
    /* Regular sampling only accepts element numbers on the rate grid. */
    gt_assert(element_num % sampling->sampling_rate == 0);
  sampling->samplingtab[sampling->numofsamples -1] = position;
}

/* Decide whether the next element should start a new sample: regular mode
   counts elements; page mode counts pages and requires the element not to
   fit in the current page's free space. */
bool gt_sampling_is_next_element_sample(GtSampling *sampling,
                                        unsigned long pages_written,
                                        unsigned long elements_written,
                                        unsigned long elem_bit_size,
                                        unsigned long free_pagespace_bitsize)
{
  if (sampling->method == GT_SAMPLING_REGULAR)
    return elements_written >= sampling->sampling_rate;
  else {
    if (pages_written >= sampling->sampling_rate) {
      return free_pagespace_bitsize < elem_bit_size;
    }
  }
  return false;
}

/* Free the sampler and both tables; NULL-safe. */
void gt_sampling_delete(GtSampling *sampling)
{
  if (!sampling)
    return;
  gt_free(sampling->samplingtab);
  gt_free(sampling->page_sampling);
  gt_free(sampling);
}
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from topic_tools/MuxAddRequest.msg. Do not edit."""
# NOTE(review): generated ROS service bindings -- comments added for review
# only; the executable code is intentionally left byte-for-byte unchanged.
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct


class MuxAddRequest(genpy.Message):
  # Wire-format metadata used by ROS for type checking and negotiation.
  _md5sum = "d8f94bae31b356b24d0427f80426d0c3"
  _type = "topic_tools/MuxAddRequest"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """string topic
"""
  __slots__ = ['topic']
  _slot_types = ['string']

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       topic

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(MuxAddRequest, self).__init__(*args, **kwds)
      #message fields cannot be None, assign default values for those that are
      if self.topic is None:
        self.topic = ''
    else:
      self.topic = ''

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      _x = self.topic
      length = len(_x)
      # strings are length-prefixed (little-endian uint32) on the wire
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.topic = str[start:end].decode('utf-8')
      else:
        self.topic = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      _x = self.topic
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      if python3:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.topic = str[start:end].decode('utf-8')
      else:
        self.topic = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

# cached struct for the uint32 length prefix
_struct_I = genpy.struct_I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from topic_tools/MuxAddResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct


class MuxAddResponse(genpy.Message):
  # Empty response message: no fields, md5 of the empty string.
  _md5sum = "d41d8cd98f00b204e9800998ecf8427e"
  _type = "topic_tools/MuxAddResponse"
  _has_header = False #flag to mark the presence of a Header object
  _full_text = """
"""
  __slots__ = []
  _slot_types = []

  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommend
    use is keyword arguments as this is more robust to future message
    changes.  You cannot mix in-order arguments and keyword arguments.

    The available fields are:

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(MuxAddResponse, self).__init__(*args, **kwds)

  def _get_types(self):
    """
    internal API method
    """
    return self._slot_types

  def serialize(self, buff):
    """
    serialize message into buffer
    :param buff: buffer, ``StringIO``
    """
    try:
      pass
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance
    :param str: byte array of serialized message, ``str``
    """
    try:
      end = 0
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer
    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      pass
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types
    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      end = 0
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill

_struct_I = genpy.struct_I
# Service wrapper binding the request/response classes together for rospy.
class MuxAdd(object):
  _type          = 'topic_tools/MuxAdd'
  _md5sum = 'd8f94bae31b356b24d0427f80426d0c3'
  _request_class  = MuxAddRequest
  _response_class = MuxAddResponse
from line_profiler import LineProfiler
from pymongo import MongoClient
from threading import Thread
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import itertools
from io import BytesIO
from PIL import Image
import pickle
import base64
import numpy as np
import sys

################ **Optimizing Utils** ##################
import time
import sys  # NOTE(review): duplicate of the import above; harmless


def timeFigureUpdate(title):
    """Decorator factory: wraps a function and prints its wall-clock
    runtime to stdout, labelled with *title*."""
    def wrapper(func):
        def timeit(*args):
            start = time.time()
            x = func(*args)
            end = time.time()
            # print("Elapsed Time: {}".format(end-start),file=sys.stdout)
            sys.stdout.write("Elapsed Time of {} update_figure_and_data_structure function: {}\n".format(title,end-start))
            return x
        return timeit
    return wrapper


def profile(title):
    """Decorator factory: runs the wrapped function under
    line_profiler and prints a per-line profile labelled *title*."""
    def wrapper(f):
        def printProfile(*args):
            lp = LineProfiler()
            dec_f = lp(f)
            output_value = dec_f(*args)
            print("Line Profile for:",title)
            print("----------------------")
            lp.print_stats()
            return output_value
        return printProfile
    return wrapper

##############################################################


class Database():
    """Thin wrapper around a local MongoDB connection.

    Terminology used throughout this module: a Mongo *database* holds
    *folders* (collections), which hold *run* documents.
    """

    def __init__(self):
        # MongoClient() with no args connects to localhost:27017
        self.client = MongoClient()
        self.checkConnection()

    ## Database utilities
    ## I do not want the user to accidently delete all their data
    # def removeDataBase(self,folder_name):
    #     self.client.drop_database(folder_name)

    def removeFolder(self,database_name,folder_name):
        """Drop one collection ("folder") from *database_name*."""
        self.client[database_name][folder_name].drop()

    def viewDataBase(self,database_name):
        ''' show all collections in a folder '''
        # include include_system_collections=False?
        for collection in self.client[database_name].list_collection_names():
            print(collection)

    def getAllFolderIteratorsFromDatabase(self,database_name):
        """Return a list with one find() cursor per collection in
        *database_name*. Cursors are lazy; nothing is fetched yet."""
        folder_iterators_list= []
        folder_names = self.client[database_name].list_collection_names()
        for folder_name in folder_names:
            iterator = self.client[database_name][folder_name].find()
            folder_iterators_list.append(iterator)
        return folder_iterators_list

    def viewFolder(self,database_name,folder_name):
        ''' show all documents in a collection '''
        for doc in self.client[database_name][folder_name].find():
            print(doc)

    def close(self):
        """Close the underlying MongoClient connection pool."""
        self.client.close()

    ## Connection utilties, not meant to be used by user
    def checkConnection(self):
        # Probe the server with a short-lived insert on a worker thread;
        # if it hasn't finished within 2s we assume the server is down.
        # NOTE(review): a hung thread is abandoned, not killed.
        t = Thread(target=self.testInsert)
        t.start()
        t.join(2)
        if t.is_alive():
            raise Exception("Cannot connect to MongoDB")

    def testInsert(self):
        # NOTE(review): insert()/remove() are the legacy pymongo 2.x API;
        # pymongo >= 4 only offers insert_one()/delete_one().
        doc = self.client['test_db']['test_collection']
        doc.insert({"Test":1})
        doc.remove({"Test":1})

################ **Misc** ##################
from functools import partial
def partial_decomaker(partial_name):
    """Decorator factory: binds *partial_name* as the keyword argument
    ``partial_name`` of the decorated function via functools.partial."""
    def decorator(func):
        partial_func = partial(func,partial_name=partial_name)
        return partial_func
    return decorator

from inspect import getsource
def code(function):
    """Print the source code of *function* (interactive helper)."""
    print(getsource(function))

################ **Functions used to load Data in** ##################
def getParamDict(database_name,folder_name):
    """Map each run's 'Time' to its 'Experimental Parameters' dict.

    NOTE(review): opens its own Database but never calls close().
    """
    mongo = Database()
    runs = mongo.client[database_name][folder_name]
    ## all the runs in the folder
    runs_iterator = runs.find()
    dict_of_dicts = {}
    for run_object in runs_iterator:
        Experimental_Parameters = run_object['Experimental Parameters']
        time = Experimental_Parameters['Time']
        dict_of_dicts[time] = Experimental_Parameters
    return dict_of_dicts

def getLegendNames(dict_of_param_dicts):
    """Sorted union of all parameter names across every run."""
    list_of_param_names = []
    for time,plot_dict in dict_of_param_dicts.items():
        list_of_param_names.append(plot_dict.keys())
    legend_names = sorted(set(list(itertools.chain(*list_of_param_names))))
    return legend_names

## Object Related
def getDictOfNameObjects(database_name,folder_name,name,f=None):
    """Map each run's 'Time' to run[name], optionally transformed by *f*.

    :param name: key to extract from each run document
    :param f: optional callable applied to each extracted value
    """
    mongo = Database()
    runs = mongo.client[database_name][folder_name]
    ## all the runs in the folder
    runs_iterator = runs.find()
    nameObjects_for_each_run = {}
    # paramObjects_for_each_run = {}
    for run_object in runs_iterator:
        Experimental_Parameters = run_object['Experimental Parameters']
        time = Experimental_Parameters['Time']
        # param_objects_for_each_run[time] = Experimental_Parameters
        try:
            one_run_dict = run_object[name]
            if f:
                one_run_dict = f(one_run_dict)
            nameObjects_for_each_run[time] = one_run_dict
        except KeyError:
            # runs without this key are skipped, not fatal
            print("Name does not exist in the run")
    mongo.close()
    # return nameObjects_for_each_run, paramObjects_for_each_run
    return nameObjects_for_each_run

def getBase64Encoding(one_run_dict):
    """Convert every pickled image in *one_run_dict* to a base64 JPEG."""
    return {image_name:binaryToBase64(binary_image) for image_name,binary_image in one_run_dict.items()}

def binaryToBase64(binary_image):
    """Unpickle a matrix (values presumably in [0, 1] -- TODO confirm),
    render it as a greyscale JPEG and return the base64 string body."""
    numpy_matrix=pickle.loads(binary_image)
    img = Image.fromarray(np.uint8(numpy_matrix*255),'L')
    # base64_string= base64.b64encode(numpy_matrix)
    buff = BytesIO()
    img.save(buff, format="JPEG")
    base64_string = base64.b64encode(buff.getvalue())
    buff.close()
    # strip the leading b' and trailing ' of the bytes repr
    return str(base64_string)[2:-1]

def getFigureNames(nameObjects_for_each_run):
    """Sorted union of figure names across all runs."""
    list_of_names = []
    for time, one_run_dict in nameObjects_for_each_run.items():
        list_of_names.append(one_run_dict.keys())
    names = sorted(set(list(itertools.chain(*list_of_names))))
    return names

##############################################################
def createHTMLRowList(self):
    """Build alternating title/paragraph dash rows for every thought list.

    NOTE(review): written like a method (takes ``self`` with
    ordered_thoughtList_keys / dict_of_all_thought_lists attributes) but
    defined at module level -- presumably attached to a class elsewhere.
    """
    html_row_list = []
    for time in self.ordered_thoughtList_keys:
        thought_list = self.dict_of_all_thought_lists[time]
        title_row = createThoughtsTitle(thought_list,time)
        html_row_list.append(title_row)
        paragraph_for_each_thought = createThoughts(thought_list)
        paragraph_row = html.Div(paragraph_for_each_thought,className='row')
        html_row_list.append(paragraph_row)
    return html_row_list

## only take 0.1 seconds.
# So no issue in updating it
# @profile("Thoughts")
def getDictOfAllThoughtLists(database_name):
    """Collect the 'Thoughts' lists of every run in every folder of
    *database_name*, keyed by the run's 'Time' string.

    :param database_name: Mongo database to scan.
    :return: dict mapping time-string -> thought list.
    """
    mongo = Database()
    folder_iterators_list = mongo.getAllFolderIteratorsFromDatabase(database_name)
    database_dict = {}
    for folder_iterator in folder_iterators_list:
        database_dict.update(getDictOfThoughtLists(folder_iterator))
    mongo.close()
    return database_dict


#########################
def getDictOfThoughtLists(folder_iterator):
    """Map each run document's 'Time' to its 'Thoughts' list.

    Runs without a 'Thoughts' key are reported and skipped.

    :param folder_iterator: iterable of run documents (dicts).
    :return: dict mapping time-string -> thought list.
    """
    dict_of_thoughtlists = {}
    for run_object in folder_iterator:
        Experimental_Parameters = run_object['Experimental Parameters']
        time = Experimental_Parameters['Time']
        try:
            ## eliminating the extra self.folder_name logs
            dict_of_thoughtlists[time] = run_object['Thoughts']
        except KeyError:
            print("Run object does not have 'Thoughts' as a key")
    return dict_of_thoughtlists


#########################
def getOrderedKeys(dict_of_thoughtlists):
    """Return the dict's keys (time strings) in sorted order."""
    return sorted(dict_of_thoughtlists.keys())


def createThoughts(list_of_thoughts):
    """Wrap every thought in an html.P element.

    Thought lists interleave folder names and thoughts, so only every
    second entry (starting at index 1) is a thought.
    """
    ## skipping the folder_names
    return [html.P(thought) for thought in list_of_thoughts[1::2]]


def createThoughtsTitle(list_of_thoughts,time):
    """Build the bold title row '<MM-DD HH:MM>: <folder name>'.

    The folder name is stored as the first entry of the thought list.
    """
    folder_name = list_of_thoughts[0]
    ## No need for year and seconds
    title_row = html.Div(html.B(time[5:-3]+': '+folder_name),className='row')
    return title_row


##############################################################
################ **Functions used During Callbacks** ##################
def getSelectedRunsFromDatatable(rows,selected_row_indices):
    """Return the 'Time' of the selected datatable rows.

    An empty (or missing) selection means "all rows".

    :param rows: list of row dicts, each with a 'Time' key.
    :param selected_row_indices: list of indices into *rows*.
    """
    if not selected_row_indices:
        selected_runs = rows
    else:
        selected_runs = [rows[i] for i in selected_row_indices]
    return [run_dict['Time'] for run_dict in selected_runs]


if __name__ == '__main__':
    database = Database()
    database.client['test_db']['test_collection'].insert_one({"Test":"test"})
    # BUGFIX: Database has no viewRun/removeRun methods -- the original
    # calls raised AttributeError; the real methods are viewFolder /
    # removeFolder.
    database.viewFolder('test_db','test_collection')
    database.removeFolder('test_db','test_collection')
    database.viewFolder('test_db','test_collection')
from math import sqrt import numpy as np import pandas as pd import matplotlib.pyplot as plt from statsmodels.tsa.stattools import adfuller def compute_correlation(x,y,r2=False,auto=False): '''Take two array-like series to calculate the correlation x: numpy.array or pandas.DataFrame: x value for correlation y: numpy.array or pandas.DataFrame: y value for correlation r2: Boolean (optional): return r-squared value instead of r''' '''Need to remove the mean for autocorrelation?''' df = pd.DataFrame({'x':x,'y':y}) if auto: df['x'] = df['x'] - df['x'].mean() df['y'] = df['y'] - df['y'].mean() df.dropna(inplace=True) n = len(df) df['x2'] = np.square(df['x']) df['y2'] = np.square(df['y']) df['xy'] = df['x'] * df['y'] sum_x = df['x'].sum() sum_y = df['y'].sum() sum_xy = df['xy'].sum() sum_x2 = df['x2'].sum() sum_y2 = df['y2'].sum() corr = (n*(sum_xy) - (sum_x*sum_y)) / (sqrt(((n*(sum_x2) - (sum_x**2)) *((n*(sum_y2) - (sum_y**2)))))) #corr_test = np.cov(df['x'].values,df['y'].values)[0,1] return df, corr def acf_compute(x,y): if isinstance(x,pd.DataFrame) or isinstance(x,pd.Series): x = x.dropna().values if isinstance(y,pd.DataFrame) or isinstance(y,pd.Series): y = y.dropna().values nx = len(x) ny = len(y) x = x[nx-ny:] top = np.mean(np.dot((x-np.mean(x)), (y-np.mean(y)))) bot = np.sum(np.square((x-np.mean(x)))) acf_r = top/bot return acf_r def autocorrelate(x,shift=1,conf_int=False,lags=None,df=False): if isinstance(x,pd.DataFrame) or isinstance(x,pd.Series): x = x.values n = len(x) if lags is None: lags = n else: lags = lags r_array = np.empty(lags) conf_lower = np.empty(lags) conf_upper = np.empty(lags) for i in range(lags): r_array[i] = acf_compute(x[i:],x[:len(x)-i]) conf_lower[i] = -1.96 / np.sqrt(len(x)-i) conf_upper[i] = 1.96 / np.sqrt(len(x)-i) if df: r_array = pd.DataFrame(data=r_array) if conf_int: return r_array, conf_upper, conf_lower return r_array def plot_auto_corr(x,title=None,lags=None): auto_corr, conf_upper, conf_lower = 
autocorrelate(x,conf_int=True,lags=lags) plt.plot(auto_corr,linestyle='none',marker='o',color='red') for i, x in enumerate(auto_corr): plt.vlines(x=i,ymin=min(0,x),ymax=max(0,x)) plt.fill_between([i for i in range(len(auto_corr))],conf_lower,conf_upper,color='green',alpha=0.2) if title is None: title = 'Autocorrelation (Lags = {})'.format(len(auto_corr)) else: title = title + ' (Lags = {})'.format(len(auto_corr)) plt.title(title,fontsize=16) plt.show() def test_stationarity(df, print_results=True, **kwargs): '''Use stattools adfuller function with a more DataFrame-friendly format df = pandas.DataFrame or pandas.Series: required, used for testing stationarity **kwargs = dict, used to feed adfuller arguments''' raw_results = adfuller(df,**kwargs) df_rows = {fk: fv for fv, fk in zip(raw_results[:4],list(['Test Statistic','P-Value','Lags Used','Observations Taken']))} df_rows.update({sk: sv for sk, sv in raw_results[4:-1][0].items()}) dickey_test_results = pd.DataFrame(index=df_rows.keys(),data=list(df_rows.values()),columns=['Metric']) if print_results: print('Results of the Augmented Dickey-Fuller Test: \n\n', dickey_test_results.head(10)) return dickey_test_results
/** * Created by juandaniel on 11/5/17. */
import sveltePreprocess from "svelte-preprocess";
import node from "@sveltejs/adapter-node";
import { mdsvex } from "mdsvex";
import autoprefixer from "autoprefixer";

// Shared preprocessor chain: SCSS with "src" on the include path, then
// PostCSS with autoprefixer for vendor prefixes.
const preprocessors = sveltePreprocess({
  scss: {
    includePaths: ["src"],
  },
  postcss: {
    plugins: [autoprefixer],
  },
});

/** @type {import('@sveltejs/kit').Config} */
export default {
  // Consult https://github.com/sveltejs/svelte-preprocess
  // for more information about preprocessors
  // mdsvex runs first so .svx markdown components get compiled too.
  preprocess: [mdsvex(), preprocessors],
  kit: {
    // Node server build (standalone, not static/serverless).
    adapter: node(),
    // hydrate the <div id="svelte"> element in src/app.html
    target: "#svelte",
  },
  // Treat both Svelte and mdsvex files as components.
  extensions: [".svelte", ".svx"],
};
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE.  See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA.

"""
Implementation of an SSH2 "message".
"""

import struct

from paramiko import util
from paramiko.common import zero_byte, max_byte, one_byte, asbytes
from paramiko.py3compat import long, BytesIO, u, integer_types


class Message (object):
    """
    An SSH2 message is a stream of bytes that encodes some combination of
    strings, integers, bools, and infinite-precision integers (known in Python
    as longs).  This class builds or breaks down such a byte stream.

    Normally you don't need to deal with anything this low-level, but it's
    exposed for people implementing custom extensions, or features that
    paramiko doesn't support yet.
    """

    # Threshold above which add_adaptive_int switches from a plain 32-bit
    # encoding to the 0xFF-prefixed mpint encoding.
    big_int = long(0xff000000)

    def __init__(self, content=None):
        """
        Create a new SSH2 message.

        :param str content:
            the byte stream to use as the message content (passed in only when
            decomposing a message).
        """
        if content is not None:
            self.packet = BytesIO(content)
        else:
            self.packet = BytesIO()

    def __str__(self):
        """
        Return the byte stream content of this message, as a string/bytes obj.
        """
        return self.asbytes()

    def __repr__(self):
        """
        Returns a string representation of this object, for debugging.
        """
        return 'paramiko.Message(' + repr(self.packet.getvalue()) + ')'

    def asbytes(self):
        """
        Return the byte stream content of this Message, as bytes.
        """
        return self.packet.getvalue()

    def rewind(self):
        """
        Rewind the message to the beginning as if no items had been parsed
        out of it yet.
        """
        self.packet.seek(0)

    def get_remainder(self):
        """
        Return the bytes (as a `str`) of this message that haven't already been
        parsed and returned.
        """
        # peek without disturbing the read cursor
        position = self.packet.tell()
        remainder = self.packet.read()
        self.packet.seek(position)
        return remainder

    def get_so_far(self):
        """
        Returns the `str` bytes of this message that have been parsed and
        returned. The string passed into a message's constructor can be
        regenerated by concatenating ``get_so_far`` and `get_remainder`.
        """
        position = self.packet.tell()
        self.rewind()
        return self.packet.read(position)

    def get_bytes(self, n):
        """
        Return the next ``n`` bytes of the message (as a `str`), without
        decomposing into an int, decoded string, etc.  Just the raw bytes are
        returned. Returns a string of ``n`` zero bytes if there weren't ``n``
        bytes remaining in the message.
        """
        b = self.packet.read(n)
        max_pad_size = 1 << 20  # Limit padding to 1 MB
        # zero-pad a short read, but never allocate more than 1 MB of padding
        if len(b) < n < max_pad_size:
            return b + zero_byte * (n - len(b))
        return b

    def get_byte(self):
        """
        Return the next byte of the message, without decomposing it.  This
        is equivalent to `get_bytes(1) <get_bytes>`.

        :return:
            the next (`str`) byte of the message, or ``'\000'`` if there aren't
            any bytes remaining.
        """
        return self.get_bytes(1)

    def get_boolean(self):
        """
        Fetch a boolean from the stream.  Any nonzero byte is True.
        """
        b = self.get_bytes(1)
        return b != zero_byte

    def get_adaptive_int(self):
        """
        Fetch an int from the stream, using the adaptive encoding written by
        `add_adaptive_int`.

        :return: an `int` (or arbitrary-length integer for the 0xFF-prefixed
            mpint form).
        """
        byte = self.get_bytes(1)
        if byte == max_byte:
            # 0xFF prefix marks an mpint-encoded value
            return util.inflate_long(self.get_binary())
        byte += self.get_bytes(3)
        return struct.unpack('>I', byte)[0]

    def get_int(self):
        """
        Fetch an int from the stream.

        :return: a 32-bit unsigned `int`.
        """
        return struct.unpack('>I', self.get_bytes(4))[0]

    def get_int64(self):
        """
        Fetch a 64-bit int from the stream.

        :return: a 64-bit unsigned integer (`long`).
        """
        return struct.unpack('>Q', self.get_bytes(8))[0]

    def get_mpint(self):
        """
        Fetch a long int (mpint) from the stream.

        :return: an arbitrary-length integer (`long`).
        """
        return util.inflate_long(self.get_binary())

    def get_string(self):
        """
        Fetch a `str` from the stream.  This could be a byte string and may
        contain unprintable characters.  (It's not unheard of for a string to
        contain another byte-stream message.)
        """
        return self.get_bytes(self.get_int())

    def get_text(self):
        """
        Fetch a Unicode string from the stream (a length-prefixed string
        decoded as UTF-8 via `u`).
        """
        return u(self.get_string())

    def get_binary(self):
        """
        Fetch a string from the stream.  This could be a byte string and may
        contain unprintable characters.  (It's not unheard of for a string to
        contain another byte-stream Message.)
        """
        return self.get_bytes(self.get_int())

    def get_list(self):
        """
        Fetch a `list` of `strings <str>` from the stream.

        These are trivially encoded as comma-separated values in a string.
        """
        return self.get_text().split(',')

    def add_bytes(self, b):
        """
        Write bytes to the stream, without any formatting.

        :param str b: bytes to add
        """
        self.packet.write(b)
        return self

    def add_byte(self, b):
        """
        Write a single byte to the stream, without any formatting.

        :param str b: byte to add
        """
        self.packet.write(b)
        return self

    def add_boolean(self, b):
        """
        Add a boolean value to the stream (one byte: 0 or 1).

        :param bool b: boolean value to add
        """
        if b:
            self.packet.write(one_byte)
        else:
            self.packet.write(zero_byte)
        return self

    def add_int(self, n):
        """
        Add an integer to the stream (big-endian 32-bit unsigned).

        :param int n: integer to add
        """
        self.packet.write(struct.pack('>I', n))
        return self

    def add_adaptive_int(self, n):
        """
        Add an integer to the stream: plain 32-bit for values below
        ``big_int``, otherwise 0xFF followed by an mpint string.

        :param int n: integer to add
        """
        if n >= Message.big_int:
            self.packet.write(max_byte)
            self.add_string(util.deflate_long(n))
        else:
            self.packet.write(struct.pack('>I', n))
        return self

    def add_int64(self, n):
        """
        Add a 64-bit int to the stream (big-endian unsigned).

        :param long n: long int to add
        """
        self.packet.write(struct.pack('>Q', n))
        return self

    def add_mpint(self, z):
        """
        Add a long int to the stream, encoded as an infinite-precision (mpint)
        integer.  This method only works on positive numbers.

        :param long z: long int to add
        """
        self.add_string(util.deflate_long(z))
        return self

    def add_string(self, s):
        """
        Add a string to the stream (uint32 length prefix + raw bytes).

        :param str s: string to add
        """
        s = asbytes(s)
        self.add_int(len(s))
        self.packet.write(s)
        return self

    def add_list(self, l):
        """
        Add a list of strings to the stream.  They are encoded identically to
        a single string of values separated by commas.  (Yes, really, that's
        how SSH2 does it.)

        :param list l: list of strings to add
        """
        self.add_string(','.join(l))
        return self

    def _add(self, i):
        # Type-based dispatch used by add(); bool must be tested before
        # integer_types because bool is a subclass of int.
        if type(i) is bool:
            return self.add_boolean(i)
        elif isinstance(i, integer_types):
            return self.add_adaptive_int(i)
        elif type(i) is list:
            return self.add_list(i)
        else:
            return self.add_string(i)

    def add(self, *seq):
        """
        Add a sequence of items to the stream.  The values are encoded based
        on their type: str, int, bool, list, or long.

        .. warning::
            Longs are encoded non-deterministically.  Don't use this method.

        :param seq: the sequence of items
        """
        for item in seq:
            self._add(item)
#!/usr/bin/env python ########################################################################## # # Copyright 2011 Jose Fonseca # All Rights Reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the 'Software'), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # ##########################################################################/ '''Run two retrace instances in parallel, comparing generated snapshots. 
''' import math import optparse import os.path import subprocess import platform import sys from PIL import Image from snapdiff import Comparer from highlight import AutoHighlighter import jsondiff # Null file, to use when we're not interested in subprocesses output if platform.system() == 'Windows': NULL = open('NUL:', 'wb') else: NULL = open('/dev/null', 'wb') class RetraceRun: def __init__(self, process): self.process = process def nextSnapshot(self): image, comment = read_pnm(self.process.stdout) if image is None: return None, None callNo = int(comment.strip()) return image, callNo def terminate(self): try: self.process.terminate() except OSError: # Avoid http://bugs.python.org/issue14252 pass class Retracer: def __init__(self, retraceExe, args, env=None): self.retraceExe = retraceExe self.args = args self.env = env def _retrace(self, args, stdout=subprocess.PIPE): cmd = [ self.retraceExe, ] + args + self.args if self.env: for name, value in self.env.iteritems(): sys.stderr.write('%s=%s ' % (name, value)) sys.stderr.write(' '.join(cmd) + '\n') try: return subprocess.Popen(cmd, env=self.env, stdout=stdout, stderr=NULL) except OSError, ex: sys.stderr.write('error: failed to execute %s: %s\n' % (cmd[0], ex.strerror)) sys.exit(1) def retrace(self, args): p = self._retrace([]) p.wait() return p.returncode def snapshot(self, call_nos): process = self._retrace([ '-s', '-', '-S', call_nos, ]) return RetraceRun(process) def dump_state(self, call_no): '''Get the state dump at the specified call no.''' p = self._retrace([ '-D', str(call_no), ]) state = jsondiff.load(p.stdout) p.wait() return state.get('parameters', {}) def diff_state(self, ref_call_no, src_call_no, stream): '''Compare the state between two calls.''' ref_state = self.dump_state(ref_call_no) src_state = self.dump_state(src_call_no) stream.flush() differ = jsondiff.Differ(stream) differ.visit(ref_state, src_state) stream.write('\n') def read_pnm(stream): '''Read a PNM from the stream, and return the image 
object, and the comment.''' magic = stream.readline() if not magic: return None, None magic = magic.rstrip() if magic == 'P5': channels = 1 bytesPerChannel = 1 mode = 'L' elif magic == 'P6': channels = 3 bytesPerChannel = 1 mode = 'RGB' elif magic == 'Pf': channels = 1 bytesPerChannel = 4 mode = 'R' elif magic == 'PF': channels = 3 bytesPerChannel = 4 mode = 'RGB' elif magic == 'PX': channels = 4 bytesPerChannel = 4 mode = 'RGB' else: raise Exception('Unsupported magic `%s`' % magic) comment = '' line = stream.readline() while line.startswith('#'): comment += line[1:] line = stream.readline() width, height = map(int, line.strip().split()) maximum = int(stream.readline().strip()) if bytesPerChannel == 1: assert maximum == 255 else: assert maximum == 1 data = stream.read(height * width * channels * bytesPerChannel) if bytesPerChannel == 4: # Image magic only supports single channel floating point images, so # represent the image as numpy arrays import numpy pixels = numpy.fromstring(data, dtype=numpy.float32) pixels.resize((height, width, channels)) return pixels, comment image = Image.frombuffer(mode, (width, height), data, 'raw', mode, 0, 1) return image, comment def dumpNumpyImage(output, pixels, filename): height, width, channels = pixels.shape import numpy pixels = (pixels*255).clip(0, 255).astype('uint8') if 0: # XXX: Doesn't work somehow im = Image.fromarray(pixels) else: # http://code.activestate.com/recipes/577591-conversion-of-pil-image-and-numpy-array/ pixels = pixels.reshape(height*width, channels) if channels == 4: mode = 'RGBA' else: if channels < 3: pixels = numpy.c_[arr, 255*numpy.ones((heigth * width, 3 - channels), numpy.uint8)] assert channels == 3 mode = 'RGB' im = Image.frombuffer(mode, (width, height), pixels.tostring(), 'raw', mode, 0, 1) im.save(filename) if 0: # Dump to stdout for y in range(height): output.write(' ') for x in range(width): for c in range(channels): output.write('%0.9g,' % pixels[y, x, c]) output.write(' ') output.write('\n') 
def parse_env(optparser, entries):
    '''Translate a list of NAME=VALUE entries into an environment dictionary.

    Returns None when `entries` is empty or None (meaning "inherit the parent
    environment unchanged"); otherwise returns a copy of os.environ with the
    given variables added/overridden.  Malformed entries (no '=') are reported
    through optparser.error().'''
    if not entries:
        return None
    env = os.environ.copy()
    for entry in entries:
        try:
            name, var = entry.split('=', 1)
        except Exception:
            optparser.error('invalid environment entry %r' % entry)
        env[name] = var
    return env


def main():
    '''Main program.

    Replays the same trace with a "reference" and a "source" retracer,
    compares the snapshots call by call, and reports per-call precision
    (in bits); mismatching calls optionally get their images and state
    diffs dumped.'''

    global options

    # Parse command line options
    optparser = optparse.OptionParser(
        usage='\n\t%prog [options] -- [glretrace options] <trace>',
        version='%%prog')
    optparser.add_option(
        '-r', '--retrace', metavar='PROGRAM',
        type='string', dest='retrace', default='glretrace',
        help='retrace command [default: %default]')
    optparser.add_option(
        '--ref-driver', metavar='DRIVER',
        type='string', dest='ref_driver', default=None,
        help='force reference driver')
    optparser.add_option(
        '--src-driver', metavar='DRIVER',
        type='string', dest='src_driver', default=None,
        help='force source driver')
    optparser.add_option(
        '--ref-arg', metavar='OPTION',
        type='string', action='append', dest='ref_args', default=[],
        help='pass argument to reference retrace')
    optparser.add_option(
        '--src-arg', metavar='OPTION',
        type='string', action='append', dest='src_args', default=[],
        help='pass argument to source retrace')
    optparser.add_option(
        '--ref-env', metavar='NAME=VALUE',
        type='string', action='append', dest='ref_env', default=[],
        help='add variable to reference environment')
    optparser.add_option(
        '--src-env', metavar='NAME=VALUE',
        type='string', action='append', dest='src_env', default=[],
        help='add variable to source environment')
    optparser.add_option(
        '--diff-prefix', metavar='PATH',
        type='string', dest='diff_prefix', default='.',
        help='prefix for the difference images')
    optparser.add_option(
        '-t', '--threshold', metavar='BITS',
        type="float", dest="threshold", default=12.0,
        help="threshold precision [default: %default]")
    optparser.add_option(
        '-S', '--snapshot-frequency', metavar='CALLSET',
        type="string", dest="snapshot_frequency", default='draw',
        help="calls to compare [default: %default]")
    optparser.add_option(
        '--diff-state', action='store_true', dest='diff_state', default=False,
        help='diff state between failing calls')
    optparser.add_option(
        '-o', '--output', metavar='FILE',
        type="string", dest="output",
        help="output file [default: stdout]")
    (options, args) = optparser.parse_args(sys.argv[1:])
    ref_env = parse_env(optparser, options.ref_env)
    src_env = parse_env(optparser, options.src_env)
    if not args:
        optparser.error("incorrect number of arguments")

    if options.ref_driver:
        options.ref_args.insert(0, '--driver=' + options.ref_driver)
    if options.src_driver:
        options.src_args.insert(0, '--driver=' + options.src_driver)

    refRetracer = Retracer(options.retrace, options.ref_args + args, ref_env)
    srcRetracer = Retracer(options.retrace, options.src_args + args, src_env)

    if options.output:
        output = open(options.output, 'wt')
    else:
        output = sys.stdout

    # Fix: local variable was misspelled `highligher` throughout.
    highlighter = AutoHighlighter(output)

    highlighter.write('call\tprecision\n')

    last_bad = -1
    last_good = 0
    refRun = refRetracer.snapshot(options.snapshot_frequency)
    try:
        srcRun = srcRetracer.snapshot(options.snapshot_frequency)
        try:
            while True:
                # Get the reference image
                refImage, refCallNo = refRun.nextSnapshot()
                if refImage is None:
                    break

                # Get the source image
                srcImage, srcCallNo = srcRun.nextSnapshot()
                if srcImage is None:
                    break

                # Both retracers snapshot the same callset, so call numbers
                # must stay in lockstep.
                assert refCallNo == srcCallNo
                callNo = refCallNo

                # Compare the two images
                if isinstance(refImage, Image.Image) and isinstance(srcImage, Image.Image):
                    # Using PIL
                    numpyImages = False
                    comparer = Comparer(refImage, srcImage)
                    precision = comparer.precision()
                else:
                    # Using numpy (for floating point images)
                    # TODO: drop PIL when numpy path becomes general enough
                    import numpy
                    assert not isinstance(refImage, Image.Image)
                    assert not isinstance(srcImage, Image.Image)
                    numpyImages = True
                    assert refImage.shape == srcImage.shape
                    # Mean squared error, expressed as bits of precision.
                    diffImage = numpy.square(srcImage - refImage)
                    height, width, channels = diffImage.shape
                    square_error = numpy.sum(diffImage)
                    # epsilon avoids log(0) on a perfect match
                    square_error += numpy.finfo(numpy.float32).eps
                    rel_error = square_error / float(height*width*channels)
                    bits = -math.log(rel_error)/math.log(2.0)
                    precision = bits

                mismatch = precision < options.threshold

                # Highlight failing calls in bold red.
                if mismatch:
                    highlighter.color(highlighter.red)
                    highlighter.bold()
                highlighter.write('%u\t%f\n' % (callNo, precision))
                if mismatch:
                    highlighter.normal()

                if mismatch:
                    if options.diff_prefix:
                        # Dump ref/src (and for PIL, diff) images for the call.
                        prefix = os.path.join(options.diff_prefix, '%010u' % callNo)
                        prefix_dir = os.path.dirname(prefix)
                        if not os.path.isdir(prefix_dir):
                            os.makedirs(prefix_dir)
                        if numpyImages:
                            dumpNumpyImage(output, refImage, prefix + '.ref.png')
                            dumpNumpyImage(output, srcImage, prefix + '.src.png')
                        else:
                            refImage.save(prefix + '.ref.png')
                            srcImage.save(prefix + '.src.png')
                            comparer.write_diff(prefix + '.diff.png')
                    if last_bad < last_good and options.diff_state:
                        srcRetracer.diff_state(last_good, callNo, output)
                    last_bad = callNo
                else:
                    last_good = callNo

                highlighter.flush()
        finally:
            srcRun.terminate()
    finally:
        refRun.terminate()


if __name__ == '__main__':
    main()
import React from 'react' import './Carousel.scss' export class Carousel extends React.Component { render() { return ( <div className={this.props.classes}> <div className="items-wraper"> <div className="items" id="items"> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/13/00/25/79/13002579_14188766_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/13/00/30/65/13003065_13861162_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/12/96/73/06/12967306_14307926_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/13/00/01/96/13000196_14191716_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/12/97/22/18/12972218_13892278_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" src="https://cdn-images.farfetch-contents.com/13/04/47/90/13044790_13916984_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> <div className="item"> <img className="item-image" alt="description" 
src="https://cdn-images.farfetch-contents.com/12/66/20/86/12662086_13203346_1000.jpg"/> <h2 className="item-title Polaris-Bold type-M">View the Look</h2> <p className="item-description">6 Pieces</p> </div> </div> </div> </div > ) } } export default Carousel
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import sys sys.path.append("..") import unittest import numpy as np from op_test import OpTest, skip_check_grad_ci import paddle.fluid as fluid from paddle.fluid import compiler, Program, program_guard, core import paddle class TestConcatOp(OpTest): def setUp(self): self.op_type = "concat" self.dtype = self.get_dtype() self.init_test_data() self.inputs = {'X': [('x0', self.x0), ('x1', self.x1), ('x2', self.x2)]} self.attrs = {'axis': self.axis} if self.axis < 0: self.actual_axis = self.axis + len(self.x0.shape) self.actual_axis = self.actual_axis if self.actual_axis > 0 else 0 else: self.actual_axis = self.axis self.outputs = { 'Out': np.concatenate( (self.x0, self.x1, self.x2), axis=self.actual_axis) } def get_dtype(self): return "float64" def test_check_output(self): if paddle.is_compiled_with_xpu(): place = paddle.XPUPlace(0) self.check_output_with_place(place) def test_check_grad(self): if paddle.is_compiled_with_xpu(): place = paddle.XPUPlace(0) self.check_grad_with_place(place, ['x0'], 'Out') self.check_grad_with_place(place, ['x1'], 'Out') self.check_grad_with_place(place, ['x2'], 'Out') def init_test_data(self): self.x0 = np.random.random((5, 1, 4, 5)).astype(self.dtype) self.x1 = np.random.random((5, 2, 4, 5)).astype(self.dtype) self.x2 = np.random.random((5, 3, 4, 5)).astype(self.dtype) self.axis = 1 class 
TestConcatOp2(TestConcatOp): def init_test_data(self): self.x0 = np.random.random((2, 3, 4, 5)).astype(self.dtype) self.x1 = np.random.random((2, 3, 4, 5)).astype(self.dtype) self.x2 = np.random.random((2, 3, 4, 5)).astype(self.dtype) self.axis = 1 @skip_check_grad_ci( reason="The function 'check_grad' for large inputs is too slow.") class TestConcatOp3(TestConcatOp): def init_test_data(self): self.x0 = np.random.random((1, 256, 170, 256)).astype(self.dtype) self.x1 = np.random.random((1, 128, 170, 256)).astype(self.dtype) self.x2 = np.random.random((1, 128, 170, 256)).astype(self.dtype) self.axis = 1 def test_check_grad(self): pass @skip_check_grad_ci( reason="This test will meet fetch error when there is a null grad. The detailed information is in PR#17015." ) class TestConcatOp4(TestConcatOp): def init_test_data(self): self.x0 = np.random.random((2, 3, 4, 5)).astype(self.dtype) self.x1 = np.random.random((2, 3, 4, 5)).astype(self.dtype) self.x2 = np.random.random((0, 3, 4, 5)).astype(self.dtype) self.axis = 0 def test_check_grad(self): pass class TestConcatOp5(TestConcatOp): def init_test_data(self): self.x0 = np.random.random((5, 1, 4, 5)).astype(self.dtype) self.x1 = np.random.random((5, 2, 4, 5)).astype(self.dtype) self.x2 = np.random.random((5, 3, 4, 5)).astype(self.dtype) self.axis = -3 class TestConcatOp6(TestConcatOp): def setUp(self): self.op_type = "concat" self.dtype = self.get_dtype() self.init_test_data() self.lod = [[20, 80]] self.out_lod = [[20, 80, 20, 80, 20, 80]] self.inputs = { 'X': [('x0', (self.x0, self.lod)), ('x1', (self.x1, self.lod)), ('x2', (self.x2, self.lod))] } self.attrs = {'axis': self.axis} if self.axis < 0: self.actual_axis = self.axis + len(self.x0.shape) self.actual_axis = self.actual_axis if self.actual_axis > 0 else 0 else: self.actual_axis = self.axis out = np.concatenate((self.x0, self.x1, self.x2), axis=self.actual_axis) self.outputs = {'Out': (out, self.out_lod)} def test_check_output(self): if 
paddle.is_compiled_with_xpu(): place = paddle.XPUPlace(0) self.check_output_with_place(place) def test_check_grad(self): if paddle.is_compiled_with_xpu(): place = paddle.XPUPlace(0) self.check_grad_with_place(place, ['x0'], 'Out') self.check_grad_with_place(place, ['x1'], 'Out') self.check_grad_with_place(place, ['x2'], 'Out') def init_test_data(self): self.x0 = np.random.random([100]).astype(self.dtype) self.x1 = np.random.random([100]).astype(self.dtype) self.x2 = np.random.random([100]).astype(self.dtype) self.axis = 0 class TestConcatOpError(unittest.TestCase): def test_errors(self): with program_guard(Program(), Program()): # The input type of concat_op should be list. x1 = fluid.layers.data(shape=[4], dtype='int32', name='x1') fluid.layers.concat(x1) # The item in input must be Variable. x2 = fluid.create_lod_tensor( np.array([[-1]]), [[1]], fluid.CPUPlace()) x3 = fluid.create_lod_tensor( np.array([[-1]]), [[1]], fluid.CPUPlace()) self.assertRaises(TypeError, fluid.layers.concat, [x2]) # The input dtype of concat_op must be float16, float32, float64, int32, int64. x4 = fluid.layers.data(shape=[4], dtype='uint8', name='x4') x5 = fluid.layers.data(shape=[4], dtype='uint8', name='x5') self.assertRaises(TypeError, fluid.layers.concat, [x4, x5]) x6 = fluid.layers.data(shape=[4], dtype='float16', name='x6') x7 = fluid.layers.data(shape=[4], dtype='float16', name='x7') x8 = fluid.layers.data(shape=[4], dtype='float32', name='x8') fluid.layers.concat([x6, x7]) # The type of axis in concat_op should be int or Variable. 
def test_axis_type(): fluid.layers.concat([x6, x7], 3.2) self.assertRaises(TypeError, test_axis_type) def test_input_same_dtype(): fluid.layers.concat([x7, x8]) self.assertRaises(TypeError, test_input_same_dtype) class TestConcatAPI(unittest.TestCase): def test_fluid_api(self): x_1 = fluid.data(shape=[None, 1, 4, 5], dtype='float32', name='x_1') fluid.layers.concat([x_1, x_1], 0) input_2 = np.random.random([2, 1, 4, 5]).astype("float32") input_3 = np.random.random([2, 2, 4, 5]).astype("float32") x_2 = fluid.data(shape=[2, 1, 4, 5], dtype='float32', name='x_2') x_3 = fluid.data(shape=[2, 2, 4, 5], dtype='float32', name='x_3') positive_1_int32 = fluid.layers.fill_constant([1], "float32", 1) positive_1_int64 = fluid.layers.fill_constant([1], "float32", 1) out_1 = fluid.layers.concat(input=[x_2, x_3], axis=1) out_2 = fluid.layers.concat(input=[x_2, x_3], axis=1) out_3 = fluid.layers.concat(input=[x_2, x_3], axis=1) exe = fluid.Executor(place=fluid.XPUPlace(0)) [res_1, res_2, res_3] = exe.run( fluid.default_main_program(), feed={"x_1": input_2, "x_2": input_2, "x_3": input_3}, fetch_list=[out_1, out_2, out_3]) assert np.array_equal(res_1, np.concatenate((input_2, input_3), axis=1)) assert np.array_equal(res_2, np.concatenate((input_2, input_3), axis=1)) assert np.array_equal(res_3, np.concatenate((input_2, input_3), axis=1)) def test_errors(self): with program_guard(Program(), Program()): # The item in input must be Variable. x2 = fluid.create_lod_tensor( np.array([[-1]]), [[1]], fluid.XPUPlace(0)) x3 = fluid.create_lod_tensor( np.array([[-1]]), [[1]], fluid.XPUPlace(0)) self.assertRaises(TypeError, paddle.concat, [x2]) # The input dtype of concat_op must be float32. x4 = fluid.data(shape=[4], dtype='uint8', name='x4') x5 = fluid.data(shape=[4], dtype='uint8', name='x5') self.assertRaises(TypeError, fluid.layers.concat, [x4, x5]) # The type of axis in concat_op should be int or Variable. 
x6 = fluid.layers.data(shape=[4], dtype='float16', name='x6') x7 = fluid.layers.data(shape=[4], dtype='float16', name='x7') x8 = fluid.layers.data(shape=[4], dtype='float32', name='x8') def test_axis_type(): paddle.concat([x6, x7], 3.2) self.assertRaises(TypeError, test_axis_type) def test_input_same_dtype(): paddle.concat([x7, x8]) self.assertRaises(TypeError, test_input_same_dtype) if __name__ == '__main__': paddle.enable_static() unittest.main()
#!/usr/bin/python
#
# Create Webfaction website using Ansible and the Webfaction API
#
# ------------------------------------------
#
# (c) Quentin Stafford-Fraser 2015
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

DOCUMENTATION = '''
---
module: webfaction_site
short_description: Add or remove a website on a Webfaction host
description:
    - Add or remove a website on a Webfaction host.  Further documentation at http://github.com/quentinsf/ansible-webfaction.
author: Quentin Stafford-Fraser (@quentinsf)
version_added: "2.0"
notes:
    - Sadly, you I(do) need to know your webfaction hostname for the C(host) parameter.  But at least, unlike the API, you don't need to know the IP address - you can use a DNS name.
    - If a site of the same name exists in the account but on a different host, the operation will exit.
    - "You can run playbooks that use this on a local machine, or on a Webfaction host, or elsewhere, since the scripts use the remote webfaction API - the location is not important. However, running them on multiple hosts I(simultaneously) is best avoided. If you don't specify I(localhost) as your host, you may want to add C(serial: 1) to the plays."
    - See `the webfaction API <http://docs.webfaction.com/xmlrpc-api/>`_ for more info.

options:
    name:
        description:
            - The name of the website
        required: true
    state:
        description:
            - Whether the website should exist
        required: false
        choices: ['present', 'absent']
        default: "present"
    host:
        description:
            - The webfaction host on which the site should be created.
        required: true
    https:
        description:
            - Whether or not to use HTTPS
        required: false
        choices:
            - true
            - false
        default: 'false'
    site_apps:
        description:
            - A mapping of URLs to apps
        required: false
    subdomains:
        description:
            - A list of subdomains associated with this site.
        required: false
        default: null
    login_name:
        description:
            - The webfaction account to use
        required: true
    login_password:
        description:
            - The webfaction password to use
        required: true
'''

EXAMPLES = '''
  - name: create website
    webfaction_site:
      name: testsite1
      state: present
      host: myhost.webfaction.com
      subdomains:
        - 'testsite1.my_domain.org'
      site_apps:
        - ['testapp1', '/']
      https: no
      login_name: "{{webfaction_user}}"
      login_password: "{{webfaction_passwd}}"
'''

import socket
# Python 2 XML-RPC client; the Webfaction API is an XML-RPC service.
import xmlrpclib

# Shared proxy for all API calls; authentication happens per-call via the
# session_id returned by webfaction.login() below.
webfaction = xmlrpclib.ServerProxy('https://api.webfaction.com/')

def main():
    # Declare the module interface.
    # NOTE(review): DOCUMENTATION describes site_apps as "a mapping of URLs
    # to apps", but the code treats it as a list of [app, path] pairs (see
    # positional_args below) — confirm and reconcile the docs.
    module = AnsibleModule(
        argument_spec = dict(
            name = dict(required=True),
            state = dict(required=False, choices=['present', 'absent'], default='present'),

            # You can specify an IP address or hostname.
            host = dict(required=True),
            https = dict(required=False, type='bool', default=False),
            subdomains = dict(required=False, type='list', default=[]),
            site_apps = dict(required=False, type='list', default=[]),
            login_name = dict(required=True),
            login_password = dict(required=True),
        ),
        supports_check_mode=True
    )
    site_name  = module.params['name']
    site_state = module.params['state']
    site_host  = module.params['host']
    # The API identifies sites by IP, so resolve the hostname up front.
    site_ip    = socket.gethostbyname(site_host)

    session_id, account = webfaction.login(
        module.params['login_name'],
        module.params['login_password']
    )

    # Index the account's existing sites by name for quick lookup.
    site_list = webfaction.list_websites(session_id)
    site_map = dict([(i['name'], i) for i in site_list])
    existing_site = site_map.get(site_name)

    result = {}

    # Here's where the real stuff happens

    if site_state == 'present':

        # Does a site with this name already exist?
        if existing_site:

            # If yes, but it's on a different IP address, then fail.
            # If we wanted to allow relocation, we could add a 'relocate=true' option
            # which would get the existing IP address, delete the site there, and create it
            # at the new address.  A bit dangerous, perhaps, so for now we'll require manual
            # deletion if it's on another host.

            if existing_site['ip'] != site_ip:
                module.fail_json(msg="Website already exists with a different IP address. Please fix by hand.")

            # If it's on this host and the key parameters are the same, nothing needs to be done.
            if (existing_site['https'] == module.boolean(module.params['https'])) and \
               (set(existing_site['subdomains']) == set(module.params['subdomains'])) and \
               (dict(existing_site['website_apps']) == dict(module.params['site_apps'])):
                module.exit_json(
                    changed = False
                )

        # create_website and update_website share the same positional
        # signature; build it once and dispatch below.
        positional_args = [
            session_id, site_name, site_ip,
            module.boolean(module.params['https']),
            module.params['subdomains'],
        ]
        # Each site_app entry is an [app, path] pair; the API wants tuples.
        for a in module.params['site_apps']:
            positional_args.append( (a[0], a[1]) )

        if not module.check_mode:
            # If this isn't a dry run, create or modify the site
            result.update(
                webfaction.create_website(
                    *positional_args
                ) if not existing_site else webfaction.update_website (
                    *positional_args
                )
            )

    elif site_state == 'absent':

        # If the site's already not there, nothing changed.
        if not existing_site:
            module.exit_json(
                changed = False,
            )

        if not module.check_mode:
            # If this isn't a dry run, delete the site
            result.update(
                webfaction.delete_website(session_id, site_name, site_ip)
            )

    else:
        module.fail_json(msg="Unknown state specified: {}".format(site_state))

    module.exit_json(
        changed = True,
        result = result
    )

# Legacy Ansible boilerplate: the wildcard import supplies AnsibleModule.
from ansible.module_utils.basic import *
main()
/* Tests clFinish Copyright (c) 2013 Ville Korhonen / Tampere University of Technology Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ #include <CL/cl.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #include "poclu.h" char kernelASourceCode[] = "kernel \n" "void test_kernel(constant char* input) {\n" " printf(\"%s\", input);\n" "}\n"; int main() { size_t global_work_size[1] = { 1 }, local_work_size[1]= { 1 }; cl_int err; cl_platform_id platforms[1]; cl_uint nplatforms; cl_device_id devices[1]; // + 1 for duplicate test cl_uint num_devices; cl_program program = NULL; cl_kernel kernelA = NULL; cl_kernel kernelB = NULL; cl_kernel kernelC= NULL; char inputA[] = "A"; char inputB[] = "B"; char inputC[] = "C"; cl_mem inputBufferA = NULL; cl_mem inputBufferB = NULL; cl_mem inputBufferC = NULL; /* command queues */ cl_command_queue queueA = NULL; cl_command_queue queueB = NULL; cl_command_queue queueC = NULL; /* events */ cl_event eventA1 = NULL; cl_event eventB2 = NULL; cl_event eventA3 = NULL; cl_event eventB4 = NULL; /* event wait lists */ cl_event B2_wait_list[1]; cl_event A3_wait_list[1]; cl_event B4_wait_list[1]; cl_event C5_wait_list[2]; err = clGetPlatformIDs(1, platforms, &nplatforms); CHECK_OPENCL_ERROR_IN("clGetPlatformIDs"); if (!nplatforms) return EXIT_FAILURE; err = clGetDeviceIDs(platforms[0], CL_DEVICE_TYPE_ALL, 1, devices, &num_devices); CHECK_OPENCL_ERROR_IN("clGetDeviceIDs"); cl_context context = clCreateContext(NULL, num_devices, devices, NULL, NULL, &err); CHECK_OPENCL_ERROR_IN("clCreateContext"); err = clGetContextInfo(context, CL_CONTEXT_DEVICES, sizeof(cl_device_id), devices, NULL); CHECK_OPENCL_ERROR_IN("clGetContextInfo"); queueA = clCreateCommandQueue(context, devices[0], 0, &err); CHECK_OPENCL_ERROR_IN("clCreateCommandQueue"); TEST_ASSERT(queueA); queueB = clCreateCommandQueue(context, devices[0], 0, &err); CHECK_OPENCL_ERROR_IN("clCreateCommandQueue"); TEST_ASSERT(queueB); queueC = clCreateCommandQueue(context, devices[0], 0, &err); CHECK_OPENCL_ERROR_IN("clCreateCommandQueue"); TEST_ASSERT(queueB); inputBufferA = clCreateBuffer(context, CL_MEM_READ_ONLY | 
CL_MEM_COPY_HOST_PTR, strlen (inputB)+1, (void *) inputA, &err); CHECK_OPENCL_ERROR_IN("clCreateBuffer"); TEST_ASSERT(inputBufferA); inputBufferB = clCreateBuffer(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR, strlen (inputA)+1, (void *) inputB, &err); CHECK_OPENCL_ERROR_IN("clCreateBuffer"); TEST_ASSERT(inputBufferB); inputBufferC = clCreateBuffer(context, CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR, strlen (inputA)+1, (void *) inputC, &err); CHECK_OPENCL_ERROR_IN("clCreateBuffer"); TEST_ASSERT(inputBufferC); size_t kernel_size = strlen (kernelASourceCode); char* kernel_buffer = kernelASourceCode; program = clCreateProgramWithSource (context, 1, (const char**)&kernel_buffer, &kernel_size, &err); CHECK_OPENCL_ERROR_IN("clCreateProgramWithSource"); err = clBuildProgram (program, num_devices, devices, NULL, NULL, NULL); CHECK_OPENCL_ERROR_IN("clBuildProgram"); kernelA = clCreateKernel (program, "test_kernel", NULL); CHECK_OPENCL_ERROR_IN("clCreateKernel"); TEST_ASSERT(kernelA); kernelB = clCreateKernel (program, "test_kernel", NULL); CHECK_OPENCL_ERROR_IN("clCreateKernel"); TEST_ASSERT(kernelB); kernelC = clCreateKernel (program, "test_kernel", NULL); CHECK_OPENCL_ERROR_IN("clCreateKernel"); TEST_ASSERT(kernelC); err = clSetKernelArg (kernelA, 0, sizeof (cl_mem), &inputBufferA); CHECK_OPENCL_ERROR_IN("clSetKernelArg"); err = clSetKernelArg (kernelB, 0, sizeof (cl_mem), &inputBufferB); CHECK_OPENCL_ERROR_IN("clSetKernelArg"); err = clSetKernelArg (kernelC, 0, sizeof (cl_mem), &inputBufferC); CHECK_OPENCL_ERROR_IN("clSetKernelArg"); /* first enqueue A1*/ err = clEnqueueNDRangeKernel (queueA, kernelA, 1, NULL, global_work_size, local_work_size, 0, NULL, &eventA1); CHECK_OPENCL_ERROR_IN("clEnqueueNDRangeKernel"); /* enqueue B2 */ B2_wait_list[0] = eventA1; err = clEnqueueNDRangeKernel (queueB, kernelB, 1, NULL, global_work_size, local_work_size, 1, B2_wait_list, &eventB2); CHECK_OPENCL_ERROR_IN("clEnqueueNDRangeKernel"); /* enqueue A3 */ A3_wait_list[0] = eventB2; err = 
clEnqueueNDRangeKernel (queueA, kernelA, 1, NULL, global_work_size, local_work_size, 1, A3_wait_list, &eventA3); CHECK_OPENCL_ERROR_IN("clEnqueueNDRangeKernel"); /* enqueue B4 */ B4_wait_list[0] = eventA3; err = clEnqueueNDRangeKernel (queueB, kernelB, 1, NULL, global_work_size, local_work_size, 1, B4_wait_list, &eventB4); CHECK_OPENCL_ERROR_IN("clEnqueueNDRangeKernel"); /* enqueue C5 */ C5_wait_list[0] = eventA3; C5_wait_list[1] = eventB4; err = clEnqueueNDRangeKernel (queueC, kernelC, 1, NULL, global_work_size, local_work_size, 2, C5_wait_list, NULL); CHECK_OPENCL_ERROR_IN("clEnqueueNDRangeKernel"); clFinish(queueC); printf("\n"); return EXIT_SUCCESS; }
""" The arraypad module contains a group of functions to pad values onto the edges of an n-dimensional array. """ import numpy as np from numpy.core.overrides import array_function_dispatch from numpy.lib.index_tricks import ndindex __all__ = ['pad'] ############################################################################### # Private utility functions. def _round_if_needed(arr, dtype): """ Rounds arr inplace if destination dtype is integer. Parameters ---------- arr : ndarray Input array. dtype : dtype The dtype of the destination array. """ if np.issubdtype(dtype, np.integer): arr.round(out=arr) def _slice_at_axis(sl, axis): """ Construct tuple of slices to slice an array in the given dimension. Parameters ---------- sl : slice The slice for the given dimension. axis : int The axis to which `sl` is applied. All other dimensions are left "unsliced". Returns ------- sl : tuple of slices A tuple with slices matching `shape` in length. Examples -------- >>> _slice_at_axis(slice(None, 3, -1), 1) (slice(None, None, None), slice(None, 3, -1), (...,)) """ return (slice(None),) * axis + (sl,) + (...,) def _view_roi(array, original_area_slice, axis): """ Get a view of the current region of interest during iterative padding. When padding multiple dimensions iteratively corner values are unnecessarily overwritten multiple times. This function reduces the working area for the first dimensions so that corners are excluded. Parameters ---------- array : ndarray The array with the region of interest. original_area_slice : tuple of slices Denotes the area with original values of the unpadded array. axis : int The currently padded dimension assuming that `axis` is padded before `axis` + 1. Returns ------- roi : ndarray The region of interest of the original `array`. """ axis += 1 sl = (slice(None),) * axis + original_area_slice[axis:] return array[sl] def _pad_simple(array, pad_width, fill_value=None): """ Pad array on all sides with either a single value or undefined values. 
    Parameters
    ----------
    array : ndarray
        Array to grow.
    pad_width : sequence of tuple[int, int]
        Pad width on both sides for each dimension in `arr`.
    fill_value : scalar, optional
        If provided the padded area is filled with this value, otherwise
        the pad area left undefined.

    Returns
    -------
    padded : ndarray
        The padded array with the same dtype as `array`. Its order will
        default to C-style if `array` is not F-contiguous.
    original_area_slice : tuple
        A tuple of slices pointing to the area of the original array.
    """
    # Allocate grown array
    new_shape = tuple(
        left + size + right
        for size, (left, right) in zip(array.shape, pad_width)
    )
    order = 'F' if array.flags.fnc else 'C'  # Fortran and not also C-order
    padded = np.empty(new_shape, dtype=array.dtype, order=order)

    if fill_value is not None:
        padded.fill(fill_value)

    # Copy old array into correct space
    original_area_slice = tuple(
        slice(left, left + size)
        for size, (left, right) in zip(array.shape, pad_width)
    )
    padded[original_area_slice] = array

    return padded, original_area_slice


def _set_pad_area(padded, axis, width_pair, value_pair):
    """
    Set empty-padded area in given dimension.

    Parameters
    ----------
    padded : ndarray
        Array with the pad area which is modified inplace.
    axis : int
        Dimension with the pad area to set.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.
    value_pair : tuple of scalars or ndarrays
        Values inserted into the pad area on each side. It must match or be
        broadcastable to the shape of `arr`.
    """
    # Left pad area is the first width_pair[0] entries along `axis` ...
    left_slice = _slice_at_axis(slice(None, width_pair[0]), axis)
    padded[left_slice] = value_pair[0]

    # ... right pad area is the last width_pair[1] entries along `axis`.
    right_slice = _slice_at_axis(
        slice(padded.shape[axis] - width_pair[1], None), axis)
    padded[right_slice] = value_pair[1]


def _get_edges(padded, axis, width_pair):
    """
    Retrieve edge values from empty-padded array in given dimension.

    Parameters
    ----------
    padded : ndarray
        Empty-padded array.
    axis : int
        Dimension in which the edges are considered.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.

    Returns
    -------
    left_edge, right_edge : ndarray
        Edge values of the valid area in `padded` in the given dimension. Its
        shape will always match `padded` except for the dimension given by
        `axis` which will have a length of 1.
    """
    # First valid value after the left pad area
    left_index = width_pair[0]
    left_slice = _slice_at_axis(slice(left_index, left_index + 1), axis)
    left_edge = padded[left_slice]

    # Last valid value before the right pad area
    right_index = padded.shape[axis] - width_pair[1]
    right_slice = _slice_at_axis(slice(right_index - 1, right_index), axis)
    right_edge = padded[right_slice]

    return left_edge, right_edge


def _get_linear_ramps(padded, axis, width_pair, end_value_pair):
    """
    Construct linear ramps for empty-padded array in given dimension.

    Parameters
    ----------
    padded : ndarray
        Empty-padded array.
    axis : int
        Dimension in which the ramps are constructed.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.
    end_value_pair : (scalar, scalar)
        End values for the linear ramps which form the edge of the fully
        padded array. These values are included in the linear ramps.

    Returns
    -------
    left_ramp, right_ramp : ndarray
        Linear ramps to set on both sides of `padded`.
    """
    edge_pair = _get_edges(padded, axis, width_pair)

    left_ramp = np.linspace(
        start=end_value_pair[0],
        stop=edge_pair[0].squeeze(axis),  # Dimension is replaced by linspace
        num=width_pair[0],
        endpoint=False,
        dtype=padded.dtype,
        axis=axis,
    )

    right_ramp = np.linspace(
        start=end_value_pair[1],
        stop=edge_pair[1].squeeze(axis),  # Dimension is replaced by linspace
        num=width_pair[1],
        endpoint=False,
        dtype=padded.dtype,
        axis=axis,
    )
    # Reverse linear space in appropriate dimension
    right_ramp = right_ramp[_slice_at_axis(slice(None, None, -1), axis)]

    return left_ramp, right_ramp


def _get_stats(padded, axis, width_pair, length_pair, stat_func):
    """
    Calculate statistic for the empty-padded array in given dimension.

    Parameters
    ----------
    padded : ndarray
        Empty-padded array.
    axis : int
        Dimension in which the statistic is calculated.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.
    length_pair : 2-element sequence of None or int
        Gives the number of values in valid area from each side that is
        taken into account when calculating the statistic. If None the
        entire valid area in `padded` is considered.
    stat_func : function
        Function to compute statistic. The expected signature is
        ``stat_func(x: ndarray, axis: int, keepdims: bool) -> ndarray``.

    Returns
    -------
    left_stat, right_stat : ndarray
        Calculated statistic for both sides of `padded`.
    """
    # Calculate indices of the edges of the area with original values
    left_index = width_pair[0]
    right_index = padded.shape[axis] - width_pair[1]
    # as well as its length
    max_length = right_index - left_index

    # Limit stat_lengths to max_length
    left_length, right_length = length_pair
    if left_length is None or max_length < left_length:
        left_length = max_length
    if right_length is None or max_length < right_length:
        right_length = max_length

    if (left_length == 0 or right_length == 0) \
            and stat_func in {np.amax, np.amin}:
        # amax and amin can't operate on an empty array,
        # raise a more descriptive warning here instead of the default one
        raise ValueError("stat_length of 0 yields no value for padding")

    # Calculate statistic for the left side
    left_slice = _slice_at_axis(
        slice(left_index, left_index + left_length), axis)
    left_chunk = padded[left_slice]
    left_stat = stat_func(left_chunk, axis=axis, keepdims=True)
    _round_if_needed(left_stat, padded.dtype)

    if left_length == right_length == max_length:
        # return early as right_stat must be identical to left_stat
        return left_stat, left_stat

    # Calculate statistic for the right side
    right_slice = _slice_at_axis(
        slice(right_index - right_length, right_index), axis)
    right_chunk = padded[right_slice]
    right_stat = stat_func(right_chunk, axis=axis, keepdims=True)
    _round_if_needed(right_stat, padded.dtype)

    return left_stat, right_stat


def _set_reflect_both(padded, axis, width_pair, method, include_edge=False):
    """
    Pad `axis` of `arr` with reflection.

    Parameters
    ----------
    padded : ndarray
        Input array of arbitrary shape.
    axis : int
        Axis along which to pad `arr`.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.
    method : str
        Controls method of reflection; options are 'even' or 'odd'.
    include_edge : bool
        If true, edge value is included in reflection, otherwise the edge
        value forms the symmetric axis to the reflection.

    Returns
    -------
    pad_amt : tuple of ints, length 2
        New index positions of padding to do along the `axis`. If these are
        both 0, padding is done in this dimension.
    """
    left_pad, right_pad = width_pair
    old_length = padded.shape[axis] - right_pad - left_pad

    if include_edge:
        # Edge is included, we need to offset the pad amount by 1
        edge_offset = 1
    else:
        edge_offset = 0  # Edge is not included, no need to offset pad amount
        old_length -= 1  # but must be omitted from the chunk

    if left_pad > 0:
        # Pad with reflected values on left side:
        # First limit chunk size which can't be larger than pad area
        chunk_length = min(old_length, left_pad)
        # Slice right to left, stop on or next to edge, start relative to stop
        stop = left_pad - edge_offset
        start = stop + chunk_length
        left_slice = _slice_at_axis(slice(start, stop, -1), axis)
        left_chunk = padded[left_slice]

        if method == "odd":
            # Negate chunk and align with edge
            edge_slice = _slice_at_axis(slice(left_pad, left_pad + 1), axis)
            left_chunk = 2 * padded[edge_slice] - left_chunk

        # Insert chunk into padded area
        start = left_pad - chunk_length
        stop = left_pad
        pad_area = _slice_at_axis(slice(start, stop), axis)
        padded[pad_area] = left_chunk

        # Adjust pointer to left edge for next iteration
        left_pad -= chunk_length

    if right_pad > 0:
        # Pad with reflected values on right side:
        # First limit chunk size which can't be larger than pad area
        chunk_length = min(old_length, right_pad)
        # Slice right to left, start on or next to edge, stop relative to start
        start = -right_pad + edge_offset - 2
        stop = start - chunk_length
        right_slice = _slice_at_axis(slice(start, stop, -1), axis)
        right_chunk = padded[right_slice]

        if method == "odd":
            # Negate chunk and align with edge
            edge_slice = _slice_at_axis(
                slice(-right_pad - 1, -right_pad), axis)
            right_chunk = 2 * padded[edge_slice] - right_chunk

        # Insert chunk into padded area
        start = padded.shape[axis] - right_pad
        stop = start + chunk_length
        pad_area = _slice_at_axis(slice(start, stop), axis)
        padded[pad_area] = right_chunk

        # Adjust pointer to right edge for next iteration
        right_pad -= chunk_length

    return left_pad, right_pad


def _set_wrap_both(padded, axis, width_pair):
    """
    Pad `axis` of `arr` with wrapped values.

    Parameters
    ----------
    padded : ndarray
        Input array of arbitrary shape.
    axis : int
        Axis along which to pad `arr`.
    width_pair : (int, int)
        Pair of widths that mark the pad area on both sides in the given
        dimension.

    Returns
    -------
    pad_amt : tuple of ints, length 2
        New index positions of padding to do along the `axis`. If these are
        both 0, padding is done in this dimension.
    """
    left_pad, right_pad = width_pair
    period = padded.shape[axis] - right_pad - left_pad

    # If the current dimension of `arr` doesn't contain enough valid values
    # (not part of the undefined pad area) we need to pad multiple times.
    # Each time the pad area shrinks on both sides which is communicated with
    # these variables.
    new_left_pad = 0
    new_right_pad = 0

    if left_pad > 0:
        # Pad with wrapped values on left side
        # First slice chunk from right side of the non-pad area.
        # Use min(period, left_pad) to ensure that chunk is not larger than
        # pad area
        right_slice = _slice_at_axis(
            slice(-right_pad - min(period, left_pad),
                  -right_pad if right_pad != 0 else None),
            axis
        )
        right_chunk = padded[right_slice]

        if left_pad > period:
            # Chunk is smaller than pad area
            pad_area = _slice_at_axis(slice(left_pad - period, left_pad), axis)
            new_left_pad = left_pad - period
        else:
            # Chunk matches pad area
            pad_area = _slice_at_axis(slice(None, left_pad), axis)
        padded[pad_area] = right_chunk

    if right_pad > 0:
        # Pad with wrapped values on right side
        # First slice chunk from left side of the non-pad area.
        # Use min(period, right_pad) to ensure that chunk is not larger than
        # pad area
        left_slice = _slice_at_axis(
            slice(left_pad, left_pad + min(period, right_pad),), axis)
        left_chunk = padded[left_slice]

        if right_pad > period:
            # Chunk is smaller than pad area
            pad_area = _slice_at_axis(
                slice(-right_pad, -right_pad + period), axis)
            new_right_pad = right_pad - period
        else:
            # Chunk matches pad area
            pad_area = _slice_at_axis(slice(-right_pad, None), axis)
        padded[pad_area] = left_chunk

    return new_left_pad, new_right_pad


def _as_pairs(x, ndim, as_index=False):
    """
    Broadcast `x` to an array with the shape (`ndim`, 2).

    A helper function for `pad` that prepares and validates arguments like
    `pad_width` for iteration in pairs.

    Parameters
    ----------
    x : {None, scalar, array-like}
        The object to broadcast to the shape (`ndim`, 2).
    ndim : int
        Number of pairs the broadcasted `x` will have.
    as_index : bool, optional
        If `x` is not None, try to round each element of `x` to an integer
        (dtype `np.intp`) and ensure every element is positive.

    Returns
    -------
    pairs : nested iterables, shape (`ndim`, 2)
        The broadcasted version of `x`.

    Raises
    ------
    ValueError
        If `as_index` is True and `x` contains negative elements.
        Or if `x` is not broadcastable to the shape (`ndim`, 2).
    """
    if x is None:
        # Pass through None as a special case, otherwise np.round(x) fails
        # with an AttributeError
        return ((None, None),) * ndim

    x = np.array(x)
    if as_index:
        x = np.round(x).astype(np.intp, copy=False)

    if x.ndim < 3:
        # Optimization: Possibly use faster paths for cases where `x` has
        # only 1 or 2 elements. `np.broadcast_to` could handle these as well
        # but is currently slower

        if x.size == 1:
            # x was supplied as a single value
            x = x.ravel()  # Ensure x[0] works for x.ndim == 0, 1, 2
            if as_index and x < 0:
                raise ValueError("index can't contain negative values")
            return ((x[0], x[0]),) * ndim

        if x.size == 2 and x.shape != (2, 1):
            # x was supplied with a single value for each side
            # but except case when each dimension has a single value
            # which should be broadcasted to a pair,
            # e.g. [[1], [2]] -> [[1, 1], [2, 2]] not [[1, 2], [1, 2]]
            x = x.ravel()  # Ensure x[0], x[1] works
            if as_index and (x[0] < 0 or x[1] < 0):
                raise ValueError("index can't contain negative values")
            return ((x[0], x[1]),) * ndim

    if as_index and x.min() < 0:
        raise ValueError("index can't contain negative values")

    # Converting the array with `tolist` seems to improve performance
    # when iterating and indexing the result (see usage in `pad`)
    return np.broadcast_to(x, (ndim, 2)).tolist()


def _pad_dispatcher(array, pad_width, mode=None, **kwargs):
    # Dispatcher for __array_function__ support: only `array` participates
    # in dispatch.
    return (array,)


###############################################################################
# Public functions


@array_function_dispatch(_pad_dispatcher, module='numpy')
def pad(array, pad_width, mode='constant', **kwargs):
    """
    Pad an array.

    Parameters
    ----------
    array : array_like of rank N
        The array to pad.
    pad_width : {sequence, array_like, int}
        Number of values padded to the edges of each axis.
        ((before_1, after_1), ... (before_N, after_N)) unique pad widths
        for each axis.
        ((before, after),) yields same before and after pad for each axis.
        (pad,) or int is a shortcut for before = after = pad width for all
        axes.
    mode : str or function, optional
        One of the following string values or a user supplied function.

        'constant' (default)
            Pads with a constant value.
        'edge'
            Pads with the edge values of array.
        'linear_ramp'
            Pads with the linear ramp between end_value and the
            array edge value.
        'maximum'
            Pads with the maximum value of all or part of the
            vector along each axis.
        'mean'
            Pads with the mean value of all or part of the
            vector along each axis.
        'median'
            Pads with the median value of all or part of the
            vector along each axis.
        'minimum'
            Pads with the minimum value of all or part of the
            vector along each axis.
        'reflect'
            Pads with the reflection of the vector mirrored on
            the first and last values of the vector along each
            axis.
        'symmetric'
            Pads with the reflection of the vector mirrored
            along the edge of the array.
        'wrap'
            Pads with the wrap of the vector along the axis.
            The first values are used to pad the end and the
            end values are used to pad the beginning.
        'empty'
            Pads with undefined values.

            .. versionadded:: 1.17

        <function>
            Padding function, see Notes.
    stat_length : sequence or int, optional
        Used in 'maximum', 'mean', 'median', and 'minimum'.  Number of
        values at edge of each axis used to calculate the statistic value.

        ((before_1, after_1), ... (before_N, after_N)) unique statistic
        lengths for each axis.

        ((before, after),) yields same before and after statistic lengths
        for each axis.

        (stat_length,) or int is a shortcut for before = after = statistic
        length for all axes.

        Default is ``None``, to use the entire axis.
    constant_values : sequence or scalar, optional
        Used in 'constant'.  The values to set the padded values for each
        axis.

        ``((before_1, after_1), ... (before_N, after_N))`` unique pad
        constants for each axis.

        ``((before, after),)`` yields same before and after constants for
        each axis.

        ``(constant,)`` or ``constant`` is a shortcut for ``before = after =
        constant`` for all axes.

        Default is 0.
    end_values : sequence or scalar, optional
        Used in 'linear_ramp'.  The values used for the ending value of the
        linear_ramp and that will form the edge of the padded array.

        ``((before_1, after_1), ... (before_N, after_N))`` unique end values
        for each axis.

        ``((before, after),)`` yields same before and after end values for
        each axis.

        ``(constant,)`` or ``constant`` is a shortcut for ``before = after =
        constant`` for all axes.

        Default is 0.
    reflect_type : {'even', 'odd'}, optional
        Used in 'reflect', and 'symmetric'.  The 'even' style is the
        default with an unaltered reflection around the edge value.  For
        the 'odd' style, the extended part of the array is created by
        subtracting the reflected values from two times the edge value.

    Returns
    -------
    pad : ndarray
        Padded array of rank equal to `array` with shape increased
        according to `pad_width`.

    Notes
    -----
    .. versionadded:: 1.7.0

    For an array with rank greater than 1, some of the padding of later
    axes is calculated from padding of previous axes.  This is easiest to
    think about with a rank 2 array where the corners of the padded array
    are calculated by using padded values from the first axis.

    The padding function, if used, should modify a rank 1 array in-place. It
    has the following signature::

        padding_func(vector, iaxis_pad_width, iaxis, kwargs)

    where

        vector : ndarray
            A rank 1 array already padded with zeros.  Padded values are
            vector[:iaxis_pad_width[0]] and vector[-iaxis_pad_width[1]:].
        iaxis_pad_width : tuple
            A 2-tuple of ints, iaxis_pad_width[0] represents the number of
            values padded at the beginning of vector where
            iaxis_pad_width[1] represents the number of values padded at
            the end of vector.
        iaxis : int
            The axis currently being calculated.
        kwargs : dict
            Any keyword arguments the function requires.

    Examples
    --------
    >>> a = [1, 2, 3, 4, 5]
    >>> np.pad(a, (2, 3), 'constant', constant_values=(4, 6))
    array([4, 4, 1, ..., 6, 6, 6])

    >>> np.pad(a, (2, 3), 'edge')
    array([1, 1, 1, ..., 5, 5, 5])

    >>> np.pad(a, (2, 3), 'linear_ramp', end_values=(5, -4))
    array([ 5,  3,  1,  2,  3,  4,  5,  2, -1, -4])

    >>> np.pad(a, (2,), 'maximum')
    array([5, 5, 1, 2, 3, 4, 5, 5, 5])

    >>> np.pad(a, (2,), 'mean')
    array([3, 3, 1, 2, 3, 4, 5, 3, 3])

    >>> np.pad(a, (2,), 'median')
    array([3, 3, 1, 2, 3, 4, 5, 3, 3])

    >>> a = [[1, 2], [3, 4]]
    >>> np.pad(a, ((3, 2), (2, 3)), 'minimum')
    array([[1, 1, 1, 2, 1, 1, 1],
           [1, 1, 1, 2, 1, 1, 1],
           [1, 1, 1, 2, 1, 1, 1],
           [1, 1, 1, 2, 1, 1, 1],
           [3, 3, 3, 4, 3, 3, 3],
           [1, 1, 1, 2, 1, 1, 1],
           [1, 1, 1, 2, 1, 1, 1]])

    >>> a = [1, 2, 3, 4, 5]
    >>> np.pad(a, (2, 3), 'reflect')
    array([3, 2, 1, 2, 3, 4, 5, 4, 3, 2])

    >>> np.pad(a, (2, 3), 'reflect', reflect_type='odd')
    array([-1,  0,  1,  2,  3,  4,  5,  6,  7,  8])

    >>> np.pad(a, (2, 3), 'symmetric')
    array([2, 1, 1, 2, 3, 4, 5, 5, 4, 3])

    >>> np.pad(a, (2, 3), 'symmetric', reflect_type='odd')
    array([0, 1, 1, 2, 3, 4, 5, 5, 6, 7])

    >>> np.pad(a, (2, 3), 'wrap')
    array([4, 5, 1, 2, 3, 4, 5, 1, 2, 3])

    >>> def pad_with(vector, pad_width, iaxis, kwargs):
    ...     pad_value = kwargs.get('padder', 10)
    ...     vector[:pad_width[0]] = pad_value
    ...     vector[-pad_width[1]:] = pad_value
    >>> a = np.arange(6)
    >>> a = a.reshape((2, 3))
    >>> np.pad(a, 2, pad_with)
    array([[10, 10, 10, 10, 10, 10, 10],
           [10, 10, 10, 10, 10, 10, 10],
           [10, 10,  0,  1,  2, 10, 10],
           [10, 10,  3,  4,  5, 10, 10],
           [10, 10, 10, 10, 10, 10, 10],
           [10, 10, 10, 10, 10, 10, 10]])
    >>> np.pad(a, 2, pad_with, padder=100)
    array([[100, 100, 100, 100, 100, 100, 100],
           [100, 100, 100, 100, 100, 100, 100],
           [100, 100,   0,   1,   2, 100, 100],
           [100, 100,   3,   4,   5, 100, 100],
           [100, 100, 100, 100, 100, 100, 100],
           [100, 100, 100, 100, 100, 100, 100]])
    """
    array = np.asarray(array)
    pad_width = np.asarray(pad_width)

    if not pad_width.dtype.kind == 'i':
        raise TypeError('`pad_width` must be of integral type.')

    # Broadcast to shape (array.ndim, 2)
    pad_width = _as_pairs(pad_width, array.ndim, as_index=True)

    if callable(mode):
        # Old behavior: Use user-supplied function with np.apply_along_axis
        function = mode
        # Create a new zero padded array
        padded, _ = _pad_simple(array, pad_width, fill_value=0)
        # And apply along each axis
        for axis in range(padded.ndim):
            # Iterate using ndindex as in apply_along_axis, but assuming that
            # function operates inplace on the padded array.

            # view with the iteration axis at the end
            view = np.moveaxis(padded, axis, -1)

            # compute indices for the iteration axes, and append a trailing
            # ellipsis to prevent 0d arrays decaying to scalars (gh-8642)
            inds = ndindex(view.shape[:-1])
            inds = (ind + (Ellipsis,) for ind in inds)
            for ind in inds:
                function(view[ind], pad_width[axis], axis, kwargs)

        return padded

    # Make sure that no unsupported keywords were passed for the current mode
    allowed_kwargs = {
        'empty': [], 'edge': [], 'wrap': [],
        'constant': ['constant_values'],
        'linear_ramp': ['end_values'],
        'maximum': ['stat_length'],
        'mean': ['stat_length'],
        'median': ['stat_length'],
        'minimum': ['stat_length'],
        'reflect': ['reflect_type'],
        'symmetric': ['reflect_type'],
    }
    try:
        unsupported_kwargs = set(kwargs) - set(allowed_kwargs[mode])
    except KeyError:
        raise ValueError("mode '{}' is not supported".format(mode))
    if unsupported_kwargs:
        raise ValueError("unsupported keyword arguments for mode '{}': {}"
                         .format(mode, unsupported_kwargs))

    stat_functions = {"maximum": np.amax, "minimum": np.amin,
                      "mean": np.mean, "median": np.median}

    # Create array with final shape and original values
    # (padded area is undefined)
    padded, original_area_slice = _pad_simple(array, pad_width)
    # And prepare iteration over all dimensions
    # (zipping may be more readable than using enumerate)
    axes = range(padded.ndim)

    if mode == "constant":
        values = kwargs.get("constant_values", 0)
        values = _as_pairs(values, padded.ndim)
        for axis, width_pair, value_pair in zip(axes, pad_width, values):
            roi = _view_roi(padded, original_area_slice, axis)
            _set_pad_area(roi, axis, width_pair, value_pair)

    elif mode == "empty":
        pass  # Do nothing as _pad_simple already returned the correct result

    elif array.size == 0:
        # Only modes "constant" and "empty" can extend empty axes, all other
        # modes depend on `array` not being empty
        # -> ensure every empty axis is only "padded with 0"
        for axis, width_pair in zip(axes, pad_width):
            if array.shape[axis] == 0 and any(width_pair):
                raise ValueError(
                    "can't extend empty axis {} using modes other than "
                    "'constant' or 'empty'".format(axis)
                )
        # passed, don't need to do anything more as _pad_simple already
        # returned the correct result

    elif mode == "edge":
        for axis, width_pair in zip(axes, pad_width):
            roi = _view_roi(padded, original_area_slice, axis)
            edge_pair = _get_edges(roi, axis, width_pair)
            _set_pad_area(roi, axis, width_pair, edge_pair)

    elif mode == "linear_ramp":
        end_values = kwargs.get("end_values", 0)
        end_values = _as_pairs(end_values, padded.ndim)
        for axis, width_pair, value_pair in zip(axes, pad_width, end_values):
            roi = _view_roi(padded, original_area_slice, axis)
            ramp_pair = _get_linear_ramps(roi, axis, width_pair, value_pair)
            _set_pad_area(roi, axis, width_pair, ramp_pair)

    elif mode in stat_functions:
        func = stat_functions[mode]
        length = kwargs.get("stat_length", None)
        length = _as_pairs(length, padded.ndim, as_index=True)
        for axis, width_pair, length_pair in zip(axes, pad_width, length):
            roi = _view_roi(padded, original_area_slice, axis)
            stat_pair = _get_stats(roi, axis, width_pair, length_pair, func)
            _set_pad_area(roi, axis, width_pair, stat_pair)

    elif mode in {"reflect", "symmetric"}:
        method = kwargs.get("reflect_type", "even")
        include_edge = True if mode == "symmetric" else False
        for axis, (left_index, right_index) in zip(axes, pad_width):
            if array.shape[axis] == 1 and (left_index > 0 or right_index > 0):
                # Extending singleton dimension for 'reflect' is legacy
                # behavior; it really should raise an error.
                edge_pair = _get_edges(padded, axis, (left_index, right_index))
                _set_pad_area(
                    padded, axis, (left_index, right_index), edge_pair)
                continue

            roi = _view_roi(padded, original_area_slice, axis)
            while left_index > 0 or right_index > 0:
                # Iteratively pad until dimension is filled with reflected
                # values. This is necessary if the pad area is larger than
                # the length of the original values in the current dimension.
                left_index, right_index = _set_reflect_both(
                    roi, axis, (left_index, right_index),
                    method, include_edge
                )

    elif mode == "wrap":
        for axis, (left_index, right_index) in zip(axes, pad_width):
            roi = _view_roi(padded, original_area_slice, axis)
            while left_index > 0 or right_index > 0:
                # Iteratively pad until dimension is filled with wrapped
                # values. This is necessary if the pad area is larger than
                # the length of the original values in the current dimension.
                left_index, right_index = _set_wrap_both(
                    roi, axis, (left_index, right_index))

    return padded
#include "../c-mpi.h"

/*
 * Accumulate into y the off-diagonal (north/south neighbour) contributions
 * of a Manteuffel-type m x m grid operator, y[i] += -x[i-n] - x[i+n], where
 * n = sqrt(m) is the grid width.  Rows are block-distributed over the ranks
 * of mpi_comm; each rank first exchanges its boundary rows of x with its
 * neighbours using an even/odd ("eo") staggered send/receive schedule that
 * avoids deadlock with blocking MPI_Send/MPI_Recv.
 *
 * Parameters:
 *   mpi_comm   - communicator over which the matrix is distributed
 *   m          - global matrix dimension (assumed a perfect square, m = n*n)
 *   x          - local block of the input vector (mloc entries)
 *   y          - local block of the output vector, updated in place
 *   x_notlocal - scratch of 2*n entries: [0,n) holds the row received from
 *                the previous rank, [n,2n) the row from the next rank
 *
 * Returns 0.
 */
int mpi_matvec_manteuffel_M_nblock_eo( MPI_Comm mpi_comm, long int m, double *x, double *y, double *x_notlocal ){

  int n, i, j, k, mloc, my_rank, pool_size;

  n = sqrt( m );  /* grid width; truncating assignment, m must be n*n */

  /* BUG FIX: rank and size must be queried on the communicator actually
   * used by the sends/receives below.  The original code asked
   * MPI_COMM_WORLD, which yields wrong neighbour ranks and local sizes
   * whenever mpi_comm is a sub-communicator.  (Also dropped the unused
   * local `iglobal`.) */
  MPI_Comm_rank(mpi_comm, &my_rank);
  MPI_Comm_size(mpi_comm, &pool_size);

  /* Block row distribution: the first (m mod p) ranks hold one extra row. */
  mloc = m / pool_size + (( my_rank < m - pool_size*( m / pool_size ) ) ? 1 : 0);

  /* Phase 1: even ranks send their boundary rows, odd ranks receive. */
  if( my_rank % 2 == 0 ){
    if( pool_size != 1 ){
      if( my_rank != pool_size-1 )
        MPI_Send( &(x[mloc-n]), n, MPI_DOUBLE, my_rank+1, 0, mpi_comm);
      if( my_rank != 0 )
        MPI_Send( &(x[0]), n, MPI_DOUBLE, my_rank-1, 0, mpi_comm);
    }
  } else {
    if( pool_size != 1 ){
      if( my_rank != 0 )
        MPI_Recv( &(x_notlocal[0]), n, MPI_DOUBLE, my_rank-1, 0, mpi_comm, MPI_STATUS_IGNORE);
      if( my_rank != pool_size-1 )
        MPI_Recv( &(x_notlocal[n]), n, MPI_DOUBLE, my_rank+1, 0, mpi_comm, MPI_STATUS_IGNORE);
    }
  }

  /* Phase 2: roles swapped - odd ranks send, even ranks receive. */
  if( my_rank % 2 == 1 ){
    if( pool_size != 1 ){
      if( my_rank != pool_size-1 )
        MPI_Send( &(x[mloc-n]), n, MPI_DOUBLE, my_rank+1, 0, mpi_comm);
      if( my_rank != 0 )
        MPI_Send( &(x[0]), n, MPI_DOUBLE, my_rank-1, 0, mpi_comm);
    }
  } else {
    if( pool_size != 1 ){
      if( my_rank != 0 )
        MPI_Recv( &(x_notlocal[0]), n, MPI_DOUBLE, my_rank-1, 0, mpi_comm, MPI_STATUS_IGNORE);
      if( my_rank != pool_size-1 )
        MPI_Recv( &(x_notlocal[n]), n, MPI_DOUBLE, my_rank+1, 0, mpi_comm, MPI_STATUS_IGNORE);
    }
  }

  /* Local accumulation.  The branches distinguish the first rank (no rows
   * above), the last rank (no rows below) and interior ranks; within each,
   * mloc > 2*n vs. mloc <= 2*n decides whether the i+n / i-n neighbours of
   * a row live locally or in the received halo rows of x_notlocal. */
  if ( my_rank == 0 ){
    if( mloc > 2*n ){
      /* First n rows have no row above (global boundary). */
      for( i=0;i<n;i++ ) y[i] += ( -1.0e00 * x[i+n] );
      if( pool_size == 1 ){
        for( i=n;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x[i+n] );
        for( i=mloc-n;i<mloc;i++ ) y[i] += ( -1.0e00 * x[i-n] );
      } else {
        for( i=n;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x[i+n] );
        j=0;
        for( i=mloc-n;i<mloc;i++ ){
          y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x_notlocal[j+n] );
          j++;
        }
      }
    } else {
      /* Fewer than two grid rows locally: i+n may fall in the halo. */
      for( i=0;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i+n] );
      j=0;
      for( i=mloc-n;i<n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j+n] );
        j++;
      }
      if( pool_size == 1 ){
        for( i=n;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x[i+n] );
        for( i=mloc-n;i<mloc;i++ ) y[i] += ( -1.0e00 * x[i-n] );
      } else {
        for( i=n;i<mloc;i++ ){
          y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x_notlocal[j+n] );
          j++;
        }
      }
    }
  }
  else if ( ( my_rank == pool_size-1 ) && ( my_rank != 0 ) ){
    /* Last rank: rows above come from the halo, no rows below (boundary). */
    if ( mloc > 2*n ){
      j=0;
      for( i=0;i<n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] ) + ( -1.0e00 * x[i+n] );
        j++;
      }
      for( i=n;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x[i+n] );
      for( i=mloc-n;i<mloc;i++ ) y[i] += ( -1.0e00 * x[i-n] );
    } else {
      j=0;
      for( i=0;i<mloc-n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] ) + ( -1.0e00 * x[i+n] );
        j++;
      }
      for( i=mloc-n;i<n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] );
        j++;
      }
      for( i=n;i<mloc;i++ ) y[i] += ( -1.0e00 * x[i-n] );
    }
  }
  else {
    /* Interior rank: halo rows on both sides. */
    if ( mloc > 2*n ){
      j=0;
      for( i=0;i<n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] ) + ( -1.0e00 * x[i+n] );
        j++;
      }
      for( i=n;i<mloc-n;i++ ) y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x[i+n] );
      j=0;
      for( i=mloc-n;i<mloc;i++ ){
        y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x_notlocal[j+n] );
        j++;
      }
    } else {
      j=0;
      for( i=0;i<mloc-n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] ) + ( -1.0e00 * x[i+n] );
        j++;
      }
      k=0;
      for( i=mloc-n;i<n;i++ ){
        y[i] += ( -1.0e00 * x_notlocal[j] ) + ( -1.0e00 * x_notlocal[k+n] );
        j++;
        k++;
      }
      for( i=n;i<mloc;i++ ){
        y[i] += ( -1.0e00 * x[i-n] ) + ( -1.0e00 * x_notlocal[k+n] );
        k++;
      }
    }
  }

  return 0;
}
km = float(input("Digite quantos KM foram percorridos: ")) dias = int(input("Digite quantos dias foram usados o veiculo: ")) diasTotal = dias * 60 kmtotal = km * 0.15 totalPagar = float(diasTotal + kmtotal) print(f"Voce utilizou o veiculo por {dias} dias e percorreu {km} km o total a pagar é de R${totalPagar:.2f}")
/* ----------------------------------------------------------------------------

 * GTSAM Copyright 2010, Georgia Tech Research Corporation,
 * Atlanta, Georgia 30332-0415
 * All Rights Reserved
 * Authors: Frank Dellaert, et al. (see THANKS for the full author list)

 * See LICENSE for the license information

 * -------------------------------------------------------------------------- */

/**
 * @file NonlinearOptimizerParams.h
 * @brief Parameters for nonlinear optimization
 * @author Yong-Dian Jian
 * @author Richard Roberts
 * @author Frank Dellaert
 * @author Andrew Melim
 * @date Apr 1, 2012
 */

#pragma once

#include <gtsam/linear/GaussianFactorGraph.h>
#include <gtsam/linear/SubgraphSolver.h>
#include <boost/optional.hpp>
#include <string>

namespace gtsam {

/** The common parameters for Nonlinear optimizers.  Most optimizers
 * deriving from NonlinearOptimizer also subclass the parameters.
 */
class GTSAM_EXPORT NonlinearOptimizerParams {
public:
  /** See NonlinearOptimizerParams::verbosity */
  enum Verbosity {
    SILENT, TERMINATION, ERROR, VALUES, DELTA, LINEAR
  };

  // Convergence / termination criteria.  Iteration stops as soon as ANY of
  // these thresholds is met (or maxIterations is reached).
  size_t maxIterations; ///< The maximum iterations to stop iterating (default 100)
  double relativeErrorTol; ///< The maximum relative error decrease to stop iterating (default 1e-5)
  double absoluteErrorTol; ///< The maximum absolute error decrease to stop iterating (default 1e-5)
  double errorTol; ///< The maximum total error to stop iterating (default 0.0)
  Verbosity verbosity; ///< The printing verbosity during optimization (default SILENT)
  Ordering::OrderingType orderingType; ///< The method of ordering use during variable elimination (default COLAMD)

  NonlinearOptimizerParams() :
      maxIterations(100), relativeErrorTol(1e-5), absoluteErrorTol(1e-5), errorTol(
          0.0), verbosity(SILENT), orderingType(Ordering::COLAMD), linearSolverType(MULTIFRONTAL_CHOLESKY) {}

  virtual ~NonlinearOptimizerParams() {
  }
  virtual void print(const std::string& str = "") const;

  // --- Accessors for the termination criteria ---
  // NOTE(review): getMaxIterations returns size_t while setMaxIterations
  // takes int -- kept as-is for API compatibility.
  size_t getMaxIterations() const {
    return maxIterations;
  }
  double getRelativeErrorTol() const {
    return relativeErrorTol;
  }
  double getAbsoluteErrorTol() const {
    return absoluteErrorTol;
  }
  double getErrorTol() const {
    return errorTol;
  }
  std::string getVerbosity() const {
    return verbosityTranslator(verbosity);
  }

  void setMaxIterations(int value) {
    maxIterations = value;
  }
  void setRelativeErrorTol(double value) {
    relativeErrorTol = value;
  }
  void setAbsoluteErrorTol(double value) {
    absoluteErrorTol = value;
  }
  void setErrorTol(double value) {
    errorTol = value;
  }
  void setVerbosity(const std::string& src) {
    verbosity = verbosityTranslator(src);
  }

  // String <-> enum conversion for the verbosity setting.
  static Verbosity verbosityTranslator(const std::string &s) ;
  static std::string verbosityTranslator(Verbosity value) ;

  /** See NonlinearOptimizerParams::linearSolverType */
  enum LinearSolverType {
    MULTIFRONTAL_CHOLESKY,
    MULTIFRONTAL_QR,
    SEQUENTIAL_CHOLESKY,
    SEQUENTIAL_QR,
    Iterative, /* Experimental Flag */
    CHOLMOD, /* Experimental Flag */
  };

  LinearSolverType linearSolverType; ///< The type of linear solver to use in the nonlinear optimizer
  boost::optional<Ordering> ordering; ///< The optional variable elimination ordering, or empty to use COLAMD (default: empty)
  IterativeOptimizationParameters::shared_ptr iterativeParams; ///< The container for iterativeOptimization parameters. used in CG Solvers.

  // Convenience predicates on linearSolverType.
  inline bool isMultifrontal() const {
    return (linearSolverType == MULTIFRONTAL_CHOLESKY)
        || (linearSolverType == MULTIFRONTAL_QR);
  }

  inline bool isSequential() const {
    return (linearSolverType == SEQUENTIAL_CHOLESKY)
        || (linearSolverType == SEQUENTIAL_QR);
  }

  inline bool isCholmod() const {
    return (linearSolverType == CHOLMOD);
  }

  inline bool isIterative() const {
    return (linearSolverType == Iterative);
  }

  // Map the configured solver type to the matching elimination routine;
  // throws for solver types that have no direct elimination function
  // (Iterative, CHOLMOD).
  GaussianFactorGraph::Eliminate getEliminationFunction() const {
    switch (linearSolverType) {
    case MULTIFRONTAL_CHOLESKY:
    case SEQUENTIAL_CHOLESKY:
      return EliminatePreferCholesky;

    case MULTIFRONTAL_QR:
    case SEQUENTIAL_QR:
      return EliminateQR;

    default:
      throw std::runtime_error(
          "Nonlinear optimization parameter \"factorization\" is invalid");
    }
  }

  std::string getLinearSolverType() const {
    return linearSolverTranslator(linearSolverType);
  }

  void setLinearSolverType(const std::string& solver) {
    linearSolverType = linearSolverTranslator(solver);
  }

  void setIterativeParams(const boost::shared_ptr<IterativeOptimizationParameters> params);

  // Setting an explicit ordering also switches orderingType to CUSTOM.
  void setOrdering(const Ordering& ordering) {
    this->ordering = ordering;
    this->orderingType = Ordering::CUSTOM;
  }

  std::string getOrderingType() const {
    return orderingTypeTranslator(orderingType);
  }

  // Note that if you want to use a custom ordering, you must set the ordering directly, this will switch to custom type
  void setOrderingType(const std::string& ordering){
    orderingType = orderingTypeTranslator(ordering);
  }

private:
  // String <-> enum conversions used by the public getters/setters above.
  std::string linearSolverTranslator(LinearSolverType linearSolverType) const;
  LinearSolverType linearSolverTranslator(const std::string& linearSolverType) const;
  std::string orderingTypeTranslator(Ordering::OrderingType type) const;
  Ordering::OrderingType orderingTypeTranslator(const std::string& type) const;
};

// For backward compatibility:
typedef NonlinearOptimizerParams SuccessiveLinearizationParams;

} /* namespace gtsam */
/*
 * Copyright 2020 The Magma Authors.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @flow strict-local
 * @format
 */
import type {EditSubscriberProps} from './SubscriberEditDialog.js';

import ApnContext from '../../components/context/ApnContext';
import Checkbox from '@material-ui/core/Checkbox';
import FormControl from '@material-ui/core/FormControl';
import List from '@material-ui/core/List';
import ListItemText from '@material-ui/core/ListItemText';
import LoadingFiller from '../../components/LoadingFiller';
import MagmaV1API from '../../../generated/WebClient';
import MenuItem from '@material-ui/core/MenuItem';
import OutlinedInput from '@material-ui/core/OutlinedInput';
import PolicyContext from '../../components/context/PolicyContext';
import React from 'react';
import Select from '@material-ui/core/Select';
import nullthrows from '../../../shared/util/nullthrows';
import useMagmaAPI from '../../../api/useMagmaAPI';
import {AltFormField} from '../../components/FormField';
import {makeStyles} from '@material-ui/styles';
import {useContext} from 'react';
import {useParams} from 'react-router-dom';

const useStyles = makeStyles(() => ({
  // Shared style for the three multi-select controls below.
  input: {
    display: 'inline-flex',
    margin: '5px 0',
    width: '50%',
    fullWidth: true,
  },
}));

/**
 * Traffic-policy tab of the subscriber edit dialog.
 *
 * Renders three multi-selects — active APNs, base names and active
 * policies — and reports every change to the parent through
 * props.onSubscriberChange. APN and policy options come from their
 * React contexts; base names are fetched from the network API
 * (a loading filler is shown until that request resolves).
 */
export default function EditSubscriberTrafficPolicy(
  props: EditSubscriberProps,
) {
  const classes = useStyles();
  const params = useParams();
  const apnCtx = useContext(ApnContext);
  // Option list for the APN select; tolerates a missing context state.
  const apns = Array.from(Object.keys(apnCtx.state || {}));
  const policyCtx = useContext(PolicyContext);

  // Base names are not kept in a context; fetch them for this network.
  const {isLoading: baseNamesLoading, response: baseNames} = useMagmaAPI(
    MagmaV1API.getLteByNetworkIdSubscriberConfigBaseNames,
    {
      networkId: nullthrows(params.networkId),
    },
  );

  if (baseNamesLoading) {
    return <LoadingFiller />;
  }

  return (
    <div>
      <List>
        <AltFormField label={'Active APNs'}>
          <FormControl className={classes.input}>
            <Select
              multiple
              id="activeApnTestId"
              value={props.subscriberState.active_apns ?? []}
              onChange={({target}) => {
                props.onSubscriberChange('active_apns', target.value);
              }}
              renderValue={selected => selected.join(', ')}
              input={<OutlinedInput />}>
              {apns.map((k: string, idx: number) => (
                <MenuItem key={idx} value={k}>
                  <Checkbox
                    checked={
                      props.subscriberState.active_apns != null
                        ? props.subscriberState.active_apns.indexOf(k) > -1
                        : false
                    }
                  />
                  <ListItemText primary={k} />
                </MenuItem>
              ))}
            </Select>
          </FormControl>
        </AltFormField>
        <AltFormField label={'Base Names'}>
          <FormControl className={classes.input}>
            <Select
              multiple
              value={props.subscriberState.active_base_names ?? []}
              onChange={({target}) => {
                props.onSubscriberChange('active_base_names', target.value);
              }}
              renderValue={selected => selected.join(', ')}
              input={<OutlinedInput />}>
              {(baseNames || []).map((k: string, idx: number) => (
                <MenuItem key={idx} value={k}>
                  <Checkbox
                    checked={
                      props.subscriberState.active_base_names != null
                        ? props.subscriberState.active_base_names.indexOf(k) >
                          -1
                        : false
                    }
                  />
                  <ListItemText primary={k} />
                </MenuItem>
              ))}
            </Select>
          </FormControl>
        </AltFormField>
        <AltFormField label={'Active Policies'}>
          <FormControl className={classes.input}>
            <Select
              multiple
              value={props.subscriberState.active_policies ?? []}
              onChange={({target}) => {
                props.onSubscriberChange('active_policies', target.value);
              }}
              renderValue={selected => selected.join(', ')}
              input={<OutlinedInput />}>
              {Object.keys(policyCtx.state).map((k: string, idx: number) => (
                <MenuItem key={idx} value={k}>
                  <Checkbox
                    checked={
                      props.subscriberState.active_policies != null
                        ? props.subscriberState.active_policies.indexOf(k) > -1
                        : false
                    }
                  />
                  <ListItemText primary={k} />
                </MenuItem>
              ))}
            </Select>
          </FormControl>
        </AltFormField>
      </List>
    </div>
  );
}
// Bitfinex triangular-arbitrage monitor.
// Subscribes to USD/ETH/BTC ticker streams, keeps bid/ask quotes per coin in
// both USD and BTC, derives cross-rate ratios, and logs coin pairs whose
// round-trip USD -> coinA -> BTC -> coinB -> USD conversion exceeds 1.0.
import BFX from "bitfinex-api-node";

// Coins tracked in the quote table (symbols as used by Bitfinex v2).
const coinList = [
  "ltc", "eth", "etc", "rrt", "zec", "xmr", "dsh", "bcc", "bcu", "xrp",
  "iot", "eos", "san", "omg", "bch", "neo", "etp", "qtm", "bt1", "bt2",
  "avt", "edo", "btg", "dat", "qsh", "yyw"
];
// Trading pairs quoted in USD / ETH / BTC respectively.
const pairList_USD = [
  "btcusd", "ltcusd", "ethusd", "etcusd", "rrtusd", "zecusd", "xmrusd",
  "dshusd", "bccusd", "bcuusd", "xrpusd", "iotusd", "eosusd", "sanusd",
  "omgusd", "bchusd", "neousd", "etpusd", "qtmusd", "bt1usd", "bt2usd",
  "avtusd", "edousd", "btgusd", "datusd", "qshusd", "yywusd"
];
const pairList_ETH = [
  "ioteth", "eoseth", "saneth", "omgeth", "bcheth", "neoeth", "etpeth",
  "qtmeth", "avteth", "edoeth", "dateth", "qsheth", "yyweth"
];
const pairList_BTC = [
  "ltcbtc", "ethbtc", "etcbtc", "rrtbtc", "zecbtc", "xmrbtc", "dshbtc",
  "bccbtc", "bcubtc", "xrpbtc", "iotbtc", "eosbtc", "sanbtc", "omgbtc",
  "bchbtc", "neobtc", "etpbtc", "qtmbtc", "bt1btc", "bt2btc", "avtbtc",
  "edobtc", "btgbtc", "datbtc", "qshbtc", "yywbtc"
];

// Quote table: coin -> { bid: {usd, btc, ratio}, ask: {usd, btc, ratio} }.
// Values start at 0 and are filled in by the "ticker" handler below.
const coin_BTC2USD = {};

// Seed coin_BTC2USD with zeroed bid/ask entries for every tracked coin.
const initCoin = () => {
  coinList.forEach(coinId =>
    Object.assign(coin_BTC2USD, {
      [coinId]: {
        bid: { usd: 0, btc: 0, ratio: 0 },
        ask: { usd: 0, btc: 0, ratio: 0 }
      }
    })
  );
};
initCoin();

// Generic logging callback (currently unused by the live code path).
const cb = response => {
  console.log(response);
};

const opts = {
  version: 2, // Bitfinex API v2
  transform: true // let the client map raw arrays onto named ticker fields
};

// NOTE(review): SECURITY — API key and secret are hardcoded in source.
// These credentials should be moved to environment variables and the
// committed values revoked.
const bws = new BFX(
  "aUZKjyEDAzranUPPJRySrJGpzzim6RHeHpR526dOj57",
  "oldv0oz8ZbKLrgUuZwqbYHQ7EtaDzXY4Y35KLjMGJb0",
  opts
).ws;
const bfx = new BFX(
  "aUZKjyEDAzranUPPJRySrJGpzzim6RHeHpR526dOj57",
  "oldv0oz8ZbKLrgUuZwqbYHQ7EtaDzXY4Y35KLjMGJb0",
  opts
);

//bfx.rest.ticker('tBTCUSD', (error, response, body) => console.log(response))
/*
// Earlier REST-polling implementation, superseded by the websocket feed below.
const getCoinTicket = async () => {
  const timestamp = new Date();
  //console.log(timestamp);
  const firstBatch = await batchRequest(
    0,
    Math.floor(pairList_USD.length / 3),
    pairList_USD,
    0
  );
  const secondBatch = await batchRequest(
    Math.ceil(pairList_USD.length / 3),
    Math.floor(pairList_USD.length * (2 / 3)),
    pairList_USD,
    Math.floor(pairList_USD.length / 3) * 100
  );
  const third = await batchRequest(
    Math.floor(pairList_USD.length * (2 / 3)),
    pairList_USD.length,
    pairList_USD,
    Math.floor(pairList_USD.length * (2 / 3)) * 100
  );
  const btcfirstBatch = await batchRequest(
    0,
    Math.floor(pairList_BTC.length / 3),
    pairList_BTC,
    1000
  );
  const btcsecondBatch = await batchRequest(
    Math.ceil(pairList_BTC.length / 3),
    Math.floor(pairList_BTC.length * (2 / 3)),
    pairList_BTC,
    1000 + Math.floor(pairList_BTC.length / 3) * 100
  );
  const btcthird = await batchRequest(
    Math.floor(pairList_BTC.length * (2 / 3)),
    pairList_BTC.length,
    pairList_BTC,
    1000 + Math.floor(pairList_BTC.length * (2 / 3)) * 100
  );
};

const batchRequest = (start, end, coinArray, startTime) => {
  return new Promise((resolve, reject) => {
    let posttime = startTime;
    const coinStream = coinArray.slice(start, end).map(item => {
      posttime += 100;
      return ticketFunc(item, posttime);
    });
    Promise.all(coinStream).then(() => {
      resolve();
    });
  });
};

const ticketFunc = (key, time) => {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      bfx.rest.ticker(`t${key.toUpperCase()}`, (error, response, body) => {
        console.log(`${key} ${response.LAST_PRICE}`);
        resolve();
      });
    }, time);
  });
};
getCoinTicket();
*/

bws.on("auth", () => {
  // emitted after .auth() — needed for private api endpoints
  console.log("authenticated");
  // bws.submitOrder ...
});

// On connect, subscribe to tickers for every pair across all three markets.
bws.on("open", () => {
  pairList_USD.forEach(item => {
    bws.subscribeTicker(item);
    //bws.subscribeOrderBook(item)
    //bws.subscribeTrades(item)
  });
  pairList_ETH.forEach(item => {
    bws.subscribeTicker(item);
    //bws.subscribeOrderBook(item)
    //bws.subscribeTrades(item)
  });
  pairList_BTC.forEach(item => {
    bws.subscribeTicker(item);
    //bws.subscribeOrderBook(item)
    //bws.subscribeTrades(item)
  });
});

// Ticker updates: record bid/ask for liquid pairs (24h volume > 100000).
// Pair names look like "tIOTUSD": chars 1-3 are the coin, 4-6 the quote
// currency ("usd" or "btc" keys in the quote table).
bws.on("ticker", (pair, ticker) => {
  //console.log(ticker)
  const coinType = pair.slice(1, 4).toLowerCase();
  const dollar = pair.slice(4, 7).toLowerCase();
  console.log(dollar)
  if (
    Object.keys(coin_BTC2USD).indexOf(coinType) > -1 &&
    ticker.VOLUME > 100000
  ) {
    coin_BTC2USD[coinType]["ask"][dollar] = ticker.ASK;
    coin_BTC2USD[coinType]["bid"][dollar] = ticker.BID;
    //console.log(`${coinType} ${dollar} ${ticker.LAST_PRICE}`);
  }
});

// Recompute cross-rate ratios for every coin with complete quotes.
// ask.ratio = BTC received per USD spent; bid.ratio = USD received per BTC.
const GET_COIN_RATIO = () => {
  //console.log(new Date());
  Object.keys(coin_BTC2USD).forEach(item => {
    if (
      coin_BTC2USD[item]["ask"]["usd"] !== 0 &&
      coin_BTC2USD[item]["ask"]["btc"] !== 0 &&
      coin_BTC2USD[item]["bid"]["usd"] !== 0 &&
      coin_BTC2USD[item]["bid"]["btc"] !== 0
    ) {
      coin_BTC2USD[item]["ask"]["ratio"] =
        coin_BTC2USD[item]["bid"]["btc"] / coin_BTC2USD[item]["ask"]["usd"];
      coin_BTC2USD[item]["bid"]["ratio"] =
        coin_BTC2USD[item]["bid"]["usd"] / coin_BTC2USD[item]["ask"]["btc"];
      // console.log(
      //   `${item} ${coin_BTC2USD[item]["usd"] / coin_BTC2USD[item]["btc"]}`
      // );
    }
  });
};

// Notional amount (USD) used when simulating a round trip.
var usd = 50;

// Scan every coin pair and log those whose combined ratio exceeds 1
// (i.e. a theoretical profit before fees beyond the hardcoded 0.2%/leg).
const arbitrage = () => {
  const keys = Object.keys(coin_BTC2USD);
  const arbitrageRank = [];
  keys.map(item => {
    if (
      coin_BTC2USD[item]["ask"]["usd"] !== 0 &&
      coin_BTC2USD[item]["ask"]["btc"] !== 0 &&
      coin_BTC2USD[item]["bid"]["usd"] !== 0 &&
      coin_BTC2USD[item]["bid"]["btc"] !== 0
    ) {
      keys.map(key => {
        const profitMargin =
          coin_BTC2USD[item]["ask"]["ratio"] * coin_BTC2USD[key]["bid"]["ratio"];
        if (profitMargin > 1) {
          // NOTE(review): `.toFixed(4)` returns a string; multiplying it by
          // 100 coerces back to a number and defeats the rounding — the
          // intent was probably `((profitMargin - 1) * 100).toFixed(4)`.
          arbitrageRank.push(
            `${item} -> ${key} : ${(profitMargin - 1).toFixed(4) * 100}% `
          );
        }
      });
      // console.log(
      //   `${item} ${coin_BTC2USD[item]["usd"] / coin_BTC2USD[item]["btc"]}`
      // );
    }
  });
  // NOTE(review): the comparison operands are strings, so this sorts
  // lexicographically, not numerically — "9.1" ranks above "10.5".
  arbitrageRank.sort((a, b) => {
    if (a.split(": ")[1].split("%")[0] > b.split(": ")[1].split("%")[0]) {
      return -1;
    } else {
      return 1;
    }
  });
  try {
    console.log(arbitrageRank.slice(1, 6));
    usdFlow(
      usd,
      arbitrageRank[2].split(" -> ")[0],
      arbitrageRank[2].split(" -> ")[1].split(" ")[0]
    ).then(profit => {});
  } catch (err) {} // NOTE(review): silently swallows errors (e.g. fewer than 3 entries)
};

// Simulate the four-leg conversion USD -> aCoin -> BTC -> bCoin -> USD,
// charging 0.2% per leg, and log the intermediate balances.
const usdFlow = (usd, aCoin, bCoin) => {
  return new Promise((resolve, reject) => {
    const a2USD = coin_BTC2USD[aCoin]["ask"]["usd"];
    const a2BTC = coin_BTC2USD[aCoin]["bid"]["btc"];
    const aPhase1 = usd / a2USD * 0.998;
    const aPhase2 = aPhase1 * a2BTC * 0.998;
    const b2USD = coin_BTC2USD[bCoin]["bid"]["usd"];
    const b2BTC = coin_BTC2USD[bCoin]["ask"]["btc"];
    const bPhase1 = aPhase2 / b2BTC * 0.998;
    const bPhase2 = bPhase1 * b2USD * 0.998;
    console.log(
      `USD: ${usd} -> ${aPhase1} -> ${aPhase2} -> ${bPhase1} -> ${bPhase2}`
    );
    //resolve(bPhase2)
  });
};

//setInterval(GET_COIN_RATIO, 1000);
//setInterval(arbitrage, 1000);
# -*- coding: utf-8 -*-
from functools import reduce
from operator import mul

import numpy as np

from africanus.gridding.simple.gridding import (grid as np_grid_fn,
                                                degrid as np_degrid_fn)
from africanus.util.docs import mod_docs
from africanus.util.requirements import requires_optional

try:
    import dask.array as da
except ImportError as e:
    # Remember the import failure so @requires_optional can raise a
    # helpful error only when grid/degrid are actually called.
    da_import_error = e
else:
    da_import_error = None


# Unfortunately necessary to introduce an extra dim
# for blockwise to work properly
def _grid_fn(vis, uvw, flags, weights, ref_wave,
             convolution_filter, cell_size, nx, ny):
    # Per-chunk worker: blockwise hands each argument in wrapped with a
    # leading length-1 dimension; strip it before gridding and re-add it
    # on the result so output chunk shapes stay one-per-row-chunk.
    return np_grid_fn(vis[0], uvw[0], flags[0], weights[0], ref_wave[0],
                      convolution_filter, cell_size, nx=nx, ny=ny)[None, :]


@requires_optional('dask.array', da_import_error)
def grid(vis, uvw, flags, weights, ref_wave,
         convolution_filter, cell_size, nx=1024, ny=1024):
    """Dask wrapper around the simple numpy gridder.

    The real docstring is attached at module bottom via mod_docs().
    """
    # Creation correlation dimension strings for each correlation
    corrs = tuple('corr-%d' % i for i in range(len(vis.shape[2:])))

    # Get grids, stacked by row: one (1, ny, nx, corrs...) partial grid
    # per row chunk.
    grids = da.core.blockwise(_grid_fn, ("row", "ny", "nx") + corrs,
                              vis, ("row", "chan") + corrs,
                              uvw, ("row", "(u,v,w)"),
                              flags, ("row", "chan") + corrs,
                              weights, ("row", "chan") + corrs,
                              ref_wave, ("chan",),
                              new_axes={"ny": ny, "nx": nx},
                              # each row chunk collapses to a single grid
                              adjust_chunks={"row": 1},
                              convolution_filter=convolution_filter,
                              cell_size=cell_size,
                              ny=ny, nx=nx,
                              dtype=vis.dtype)

    # Sum grids over the row dimension to produce (ny, nx, corr_1, corr_2)
    return grids.sum(axis=0)


@requires_optional('dask.array', da_import_error)
def degrid(grid, uvw, weights, ref_wave, convolution_filter, cell_size):
    """Dask wrapper around the simple numpy degridder.

    The real docstring is attached at module bottom via mod_docs().
    """
    # Grid and weights must agree on the flattened correlation count.
    grid_flat_corrs = reduce(mul, grid.shape[2:])
    weight_flat_corrs = reduce(mul, weights.shape[2:])
    assert grid_flat_corrs == weight_flat_corrs
    assert uvw.shape[0] == weights.shape[0]
    assert weights.shape[1] == ref_wave.shape[0]

    # Creation correlation dimension strings for each correlation
    corrs = tuple('corr-%d' % i for i in range(len(grid.shape[2:])))

    return da.core.blockwise(np_degrid_fn, ("row", "chan") + corrs,
                             grid, ("ny", "nx") + corrs,
                             uvw, ("row", "(u,v,w)"),
                             weights, ("row", "chan") + corrs,
                             ref_wave, ("chan",),
                             concatenate=True,
                             convolution_filter=convolution_filter,
                             cell_size=cell_size,
                             dtype=np.complex64)


# Re-use the numpy implementations' docstrings, rewritten for dask types.
grid.__doc__ = mod_docs(np_grid_fn.__doc__,
                        [(":class:`numpy.ndarray`",
                          ":class:`dask.array.Array`"),
                         ("np.ones_like", "da.ones_like"),
                         ("np.zeros_like", "da.zeros_like")])

degrid.__doc__ = mod_docs(np_degrid_fn.__doc__,
                          [(":class:`numpy.ndarray`",
                            ":class:`dask.array.Array`"),
                           ("np.ones_like", "da.ones_like"),
                           ("np.zeros_like", "da.zeros_like")])
import os import sys from setuptools import setup, find_packages def readme(): with open('README.rst') as f: return f.read() exec(open(os.path.join('saleae', 'version.py')).read()) requires = [ 'future', 'enum34', 'psutil', ] setup( name='saleae', version=__version__, # packages=find_packages(exclude=['tests']), # description="Library to control a Saleae", long_description=readme(), # url="https://github.com/ppannuto/python-saleae", # author="Pat Pannuto", author_email="pat.pannuto+saleae@gmail.com", # license="MIT", # classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: Implementation :: CPython", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Scientific/Engineering", "Topic :: Software Development :: Embedded Systems", "Topic :: Utilities", ], # keywords='string formatting', # install_requires=requires, include_package_data=True, # #test_suite='saleae.tests', # #entry_points = { # 'console_scripts':['saleae = saleae:console'] #}, )
# -*- coding: utf-8 -*- # Author: Daniel Yang <daniel.yj.yang@gmail.com> # # License: BSD 3 clause from ._neural_network import demo, multi_layer_perceptron_classifier # this is for "from <package_name>.neural_network import *" __all__ = ["demo", "multi_layer_perceptron_classifier",]
import React, { Component } from 'react'; import { connect } from 'dva'; import { Row, Col, Icon, Avatar } from 'antd'; import DescriptionList from '../../components/DescriptionList'; import PageHeaderWrapper from '../../components/PageHeaderWrapper'; import styles from './View.less'; const { Description } = DescriptionList; const tabList = [ { key: 'basic', tab: '基本信息', }, { key: 'manager', tab: '管理团队', }, { key: 'fundraising', tab: '募资管理', }, { key: 'financial', tab: '投资组合', }, { key: 'exit', tab: '退出管理', }, { key: 'files', tab: '相关文档', }, { key: 'task', tab: '相关任务', }, ]; @connect(({ projectView, loading }) => ({ ...projectView, loading: loading.effects['projectView/fetchBasic'], })) class View extends Component { state = { tab: 'basic', showTeam: false, }; componentDidMount() { const { dispatch } = this.props; dispatch({ type: 'projectView/fetchBasic', }); } onTabChange = key => { this.setState({ tab: key }); }; onTeamClick = () => { const { showTeam } = this.state; this.setState({ showTeam: !showTeam }); }; renderExtra() { return ( <Row> <Col xs={24} sm={8}> <div className={styles.textSecondary} onClick={this.onTeamClick}> <Icon type="team" /> <div className={styles.heading}>负责团队</div> </div> </Col> <Col xs={24} sm={8}> <div className={styles.textSecondary}> <Icon type="message" /> <div className={styles.heading}>基金动态</div> </div> </Col> <Col xs={24} sm={8}> <div className={styles.textSecondary}> <Icon type="bar-chart" /> <div className={styles.heading}>意见报告</div> </div> </Col> </Row> ); } renderDescription() { const { basic } = this.props; return ( <DescriptionList className={styles.headerList} size="small" col="1"> <Description>{basic.s || '未知'}</Description> </DescriptionList> ); } renderContent() { const { tab, loading } = this.state; if (loading) { return null; } switch (tab) { case 'basic': return null; case 'invest': return null; // case 'files': // return this.renderFiles(); // case 'task': // return this.renderTask(); default: return null; } } 
render() { const { loading, basic } = this.props; return ( <PageHeaderWrapper loading={loading} skeletonParagraph={{ rows: 5 }} title={basic.proname || ' '} logo={<Avatar alt={basic.proname} src={basic.icon} size={96} />} content={this.renderDescription()} extraContent={this.renderExtra()} tabList={tabList} disableBreadcrumb onTabChange={this.onTabChange} > {this.renderContent()} </PageHeaderWrapper> ); } } export default View;
# Authentication blueprint: login, registration, profile management,
# online-status toggle and logout.

# Import all the required modules
from flask import Blueprint
from flask_sqlalchemy import SQLAlchemy
from .forms import *
from . import *
from wtforms import ValidationError, validators
from main_app import db, bcrypt, login_manager
from flask import current_app
from flask_login import (
    UserMixin,
    login_required,
    login_user,
    LoginManager,
    current_user,
    logout_user,
    login_required,
)
from flask import (
    Flask,
    render_template,
    request,
    redirect,
    flash,
    url_for,
    abort,
    send_from_directory,
)
from werkzeug.routing import BuildError
from sqlalchemy.exc import (
    IntegrityError,
    DataError,
    DatabaseError,
    InterfaceError,
    InvalidRequestError,
)
from geopy.exc import GeocoderUnavailable
from utils import *
from flask_bcrypt import generate_password_hash, check_password_hash
from models import *
from geopy.geocoders import Nominatim
# End of imports

# Register a blueprint for authentication; all routes are mounted under /0
auth = Blueprint("auth", __name__, url_prefix="/0")


# Flask-Login user loader: given the session's stored id, return the
# matching User object (or None).
@login_manager.user_loader
def load_user(user_id):
    return User.query.get(int(user_id))


# Login route
@auth.route("/login/", methods=("GET", "POST"), strict_slashes=False)
def login():
    # Instantiate the login form
    form = login_form()
    # If the form validates on submission, attempt authentication
    if form.validate_on_submit():
        try:
            # Look up the user by the submitted email
            user = User.query.filter_by(email=form.email.data).first()
            # Check that the submitted password matches the stored hash
            if check_password_hash(user.pwd, form.pwd.data):
                # Log the user in and redirect to their account
                login_user(user)
                return redirect(url_for('dashboard'))
            else:
                flash("Invalid Username or password!", "danger")
        # NOTE(review): bare except hides all failures, including
        # AttributeError when no user matches the email (user is None)
        # and database errors — consider catching specific exceptions.
        except:
            flash("Invalid Username or password!", "danger")
    return render_template("auth.html",
        form=form,
        legend="Login",
        title="MeDOC | Login",
        action="Login"
        )


# Register route
@auth.route("/register/", methods=("GET", "POST"), strict_slashes=False)
def register():
    # Instantiate the registration form
    form = register_form()
    # If the form validates on submission, create the account
    if form.validate_on_submit():
        try:
            # Pull the user's input out of the form fields
            pwd = form.pwd.data
            email = form.email.data
            lname = form.lname.data
            fname = form.fname.data
            location = form.location.data
            sex = form.sex.data
            speciality = form.speciality.data
            # Build the new user; the password is stored as a bcrypt hash
            newuser = User(
                email=email,
                lname=lname,
                fname=fname,
                location=location,
                sex=sex,
                speciality=speciality,
                pwd=bcrypt.generate_password_hash(pwd),
            )
            # Add user to the database and save
            db.session.add(newuser)
            db.session.commit()
            # Show a success message and send the user to the login page
            flash(f"Account Succesfully created", "success")
            return redirect(url_for("auth.login"))
        # Handle exceptions: roll back the session and surface a message
        except InvalidRequestError:
            db.session.rollback()
            flash(f"Something went wrong!", "danger")
        except IntegrityError:
            db.session.rollback()
            flash(f"User already exists!.", "warning")
        except DataError:
            db.session.rollback()
            flash(f"Invalid Entry", "warning")
        except InterfaceError:
            db.session.rollback()
            flash(f"Error connecting to the database", "danger")
        except DatabaseError:
            db.session.rollback()
            flash(f"Error connecting to the database", "danger")
        except BuildError:
            db.session.rollback()
            flash(f"An error occured !", "danger")
    # register function return statement
    return render_template("auth.html",
        form=form,
        legend="Create account",
        title="MeDOC | Register",
        action="Register account"
        )


# User account route
@auth.route("/account/", methods=("GET", "POST"), strict_slashes=False)
# Require the user to login first
@login_required
def account():
    # Instantiate the profile-update form
    form = UpdateProfile()
    # Disabled geocoding of the user's stored location:
    # address= current_user.location
    # geolocator = Nominatim(user_agent="MeDoc")
    # Geocode the address
    # location_coordinates = geolocator.geocode(address)
    # Store the longitude and latitude as a list
    # location_coordinates = list((location_coordinates.latitude, location_coordinates.longitude))
    if form.validate_on_submit():
        # If form is validated during submission, apply the update
        try:
            # if the user has provided a profile image, upload it and
            # point the user record at the stored file
            if form.profileImg.data:
                picture_file = upload_img(form.profileImg.data)
                current_user.image = picture_file
            # Update the current user details with the new data
            current_user.email = form.email.data
            current_user.fname = form.fname.data
            current_user.lname = form.lname.data
            current_user.location = form.location.data
            current_user.speciality = form.speciality.data
            # save the changes
            db.session.commit()
            # Flash a success message and redirect back
            flash("Your profile has been updated", "success")
            return redirect(url_for("auth.account"))
        # Handle exceptions: roll back the session and surface a message
        except InvalidRequestError:
            db.session.rollback()
            flash(f"Something went wrong!", "danger")
        except IntegrityError:
            db.session.rollback()
            flash(f"User already exists!.", "warning")
        except DataError:
            db.session.rollback()
            flash(f"Invalid Entry", "warning")
        except DatabaseError:
            db.session.rollback()
            flash(f"Error connecting to the database", "danger")
        except BuildError:
            db.session.rollback()
            flash(f"An error occured !", "danger")
        except GeocoderUnavailable:
            db.session.rollback()
            flash(f"Network Error while fetching your location!", "danger")
    # Unless the request is a post request, pre-fill the input fields
    # with the current user's details
    elif request.method == "GET":
        form.email.data = current_user.email
        form.fname.data = current_user.fname
        form.lname.data = current_user.lname
        form.location.data = current_user.location
        form.speciality.data = current_user.speciality
        form.profileImg.data = current_user.image
    # Function return statement
    return render_template(
        "account.html",
        form=form,
        #location_coordinates=location_coordinates
    )


# Use this route to toggle the user online and offline status
@auth.route("/status", methods=("GET", "POST"), strict_slashes=False,)
# Require the user to login first
@login_required
def toggle_status():
    # Get the current user
    user = User.query.filter_by(id=current_user.id).first()
    # Toggle the status to the opposite of the current status
    user.status = not user.status
    # Save these changes
    db.session.commit()
    # Redirect back the user
    return redirect(url_for('auth.account'))


# Logout user route
@auth.route("/logout")
# Require the user to login first
@login_required
def logout():
    # End the session and return to the landing page
    logout_user()
    return redirect(url_for('index'))
# -*- coding: utf-8 -*-
#
# K2HR3 OpenStack Notification Listener
#
# Copyright 2018 Yahoo! Japan Corporation.
#
# K2HR3 is K2hdkc based Resource and Roles and policy Rules, gathers
# common management information for the cloud.
# K2HR3 can dynamically manage information as "who", "what", "operate".
# These are stored as roles, resources, policies in K2hdkc, and the
# client system can dynamically read and modify these information.
#
# For the full copyright and license information, please view
# the licenses file that was distributed with this source code.
#
# AUTHOR:   Hirotaka Wakabayashi
# CREATE:   Tue Sep 11 2018
# REVISION:
#
"""K2hr3 OpenStack Notification message Listener."""
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

__all__ = [
    'K2hr3Conf',
    'K2hr3ConfError',
    'K2hr3NotificationEndpoint',
    'K2hr3NotificationEndpointError',
    'listen',
    'main',
    'version',
]
__author__ = 'Hirotaka Wakabayashi <hiwakaba@yahoo-corp.jp>'
__version__ = '1.0.0'

import argparse
import logging
from logging.handlers import TimedRotatingFileHandler
from logging import StreamHandler
from pathlib import Path
import sys
import time
from typing import List, Set, Dict, Tuple, Optional  # noqa: pylint: disable=unused-import

import oslo_config
import oslo_messaging

from k2hr3_osnl.cfg import K2hr3Conf
from k2hr3_osnl.exceptions import K2hr3Error, K2hr3ConfError, K2hr3NotificationEndpointError
from k2hr3_osnl.endpoint import K2hr3NotificationEndpoint

LOG = logging.getLogger(__name__)

if sys.platform.startswith('win'):
    raise ImportError(r'Currently we do not test well on windows')


def version() -> str:
    """Returns a version of k2hr3_osnl package.

    :returns: version
    :rtype: str
    """
    return __version__


def main() -> int:
    """Runs a oslo_messaging notification listener for k2hr3.

    You can configure the listener by the config file.

    Simple usage:

    $ k2hr3_osnl -c etc/k2hr3_osnl.config

    NOTE(review): despite the declared int return, every path either calls
    sys.exit() or raises — confirm whether callers rely on a return value.

    :returns: 0 if success, otherwise 1.
    :rtype: int
    """
    parser = argparse.ArgumentParser(
        description='An oslo.messaging notification listener for k2hr3.')
    parser.add_argument(
        '-c',
        '--config-file',
        dest='config_file',
        default='/etc/k2hr3/k2hr3_osnl.conf',
        help='config file path')
    parser.add_argument(
        '-d',
        dest='debug_level',
        choices=('debug', 'info', 'warn', 'error', 'critical'),
        help='debug level. default: defined in the config_file')
    parser.add_argument(
        '-l',
        dest='libs_debug_level',
        choices=('debug', 'info', 'warn', 'error', 'critical'),
        help='dependent libraries loglevel. default: defined in the config_file'
    )
    parser.add_argument(
        '-f',
        dest='log_file',
        help='log file path. default: defined in the config_file')
    parser.add_argument(
        '-v',
        action='version',
        version='%(prog)s ' + __version__)
    args = parser.parse_args()

    try:
        conf = K2hr3Conf(Path(args.config_file))
        _configure_logger(args, conf)  # logger configured by args and conf.
        endpoints = [K2hr3NotificationEndpoint(conf)]
        sys.exit(listen(endpoints))
    except K2hr3Error as error:
        LOG.error('K2hr3Error error, %s', error)
        raise K2hr3Error("K2hr3 RuntimeError") from error
    except Exception as error:
        LOG.error('Unknown error, %s', error)
        raise RuntimeError("Unknown RuntimeError") from error


# Maps the level names accepted on the command line / in the config file
# onto the logging module's numeric levels.
_nametolevel = {
    'error': logging.ERROR,
    'warn': logging.WARNING,
    'info': logging.INFO,
    'debug': logging.DEBUG,
    'notset': logging.NOTSET
}


def _configure_logger(args, conf) -> bool:
    """Configures logger settings by args and conf.

    :param args: command line args
    :type argparse: command line args
    :param conf: configuration
    :type K2hr3Conf: configuration
    :returns: True if success, otherwise False
    :rtype: bool
    """
    # We prefer args than configuration file.
    # 1. debug_level
    debug_level = logging.WARNING
    if args.debug_level is not None:
        debug_level = _nametolevel.get(args.debug_level, logging.WARNING)
    else:
        debug_level = _nametolevel.get(conf.debug_level, logging.WARNING)
    LOG.setLevel(debug_level)

    # 2. formatter
    formatter = logging.Formatter(
        '%(asctime)-15s %(levelname)s %(name)s:%(lineno)d %(message)s'
    )  # hardcoding

    # 3. log_file: command-line path wins; otherwise the configured file,
    # with 'sys.stderr' as a sentinel meaning log to stderr.
    if args.log_file is not None:
        # check the permission of the destination file.
        # if unable to open it, use default(stderr).
        # Add the log message handler to the logger
        handler = TimedRotatingFileHandler(
            args.log_file, when='midnight', encoding='UTF-8', backupCount=31)
        handler.setFormatter(formatter)
        LOG.addHandler(handler)
    else:
        if conf.log_file == 'sys.stderr':
            stream_handler = StreamHandler(sys.stderr)
            stream_handler.setFormatter(formatter)
            LOG.addHandler(stream_handler)
        else:
            # Add the log message handler to the logger
            handler = TimedRotatingFileHandler(
                conf.log_file,
                when='midnight',
                encoding='UTF-8',
                backupCount=31)
            handler.setFormatter(formatter)
            LOG.addHandler(handler)

    # 4. libs_debug_level: quieten (or raise) the messaging libraries'
    # own loggers independently of our level.
    libs_debug_level = logging.WARNING
    if args.libs_debug_level is not None:
        libs_debug_level = _nametolevel.get(args.libs_debug_level,
                                            logging.WARNING)
    else:
        libs_debug_level = _nametolevel.get(conf.libs_debug_level,
                                            logging.WARNING)
    libs = [
        'stevedore.extension', 'oslo.messaging._drivers.pool',
        'oslo.messaging._drivers.impl_rabbit', 'amqp'
    ]
    for i in libs:
        logging.getLogger(i).setLevel(libs_debug_level)
    return True


def listen(endpoints: List[K2hr3NotificationEndpoint]) -> int:
    """Runs a oslo_messaging notification listener for k2hr3.

    This function is a library endpoint to start a oslo_messaging
    notification listener for k2hr3.

    :param endpoints: endpoint to be called by dispatcher when notification
        messages arrive.
    :type endpoints: list of K2hr3NotificationEndpoint
    :returns: 0 if success, otherwise 1.
    :rtype: int
    """
    # 1. validate endpoints
    if not isinstance(endpoints, list) or len(endpoints) == 0:
        LOG.error('invalid endpoints, %s', endpoints)
        return 1
    # 2. validate each endpoint
    for endpoint in endpoints:
        if not isinstance(endpoint, K2hr3NotificationEndpoint):
            LOG.error('found an invalid endpoint, %s', endpoint)
            return 1
        if not isinstance(endpoint.conf, K2hr3Conf):
            # this never happens.
            LOG.error('found an invalid conf in an endpoint, %s',
                      endpoint.conf)
            return 1
    # The listener below is configured from the last endpoint's conf.
    conf = endpoint.conf
    assert isinstance(conf, K2hr3Conf)

    try:
        # transport, targets
        transport = oslo_messaging.get_notification_transport(
            oslo_config.cfg.CONF,
            url=conf.oslo_messaging_notifications.transport_url)
        targets = [
            oslo_messaging.Target(
                topic=conf.oslo_messaging_notifications.topic,
                exchange=conf.oslo_messaging_notifications.exchange)
        ]
        listener = oslo_messaging.get_notification_listener(
            transport,
            targets,
            endpoints,
            pool=conf.oslo_messaging_notifications.pool,
            executor=conf.oslo_messaging_notifications.executor,
            allow_requeue=conf.oslo_messaging_notifications.allow_requeue)
        listener.start()
        LOG.info('Starting')
        # Block the main thread; the listener runs on its own executor.
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        LOG.info('Stopping')
        listener.stop()
        listener.wait()
    except NotImplementedError:
        LOG.error('allow_requeue is not supported by driver')
        return 1
    except oslo_messaging.ServerListenError as error:
        LOG.error('listener error, %s', error.msg)
        return 1
    return 0


if __name__ == "__main__":
    sys.exit(main())

#
# EOF
#
// @flow
// Tests for the screen-reader announcer: DOM lifecycle (mount/unmount)
// and the announce() behavior.
import createAnnouncer from '../../../src/view/announcer/announcer';
import type { Announcer } from '../../../src/view/announcer/announcer-types';

describe('mounting', () => {
  it('should not create a dom node before mount is called', () => {
    const announcer: Announcer = createAnnouncer();

    const el: ?HTMLElement = document.getElementById(announcer.id);
    expect(el).not.toBeTruthy();
  });

  it('should create a new element when mounting', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();

    const el: ?HTMLElement = document.getElementById(announcer.id);
    expect(el).toBeInstanceOf(HTMLElement);
  });

  it('should throw if attempting to double mount', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();

    expect(() => announcer.mount()).toThrow();
  });

  it('should apply the appropriate aria attributes and non visibility styles', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();

    const el: HTMLElement = (document.getElementById(announcer.id): any);

    expect(el.getAttribute('aria-live')).toBe('assertive');
    expect(el.getAttribute('role')).toBe('log');
    expect(el.getAttribute('aria-atomic')).toBe('true');

    // not checking all the styles - just enough to know we are doing something
    expect(el.style.overflow).toBe('hidden');
  });
});

describe('unmounting', () => {
  it('should remove the element when unmounting', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();
    announcer.unmount();

    const el: ?HTMLElement = document.getElementById(announcer.id);
    expect(el).not.toBeTruthy();
  });

  it('should throw if attempting to unmount before mounting', () => {
    const announcer: Announcer = createAnnouncer();

    expect(() => announcer.unmount()).toThrow();
  });

  it('should throw if unmounting after an unmount', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();
    announcer.unmount();

    expect(() => announcer.unmount()).toThrow();
  });
});

describe('announcing', () => {
  it('should warn if not mounted', () => {
    // silence the expected warning so test output stays clean
    jest.spyOn(console, 'warn').mockImplementation(() => {});
    const announcer: Announcer = createAnnouncer();

    announcer.announce('test');

    expect(console.warn).toHaveBeenCalled();
    console.warn.mockRestore();
  });

  it('should set the text content of the announcement element', () => {
    const announcer: Announcer = createAnnouncer();
    announcer.mount();
    const el: HTMLElement = (document.getElementById(announcer.id): any);

    announcer.announce('test');

    expect(el.textContent).toBe('test');
  });
});
# tks to https://github.com/pimoroni/bmp280-python from bmp280 import BMP280 try: from smbus2 import SMBus except ImportError: from smbus import SMBus class Bmp280: def __init__(self): self.bus = SMBus(1) self.bmp280 = BMP280(i2c_dev=self.bus) pass def get_temperature(self): return self.bmp280.get_temperature() def get_pressure(self): return self.bmp280.get_pressure() def get_altitude(self): baseline_values = [] baseline_size = 100 for i in range(baseline_size): pressure = self.bmp280.get_pressure() baseline_values.append(pressure) baseline = sum(baseline_values[:-25]) / len(baseline_values[:-25]) return self.bmp280.get_altitude(qnh=baseline) def get_data(self): return { "pressure": self.get_pressure(), "temperature": self.get_temperature(), "altitude": self.get_altitude() }
import React from "react" import PropTypes from "prop-types" import Helmet from "react-helmet" import { useStaticQuery, graphql } from "gatsby" import defaultImage from "../assets/Logo.svg" function SEO({ description, lang, meta, title, image, pagePath }) { const { site } = useStaticQuery( graphql` query { site { siteMetadata { title description author siteUrl } } } ` ) const metaDescription = description || site.siteMetadata.description let metaImage = defaultImage // if (image) { // metaImage = `${site.siteMetadata.siteUrl}${image}` // } // let metaImage let pageUrl if (pagePath) { pageUrl = `${site.siteMetadata.siteUrl}${pagePath}` } return ( <Helmet htmlAttributes={{ lang, }} title={title} titleTemplate={`%s | ${site.siteMetadata.title}`} meta={[ { name: `description`, content: metaDescription, }, { property: `og:image`, content: { metaImage }, }, { property: `og:title`, content: title, }, { property: `og:description`, content: metaDescription, }, { property: `og:type`, content: `website`, }, { property: `og:url`, content: pageUrl, }, { name: `twitter:card`, content: `summary_large_image`, }, { name: `twitter:creator`, content: site.siteMetadata.author, }, { name: `twitter:title`, content: title, }, { name: `twitter:description`, content: metaDescription, }, { name: `twitter:image:src`, content: { metaImage }, }, ].concat(meta)} /> ) } SEO.defaultProps = { lang: `en`, meta: [], description: ``, image: null, } SEO.propTypes = { description: PropTypes.string, lang: PropTypes.string, meta: PropTypes.arrayOf(PropTypes.object), title: PropTypes.string.isRequired, image: PropTypes.string, } export default SEO
# =============================================================================== # Copyright 2019 Jan Hendrickx and Gabriel Parrish # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== import os from datetime import datetime, timedelta # ============= standard library imports ======================== prism_directory = '/Users/dcadol/Desktop/academic_docs_II/calibration_approach/mini_model_inputs/' \ 'wjs_aoi/PRISM/precip/800m_std_all_original' first_day = 'precip_20000101.tif' output_directory = '/Users/dcadol/Desktop/academic_docs_II/calibration_approach/mini_model_inputs/' \ 'wjs_aoi/PRISM/precip/800m_std_all' for f in os.listdir(prism_directory): if f == first_day: print '*******' print 'do not do this one' print '*******' else: # chop up de stringy fname = f.split('_')[1] datestring = fname.split('.')[0] print '=====' print 'old datestirng {}'.format(datestring) # make into a datetime operation for ease of operations. f_dt = datetime.strptime(datestring, '%Y%m%d') # subtract a day new_dt = f_dt - timedelta(days=1) # new filename new_fname = 'precip_{}{:02d}{:02d}.tif'.format(new_dt.year, new_dt.month, new_dt.day) print 'new fname', new_fname os.rename(os.path.join(prism_directory, f), os.path.join(output_directory, new_fname))
from migrate.changeset import SQLA_10
"""
Safe quoting method
"""


def safe_quote(obj):
    """Return the quoting flag for *obj* across SQLAlchemy releases.

    SQLAlchemy 0.9 moved the flag onto ``obj.name.quote``; earlier releases
    keep it directly on ``obj.quote``.
    """
    name = getattr(obj, 'name', None)
    if name is not None and hasattr(name, 'quote'):
        # SQLAlchemy 0.9 style: the flag lives on the name object.
        return name.quote
    return obj.quote


def fk_column_names(constraint):
    """List the names of the local columns of a foreign-key *constraint*.

    SQLAlchemy 1.0 exposes them through ``column_keys``; older versions go
    through the constraint's ``elements``.
    """
    if SQLA_10:
        names = [constraint.columns[key].name for key in constraint.column_keys]
    else:
        names = [element.parent.name for element in constraint.elements]
    return names
var express = require('express'); var app = express(); var session = require('express-session'); var cookieParser = require('cookie-parser'); var bodyParser = require('body-parser'); //Cookie and Session 的基础功能 app.use(cookieParser()); app.use(bodyParser.urlencoded({ extended: false, })); app.use(bodyParser.json()); const UUID = require('uuid'); app.use(session({ secret: UUID.v4(), name: 'IFM_SESSION', //1小时 cookie: { maxAge: 1000 * 60 * 60 }, resave: true, saveUninitialized: true, })); app.use('/public', express.static('./public')); const baseRoute = require('./controller/function'); const authRoute = require('./controller/auth'); app.use('/fs_auth', authRoute); //必须先进行登陆 app.use(['/fs', '/public'], function (req, res, next) { if (req.session.fsos) { next(); return true; } res.status(403).send('禁止访问:权限不足!'); }); app.use('/fs', baseRoute); const server = app.listen(3000, function () { // const host = server.address().address; const port = server.address().port; console.log(' - 演示项目运行'); console.log(' - 访问即可使用与体验: http://localhost:%s/fs_auth/auth/foo', port); });
""" Copyright (c) 2016-2020 Keith Sterling http://www.keithsterling.com Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from programy.utils.logging.ylogger import YLogger from programy.parser.template.nodes.indexed import TemplateIndexedNode ###################################################################################################################### # # <response index=”n”/> is replaced with the value of the nth previous multi-sentence bot response.. # The response element returns the bot’s response specified by its historical index value. 
#
class TemplateResponseNode(TemplateIndexedNode):
    """AIML <response index="n"/> template node.

    Resolves to the bot's nth previous multi-sentence response in the
    current conversation.
    """

    def __init__(self, index=1):
        TemplateIndexedNode.__init__(self, index)

    def resolve_to_string(self, client_context):
        # The index child may itself be a template expression, so resolve it
        # first and coerce to an int before looking up conversation history.
        nth = int(self.index.resolve(client_context))
        conversation = client_context.bot.get_conversation(client_context)
        previous_question = conversation.previous_nth_question(nth)
        resolved = previous_question.combine_answers()
        YLogger.debug(client_context, "[%s] resolved to [%s]", self.to_string(), resolved)
        return resolved

    def to_string(self):
        return "[RESPONSE" + self.index.to_string() + "]"

    def to_xml(self, client_context):
        return '<response index="' + self.index.to_xml(client_context) + '"></response>'

    #######################################################################################################
    # RESPONSE_EXPRESSION ::== <response( INDEX_ATTRIBUTE)/> | <response><index>TEMPLATE_EXPRESSION</index></response>

    def parse_expression(self, graph, expression):
        # Accept the index either as an attribute or a child node, default "1".
        self._parse_node_with_attrib(graph, expression, "index", "1")
        if self.children:
            YLogger.warning(self, "<response> node should not contain child text, use <response /> "
                                  "or <response></response> only")
import typing
import datetime
from redbot.core import commands
from copy import copy
import asyncio

# Credits:
# The idea for this cog came from @Jack1142. This PR will take time, so I'm making it. If one day this one is integrated into Red, this cog may make it easier to manage. (https://github.com/Cog-Creators/Red-DiscordBot/pull/5419)
# Thanks to @epic guy on Discord for the basic syntax (command groups, commands) and also commands (await ctx.send, await ctx.author.send, await ctx.message.delete())!
# Thanks to the developers of the cogs I added features to as it taught me how to make a cog! (Chessgame by WildStriker, Captcha by Kreusada, Speak by Epic guy and Rommer by Dav)
# Thanks to all the people who helped me with some commands in the #coding channel of the redbot support server!


class Sudo(commands.Cog):
    """A cog to allow bot owners to be normal users in terms of permissions!"""

    def __init__(self, bot):
        # On load: remember the real owner ids, then strip owner status from
        # everyone so owners behave as normal users until they `su`.
        self.bot = bot
        self.all_owner_ids = copy(self.bot.owner_ids)
        self.bot.owner_ids.clear()

    def cog_unload(self):
        # On unload: restore the saved owner ids so the bot returns to normal.
        self.bot.owner_ids.update(copy(self.all_owner_ids))
        self.all_owner_ids.clear()

    # Check factory evaluated at class-creation time (hence no `self`):
    # - all_owner_ids=True  -> allow anyone who was an owner before the cog loaded
    # - bot_owner_ids=True  -> allow only currently-elevated (sudo'ed) owners
    def decorator(all_owner_ids: typing.Optional[bool], bot_owner_ids: typing.Optional[bool]):
        async def pred(ctx):
            if all_owner_ids:
                # Look up the saved owner set on the live cog instance.
                if ctx.author.id in ctx.bot.get_cog("Sudo").all_owner_ids:
                    return True
            if not all_owner_ids:
                if bot_owner_ids:
                    if ctx.author.id in ctx.bot.owner_ids:
                        return True
            return False
        return commands.check(pred)

    @decorator(all_owner_ids=True, bot_owner_ids=False)
    @commands.command()
    async def su(self, ctx):
        """Sudo as the owner of the bot.
        """
        # Re-grant owner permissions to the caller until `unsu`.
        ctx.bot.owner_ids.add(ctx.author.id)
        await ctx.tick()

    @decorator(all_owner_ids=False, bot_owner_ids=True)
    @commands.command()
    async def unsu(self, ctx):
        """Unsudo as normal user.
        """
        # Drop the caller's elevated status again.
        ctx.bot.owner_ids.remove(ctx.author.id)
        await ctx.tick()

    @decorator(all_owner_ids=True, bot_owner_ids=False)
    @commands.command()
    async def sudo(self, ctx, *, command: str):
        """Rise as the bot owner for the specified command only.
        """
        # Re-dispatch the given command text as if the caller had typed it,
        # with owner status held only for the duration of that invocation.
        msg = ctx.message
        msg.content = f"{ctx.prefix}{command}"
        ctx.bot.owner_ids.add(ctx.author.id)
        new_ctx = await ctx.bot.get_context(msg)
        await ctx.bot.invoke(new_ctx)
        # Guard: the invoked command may have unloaded this cog (which
        # restores owner ids itself); only de-elevate if we are still loaded.
        if ctx.bot.get_cog("Sudo") is not None:
            ctx.bot.owner_ids.remove(ctx.author.id)

    @decorator(all_owner_ids=True, bot_owner_ids=False)
    @commands.command()
    async def sutimeout(
        self,
        ctx,
        *,
        interval: commands.TimedeltaConverter(
            minimum=datetime.timedelta(seconds=10),
            maximum=datetime.timedelta(days=1),
            default_unit="minutes",
        ) = datetime.timedelta(minutes=5),
    ):
        """Sudo as the owner of the bot for the specified timeout.
        """
        # Elevate, sleep for the requested interval (10s..1d, default 5min),
        # then automatically de-elevate unless the cog was unloaded meanwhile.
        sleep = interval.total_seconds()
        ctx.bot.owner_ids.add(ctx.author.id)
        await asyncio.sleep(sleep)
        if ctx.bot.get_cog("Sudo") is not None:
            ctx.bot.owner_ids.remove(ctx.author.id)
            await ctx.tick()
// Toggle the visibility of the responsive navigation links menu.
function myMenus() {
  var navLinks = document.getElementById("nav-links");
  navLinks.style.display = navLinks.style.display === "block" ? "none" : "block";
}
### Usage python this_program.py pdb.list ###
# Python 2 analysis script: for every pair of scoring-function result
# directories in the CWD, compare which benchmark PDB cases each one scores
# as not-'incorr' within its top-10 ranked configurations, build symmetric
# difference / union count matrices, save them as CSV, and plot a clustered
# heatmap with dendrogram for each matrix.


def no_repeat(lista):
    # Return the unique elements of `lista` as a set (equivalent to set(lista)).
    unico = set()
    for m in lista:
        if m not in unico:
            unico.add(m)
    return unico

#from libpydock.util.Table import Table
#from numpy import array
import numpy as np
import os
import sys
import scipy
import pylab
import scipy.cluster.hierarchy as sch
#import scipy
#D = scipy.zeros([73,73])
#INPUT =[m.strip() for m in open(sys.argv[1]).readlines()]
#print INPUT

# Each subdirectory of the CWD is one scoring function's result set;
# sys.argv[1] is a file listing the benchmark PDB case ids to consider.
DIR = os.listdir('.')
PDB_TO_READ = [x.strip() for x in open(sys.argv[1]).readlines()]
#table = [m [ for n in DIR if os.path.isdir(n)] for m in DIR if os.path.isdir(m) ]
#table = [m for m in DIR if os.path.isdir(m) if m in INPUT]
table = [m for m in DIR if os.path.isdir(m) ]
#table = [[m,n] for m in DIR if os.path.isdir(m) for n in DIR if os.path.isdir(n)]
print len(table)
print table
#table2 = [[(table[i],table[j])] for i in range(len(table)) for j in range(len(table))]
#table2 =[[]for i in range(len(table)) for j in range(len(table))]
#table_simetric= [[] for j,n in enumerate(table) ] for i,m in enumerate(table) ]
#table_unison = [[] for j,n in enumerate(table) ] for i,m in enumerate(table) ]

# table2[i][j] holds the (dir_i, dir_j) pair; the two score matrices are
# symmetric len(table) x len(table) arrays.
table2= [[[] for j,n in enumerate(table) ] for i,m in enumerate(table) ]
table_unison = scipy.zeros([len(table),len(table)])
table_simetric = scipy.zeros([len(table),len(table)])
#print table2
for i,m in enumerate(table):
    for j,n in enumerate(table):
        table2[i][j]=((m,n))
#        print i,j
#print table2

# For every directory pair, collect the benchmark cases each side counts as
# "successful" (any non-'incorr' CAPRI label in rows 2:12 of its .ene file).
for i,m in enumerate(table2):
    for j,n in enumerate(m):
#        print n[0],n[1]
        PART_ONE, PART_TWO = set(),set()
        ONE = os.listdir(n[0])
        TWO = os.listdir(n[1])
#        print ind,jnd
        for archive in ONE:
#            print n[0],archive
            if '.ene' in archive:
                BM_CASE = archive.split('.')[0].split('_')[-1]####CP_BFKV_3SGQ.ene
                # Only the top-10 ranked rows (file lines 2:12) are inspected.
                ENE1= open('%s/%s'%(n[0], archive)).readlines()[2:12]
                if BM_CASE in PDB_TO_READ :
                    for lines in ENE1:
                        CONFIG,CAPRI = lines.split()[0],lines.split()[-3]
                        if CAPRI != 'incorr':
#                            if BM_CASE not in PART_ONE :
                            PART_ONE.add (BM_CASE)
#                            break
#                        else :
#                            break
        for archive2 in TWO :
            if '.ene' in archive2:
                BM_CASE2 = archive2.split('.')[0].split('_')[-1]####CP_BFKV_3SGQ.ene
                ENE12= open('%s/%s'%(n[1], archive2)).readlines()[2:12]
#                print n[1],archive2
                if BM_CASE2 in PDB_TO_READ :
                    for lines in ENE12:
                        CONFIG,CAPRI = lines.split()[0],lines.split()[-3]
                        if CAPRI != 'incorr':
#                            if BM_CASE2 not in PART_TWO :
                            PART_TWO.add (BM_CASE2)
#                            break
#                        else:
#                            break
#        print archive2

        # Cases found by exactly one of the pair (^) and by either (|);
        # their counts fill the symmetric matrices.
        simetric_difference= no_repeat(PART_ONE) ^ no_repeat(PART_TWO)
        unison = no_repeat(PART_ONE) | no_repeat(PART_TWO)
#        simetric_difference= PART_ONE ^ PART_TWO
#        unison = PART_ONE & PART_TWO
#        table_simetric[i][j].append(len(simetric_difference))
#        table_unison[i][j].append(len(unison))
        table_simetric[i,j]= table_simetric[j,i]=len(simetric_difference)
        table_unison[i,j] = table_unison[j,i]=len(unison)
#print table_simetric
#print
#print
#print table_unison

np.savetxt('table_simetric_all_sf_bm5_ftdock.txt', table_simetric, delimiter=',')
#print table_simetric
#print
#print
np.savetxt('table_unison_all_sf_bm5_ftdock.txt', table_unison, delimiter=',')

# Cluster and plot each matrix: first the union matrix, then the symmetric
# difference matrix; `label` distinguishes the output filenames.
for num,D in enumerate([table_unison,table_simetric]):
#D = table_unison
    if num == 0:
        label= '_unison'
    if num ==1:
        label = '_simetric'

    # Compute and plot first dendrogram.
    print 'computing dendogram'
    fig = pylab.figure(figsize=(20,20))
    #ax1 = fig.add_axes([0.09,0.1,0.2,0.6])### original
    ax1 = fig.add_axes([0.3,0.8,0.6,0.15]) ### modified , de este hay que modificar bottom ,height
    Y = sch.linkage(D, method='single')
#    Y = sch.linkage(Y, method='centroid')
    #Z1 = sch.dendrogram(Y, orientation='right',labels=array(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC']))
    Z1 = sch.dendrogram(Y)#,labels=array(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC']))
    ax1.set_xticks([])
    ax1.set_yticks([])
    #ax1.set_xticks(range(len(table)))
    #ax1.set_yticks(range(len(table)))
    #ax1.set_xticklabels(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC'])
    #ax1.set_yticklabels(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC'])
    #
    ### Compute and plot second dendrogram.
    #qx2 = fig.add_axes([0.3,0.71,0.6,0.2])
    #ax2 = fig.add_axes([0.09,0.05,0.2,0.3]) ##[left, bottom, width, height]
#    ax2 = fig.add_axes([0.065,0.1,0.14,0.6]) ### de este hay que modificar letf , width
#    Y = sch.linkage(D, method='single')
#    Z2 = sch.dendrogram(Y,orientation='right')#, labels=array(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC']))
    #Z2 = sch.dendrogram(Y, labels=array(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC']))
#    ax2.set_xticks([])
#    ax2.set_yticks([])
    #ax2.set_xticklabels(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC'])
    #ax2.set_yticklabels(['CP_DECK', 'AP_dDFIRE', 'AP_PISA', 'CP_BFKV', 'CP_TB', 'pydock', 'CP_RMFCA', 'CP_HLPL', 'CP_TSC'])
    #
    ### Plot distance matrix, rows/columns reordered by the dendrogram leaves.
    axmatrix = fig.add_axes([0.3,0.1,0.6,0.6])
    idx1 = Z1['leaves']
#    idx2 = Z2['leaves']
    D = D[idx1,:]
#    D = D[:,idx2]
    D = D[:,idx1]
    ##im = axmatrix.matshow(D, aspect='auto', origin='lower', cmap=pylab.cm.YlGnBu)
    im = axmatrix.matshow(D, aspect='auto', origin='lower')
    axmatrix.set_xticks([])
    axmatrix.set_yticks([])
    #
    ##print idx1
    ##print label1
    ## Plot axes labels: directory names in leaf order.
    label1 = [table[m] for m in idx1 ]
#    label2 = [table[m] for m in idx2 ]
    axmatrix.set_xticks(range(len(table)))
    axmatrix.set_xticklabels(label1, minor=False)
    axmatrix.xaxis.set_label_position('top')
    axmatrix.set_yticks(range(len(table)))
    axmatrix.set_yticklabels(label1, minor=False)
#    axmatrix.set_yticklabels(label2, minor=False)
    axmatrix.yaxis.set_label_position('left')
    #axmatrix.yaxis.tick_right()
    #axmatrix.xaxis.tick_bottom()
    #
    pylab.xticks(rotation=-90,fontsize=10)
    pylab.yticks(fontsize=10)
    #
    #label2 = [table[m] for m in idx2 ]
    #
    #axmatrix.set_yticks(range(len(table)))
    #axmatrix.set_yticklabels(label2, minor=False)
    #axmatrix.yaxis.set_label_position('left')
    #axmatrix.yaxis.tick_right()
    #
    #axcolor = fig.add_axes([0.94,0.1,0.02,0.6])
    #
    #
    ## Plot colorbar.
    axcolor = fig.add_axes([0.92,0.1,0.02,0.6])
    pylab.colorbar(im, cax=axcolor)
    ##fig.show()
    fig.savefig('dendrogram_FTdock_all_bm5%s.png'%label)
// The Vue build version to load with the `import` command // (runtime-only or standalone) has been set in webpack.base.conf with an alias. import Vue from 'vue' import Vuex from 'vuex' import VueI18n from 'vue-i18n' import fullscreen from 'vue-fullscreen' import VueClipboard from 'vue-clipboard2' import 'element-ui/lib/theme-chalk/index.css' import EditorPage from './EditorPage' import router from './editor.router' import { editorModule, userModule, gitlabModule, createPersistedState } from '@/store' import ElementUI from 'element-ui' import { broadcast } from 'vuex-iframe-sync' import { messages as i18nMessages, locale } from '@/lib/utils/i18n' import handleMessage from '@/lib/iframe' window.addEventListener('message', handleMessage) Vue.use(fullscreen) Vue.use(VueClipboard) Vue.config.productionTip = false Vue.use(Vuex) Vue.use(VueI18n) const i18n = new VueI18n({ locale, messages: i18nMessages }) Vue.use(ElementUI, { i18n: (key, value) => i18n.t(key, value) }) const store = new Vuex.Store({ modules: { user: userModule, gitlab: gitlabModule, editor: editorModule }, plugins: [ createPersistedState({ paths: ['user', 'editor'] // , 'gitlab'] }), broadcast('frameViewport') ] }) /* eslint-disable no-new */ new Vue({ el: '#editor', router, store, i18n, components: { EditorPage }, template: '<EditorPage/>' })
// [VexFlow](http://vexflow.com) - Copyright (c) Mohit Muthanna 2010.
// @author Mohit Cheppudira
// @author Greg Ristow (modifications)
//
// ## Description
//
// This file implements accidentals as modifiers that can be attached to
// notes. Support is included for both western and microtonal accidentals.
//
// See `tests/accidental_tests.js` for usage examples.

import { Vex } from './vex';
import { Fraction } from './fraction';
import { Flow } from './tables';
import { Music } from './music';
import { Modifier } from './modifier';
import { Glyph } from './glyph';

// To enable logging for this class. Set `Vex.Flow.Accidental.DEBUG` to `true`.
function L(...args) { if (Accidental.DEBUG) Vex.L('Vex.Flow.Accidental', args); }

const getGlyphWidth = glyph => glyph.getMetrics().width;

// An `Accidental` inherits from `Modifier`, and is formatted within a
// `ModifierContext`.
export class Accidental extends Modifier {
  static get CATEGORY() { return 'accidentals'; }

  // Arrange accidentals inside a ModifierContext.
  // `state.left_shift` is read as the starting offset and increased by the
  // total width this formatting consumes.
  static format(accidentals, state) {
    const noteheadAccidentalPadding = 1;
    const leftShift = state.left_shift + noteheadAccidentalPadding;
    const accidentalSpacing = 3;

    // If there are no accidentals, we needn't format their positions
    if (!accidentals || accidentals.length === 0) return;

    const accList = [];
    let prevNote = null;
    let shiftL = 0;

    // First determine the accidentals' Y positions from the note.keys
    let propsTemp;
    for (let i = 0; i < accidentals.length; ++i) {
      const acc = accidentals[i];
      const note = acc.getNote();
      const stave = note.getStave();
      const props = note.getKeyProps()[acc.getIndex()];
      if (note !== prevNote) {
        // Iterate through all notes to get the displaced pixels
        for (let n = 0; n < note.keys.length; ++n) {
          propsTemp = note.getKeyProps()[n];
          shiftL = propsTemp.displaced ? note.getExtraLeftPx() : shiftL;
        }
        prevNote = note;
      }
      if (stave !== null) {
        // With a stave, quantize the y position to half-line resolution.
        const lineSpace = stave.options.spacing_between_lines_px;
        const y = stave.getYForLine(props.line);
        const accLine = Math.round(y / lineSpace * 2) / 2;
        accList.push({ y, line: accLine, shift: shiftL, acc, lineSpace });
      } else {
        accList.push({ line: props.line, shift: shiftL, acc });
      }
    }

    // Sort accidentals by line number.
    accList.sort((a, b) => b.line - a.line);

    // FIXME: Confusing name. Each object in this array has a property called `line`.
    // So if this is a list of lines, you end up with: `line.line` which is very awkward.
    const lineList = [];

    // amount by which all accidentals must be shifted right or left for
    // stem flipping, notehead shifting concerns.
    let accShift = 0;
    let previousLine = null;

    // Create an array of unique line numbers (lineList) from accList
    for (let i = 0; i < accList.length; i++) {
      const acc = accList[i];

      // if this is the first line, or a new line, add a lineList
      if (previousLine === null || previousLine !== acc.line) {
        lineList.push({
          line: acc.line,
          flatLine: true,
          dblSharpLine: true,
          numAcc: 0,
          width: 0,
        });
      }
      // if this accidental is not a flat, the accidental needs 3.0 lines lower
      // clearance instead of 2.5 lines for b or bb.
      // FIXME: Naming could use work. acc.acc is very awkward
      if (acc.acc.type !== 'b' && acc.acc.type !== 'bb') {
        lineList[lineList.length - 1].flatLine = false;
      }

      // if this accidental is not a double sharp, the accidental needs 3.0 lines above
      if (acc.acc.type !== '##') {
        lineList[lineList.length - 1].dblSharpLine = false;
      }

      // Track how many accidentals are on this line:
      lineList[lineList.length - 1].numAcc++;

      // Track the total x_offset needed for this line which will be needed
      // for formatting lines w/ multiple accidentals:

      // width = accidental width + universal spacing between accidentals
      lineList[lineList.length - 1].width += acc.acc.getWidth() + accidentalSpacing;

      // if this accShift is larger, use it to keep first column accidentals in the same line
      accShift = acc.shift > accShift ? acc.shift : accShift;

      previousLine = acc.line;
    }

    // ### Place Accidentals in Columns
    //
    // Default to a classic triangular layout (middle accidental farthest left),
    // but follow exceptions as outlined in G. Read's _Music Notation_ and
    // Elaine Gould's _Behind Bars_.
    //
    // Additionally, this implements different vertical collision rules for
    // flats (only need 2.5 lines clearance below) and double sharps (only
    // need 2.5 lines of clearance above or below).
    //
    // Classic layouts and exception patterns are found in the 'tables.js'
    // in 'Vex.Flow.accidentalColumnsTable'
    //
    // Beyond 6 vertical accidentals, default to the parallel ascending lines approach,
    // using as few columns as possible for the verticle structure.
    //
    // TODO (?): Allow column to be specified for an accidental at run-time?

    let totalColumns = 0;

    // establish the boundaries for a group of notes with clashing accidentals:
    for (let i = 0; i < lineList.length; i++) {
      let noFurtherConflicts = false;
      const groupStart = i;
      let groupEnd = i;

      while (groupEnd + 1 < lineList.length && !noFurtherConflicts) {
        // if this note conflicts with the next:
        if (this.checkCollision(lineList[groupEnd], lineList[groupEnd + 1])) {
          // include the next note in the group:
          groupEnd++;
        } else {
          noFurtherConflicts = true;
        }
      }

      // Gets an a line from the `lineList`, relative to the current group
      const getGroupLine = (index) => lineList[groupStart + index];
      const getGroupLines = (indexes) => indexes.map(getGroupLine);
      const lineDifference = (indexA, indexB) => {
        const [a, b] = getGroupLines([indexA, indexB]).map(item => item.line);
        return a - b;
      };

      const notColliding = (...indexPairs) =>
        indexPairs
          .map(getGroupLines)
          .every(lines => !this.checkCollision(...lines));

      // Set columns for the lines in this group:
      const groupLength = groupEnd - groupStart + 1;

      // Set the accidental column for each line of the group
      let endCase = this.checkCollision(lineList[groupStart], lineList[groupEnd]) ? 'a' : 'b';

      // Exception layouts keyed by group size (see accidentalColumnsTable).
      switch (groupLength) {
        case 3:
          if (endCase === 'a' &&
              lineDifference(1, 2) === 0.5 &&
              lineDifference(0, 1) !== 0.5) {
            endCase = 'second_on_bottom';
          }
          break;
        case 4:
          if (notColliding([0, 2], [1, 3])) {
            endCase = 'spaced_out_tetrachord';
          }
          break;
        case 5:
          if (endCase === 'b' && notColliding([1, 3])) {
            endCase = 'spaced_out_pentachord';
            if (notColliding([0, 2], [2, 4])) {
              endCase = 'very_spaced_out_pentachord';
            }
          }
          break;
        case 6:
          if (notColliding([0, 3], [1, 4], [2, 5])) {
            endCase = 'spaced_out_hexachord';
          }
          if (notColliding([0, 2], [2, 4], [1, 3], [3, 5])) {
            endCase = 'very_spaced_out_hexachord';
          }
          break;
        default:
          break;
      }

      let groupMember;
      let column;
      // If the group contains more than seven members, use ascending parallel lines
      // of accidentals, using as few columns as possible while avoiding collisions.
      if (groupLength >= 7) {
        // First, determine how many columns to use:
        let patternLength = 2;
        let collisionDetected = true;
        while (collisionDetected === true) {
          collisionDetected = false;
          for (let line = 0; line + patternLength < lineList.length; line++) {
            if (this.checkCollision(lineList[line], lineList[line + patternLength])) {
              collisionDetected = true;
              patternLength++;
              break;
            }
          }
        }
        // Then, assign a column to each line of accidentals
        for (groupMember = i; groupMember <= groupEnd; groupMember++) {
          column = ((groupMember - i) % patternLength) + 1;
          lineList[groupMember].column = column;
          totalColumns = (totalColumns > column) ? totalColumns : column;
        }

      // Otherwise, if the group contains fewer than seven members, use the layouts from
      // the accidentalsColumnsTable housed in tables.js.
      } else {
        for (groupMember = i; groupMember <= groupEnd; groupMember++) {
          column = Flow.accidentalColumnsTable[groupLength][endCase][groupMember - i];
          lineList[groupMember].column = column;
          totalColumns = (totalColumns > column) ? totalColumns : column;
        }
      }

      // Increment i to the last note that was set, so that if a lower set of notes
      // does not conflict at all with this group, it can have its own classic shape.
      i = groupEnd;
    }

    // ### Convert Columns to x_offsets
    //
    // This keeps columns aligned, even if they have different accidentals within them
    // which sometimes results in a larger x_offset than is an accidental might need
    // to preserve the symmetry of the accidental shape.
    //
    // Neither A.C. Vinci nor G. Read address this, and it typically only happens in
    // music with complex chord clusters.
    //
    // TODO (?): Optionally allow closer compression of accidentals, instead of forcing
    // parallel columns.

    // track each column's max width, which will be used as initial shift of later columns:
    const columnWidths = [];
    const columnXOffsets = [];
    for (let i = 0; i <= totalColumns; i++) {
      columnWidths[i] = 0;
      columnXOffsets[i] = 0;
    }

    columnWidths[0] = accShift + leftShift;
    columnXOffsets[0] = accShift + leftShift;

    // Fill columnWidths with widest needed x-space;
    // this is what keeps the columns parallel.
    lineList.forEach(line => {
      if (line.width > columnWidths[line.column]) columnWidths[line.column] = line.width;
    });

    for (let i = 1; i < columnWidths.length; i++) {
      // this column's offset = this column's width + previous column's offset
      columnXOffsets[i] = columnWidths[i] + columnXOffsets[i - 1];
    }

    const totalShift = columnXOffsets[columnXOffsets.length - 1];
    // Set the xShift for each accidental according to column offsets:
    let accCount = 0;
    lineList.forEach(line => {
      let lineWidth = 0;
      const lastAccOnLine = accCount + line.numAcc;
      // handle all of the accidentals on a given line:
      for (accCount; accCount < lastAccOnLine; accCount++) {
        const xShift = (columnXOffsets[line.column - 1] + lineWidth);
        accList[accCount].acc.setXShift(xShift);
        // keep track of the width of accidentals we've added so far, so that when
        // we loop, we add space for them.
        lineWidth += accList[accCount].acc.getWidth() + accidentalSpacing;
        L('Line, accCount, shift: ', line.line, accCount, xShift);
      }
    });

    // update the overall layout with the full width of the accidental shapes:
    state.left_shift += totalShift;
  }

  // Helper function to determine whether two lines of accidentals collide vertically
  static checkCollision(line1, line2) {
    let clearance = line2.line - line1.line;
    let clearanceRequired = 3;
    // But less clearance is required for certain accidentals: b, bb and ##.
    if (clearance > 0) { // then line 2 is on top
      clearanceRequired = (line2.flatLine || line2.dblSharpLine) ? 2.5 : 3.0;
      if (line1.dblSharpLine) clearance -= 0.5;
    } else { // line 1 is on top
      clearanceRequired = (line1.flatLine || line1.dblSharpLine) ? 2.5 : 3.0;
      if (line2.dblSharpLine) clearance -= 0.5;
    }
    const collision = Math.abs(clearance) < clearanceRequired;
    L('Line_1, Line_2, Collision: ', line1.line, line2.line, collision);
    return collision;
  }

  // Use this method to automatically apply accidentals to a set of `voices`.
  // The accidentals will be remembered between all the voices provided.
  // Optionally, you can also provide an initial `keySignature`.
  static applyAccidentals(voices, keySignature) {
    const tickPositions = [];
    const tickNoteMap = {};

    // Sort the tickables in each voice by their tick position in the voice
    voices.forEach(voice => {
      const tickPosition = new Fraction(0, 1);
      const notes = voice.getTickables();
      notes.forEach(note => {
        if (note.shouldIgnoreTicks()) return;

        const notesAtPosition = tickNoteMap[tickPosition.value()];

        if (!notesAtPosition) {
          tickPositions.push(tickPosition.value());
          tickNoteMap[tickPosition.value()] = [note];
        } else {
          notesAtPosition.push(note);
        }

        tickPosition.add(note.getTicks());
      });
    });

    const music = new Music();

    // Default key signature is C major
    if (!keySignature) keySignature = 'C';

    // Get the scale map, which represents the current state of each pitch
    const scaleMap = music.createScaleMap(keySignature);

    tickPositions.forEach(tick => {
      const notes = tickNoteMap[tick];

      // Array to store all pitches that modified accidental states
      // at this tick position
      const modifiedPitches = [];

      const processNote = (note) => {
        if (note.isRest() || note.shouldIgnoreTicks()) return;

        // Go through each key and determine if an accidental should be
        // applied
        note.keys.forEach((keyString, keyIndex) => {
          const key = music.getNoteParts(keyString.split('/')[0]);

          // Force a natural for every key without an accidental
          const accidentalString = key.accidental || 'n';
          const pitch = key.root + accidentalString;

          // Determine if the current pitch has the same accidental
          // as the scale state
          const sameAccidental = scaleMap[key.root] === pitch;

          // Determine if an identical pitch in the chord already
          // modified the accidental state
          const previouslyModified = modifiedPitches.indexOf(pitch) > -1;

          // Add the accidental to the StaveNote
          if (!sameAccidental || (sameAccidental && previouslyModified)) {
            // Modify the scale map so that the root pitch has an
            // updated state
            scaleMap[key.root] = pitch;

            // Create the accidental
            const accidental = new Accidental(accidentalString);

            // Attach the accidental to the StaveNote
            note.addAccidental(keyIndex, accidental);

            // Add the pitch to list of pitches that modified accidentals
            modifiedPitches.push(pitch);
          }
        });

        // process grace notes
        note.getModifiers().forEach(modifier => {
          if (modifier.getCategory() === 'gracenotegroups') {
            modifier.getGraceNotes().forEach(processNote);
          }
        });
      };

      notes.forEach(processNote);
    });
  }

  // Create accidental. `type` can be a value from the
  // `Vex.Flow.accidentalCodes.accidentals` table in `tables.js`. For
  // example: `#`, `##`, `b`, `n`, etc.
  constructor(type = null) {
    super();
    this.setAttribute('type', 'Accidental');

    L('New accidental: ', type);

    this.note = null;
    // The `index` points to a specific note in a chord.
    this.index = null;
    this.type = type;
    this.position = Modifier.Position.LEFT;

    this.render_options = {
      // Font size for glyphs
      font_scale: 38,

      // Length of stroke across heads above or below the stave.
      stroke_px: 3,

      // Padding between accidental and parentheses on each side
      parenLeftPadding: 2,
      parenRightPadding: 2,
    };

    this.accidental = Flow.accidentalCodes(this.type);
    if (!this.accidental) {
      throw new Vex.RERR('ArgumentError', `Unknown accidental type: ${type}`);
    }

    // Cautionary accidentals have parentheses around them
    this.cautionary = false;
    this.parenLeft = null;
    this.parenRight = null;

    this.reset();
  }

  // (Re)build the glyphs from the current type/cautionary/font-scale state.
  reset() {
    const fontScale = this.render_options.font_scale;
    this.glyph = new Glyph(this.accidental.code, fontScale);
    this.glyph.setOriginX(1.0);

    if (this.cautionary) {
      this.parenLeft = new Glyph(Flow.accidentalCodes('{').code, fontScale);
      this.parenRight = new Glyph(Flow.accidentalCodes('}').code, fontScale);
      this.parenLeft.setOriginX(1.0);
      this.parenRight.setOriginX(1.0);
    }
  }

  getCategory() { return Accidental.CATEGORY; }

  // Total rendered width: glyph plus parentheses and padding when cautionary.
  getWidth() {
    const parenWidth = this.cautionary
      ? getGlyphWidth(this.parenLeft) + getGlyphWidth(this.parenRight) +
        this.render_options.parenLeftPadding + this.render_options.parenRightPadding
      : 0;

    return getGlyphWidth(this.glyph) + parenWidth;
  }

  // Attach this accidental to `note`, which must be a `StaveNote`.
  setNote(note) {
    if (!note) {
      throw new Vex.RERR('ArgumentError', `Bad note value: ${note}`);
    }

    this.note = note;

    // Accidentals attached to grace notes are rendered smaller.
    if (this.note.getCategory() === 'gracenotes') {
      this.render_options.font_scale = 25;
      this.reset();
    }
  }

  // If called, draws parenthesis around accidental.
  setAsCautionary() {
    this.cautionary = true;
    this.render_options.font_scale = 28;
    this.reset();
    return this;
  }

  // Render accidental onto canvas.
  draw() {
    const {
      context, type, position, note, index, cautionary,
      x_shift, y_shift, glyph, parenLeft, parenRight,
      render_options: { parenLeftPadding, parenRightPadding },
    } = this;

    this.checkContext();

    if (!(note && (index != null))) {
      throw new Vex.RERR('NoAttachedNote', "Can't draw accidental without a note and index.");
    }

    // Figure out the start `x` and `y` coordinates for note and index.
    const start = note.getModifierStartXY(position, index);
    let accX = start.x + x_shift;
    const accY = start.y + y_shift;

    L('Rendering: ', type, accX, accY);

    if (!cautionary) {
      glyph.render(context, accX, accY);
    } else {
      // Render the accidental in parentheses, drawn right-to-left because
      // each glyph's origin is its right edge (setOriginX(1.0)).
      parenRight.render(context, accX, accY);
      accX -= getGlyphWidth(parenRight);
      accX -= parenRightPadding;
      accX -= this.accidental.parenRightPaddingAdjustment;
      glyph.render(context, accX, accY);
      accX -= getGlyphWidth(glyph);
      accX -= parenLeftPadding;
      parenLeft.render(context, accX, accY);
    }

    this.setRendered();
  }
}
from time import sleep

# Console exercise: read five integers, sort them ascending and print each
# value with its position. Prompts and output are in Portuguese.
valores = []
for cont in range(1, 6):
    # Read several values from the keyboard and append them to the list.
    valores.append(int(input(f'Digite o {cont} valor: ')))
print('Vamos organizar os valores!')
valores.sort()  # Sort the values in ascending order.
sleep(2)  # Small cosmetic pause before showing the result.
print('Os valores são: ')
for c, val in enumerate(valores):
    print(f'Na posição {c} temos {val}')
print('A lista acabou!')
from src.utils.helper_synonym import get_synonym_all


def test_get_synonym_all(terms_synonyms):
    """Resolving synonyms for all fixture terms must reproduce the mapping."""
    result = get_synonym_all(terms=terms_synonyms.keys())
    assert result == terms_synonyms
import React, { Component } from 'react'; import { connect } from 'react-redux'; import { selectBook } from '../actions/index'; import { bindActionCreators } from 'redux'; class BookList extends Component { renderList() { return this.props.books.map((book) => { return ( <li key={book.title} onClick={() => this.props.selectBook(book)} className="list-group-item"> {book.title} </li> ); }); } render() { return ( <ul className="list-group col-sm-4"> {this.renderList()} </ul> ); } } function mapStateToProps(state) { return { books: state.books }; } function mapDispatchToProps(dispatch) { return bindActionCreators({ selectBook: selectBook }, dispatch); } export default connect(mapStateToProps, mapDispatchToProps)(BookList);
#ifndef VIX_LOGMANAGER_H
#define VIX_LOGMANAGER_H

#include <vix_platform.h>
#include <vix_singleton.h>

namespace Vixen {

    // Process-wide logging facade, exposed as a Singleton.
    // NOTE(review): the class body is currently empty — no logging API has
    // been declared yet; only the singleton scaffolding exists.
    class VIX_API LogManager : public Singleton <LogManager>
    {
        // Singleton needs access to the (implicit) private constructor.
        friend class Singleton <LogManager>;
    public:
    };

    // Global accessor instance, defined in the corresponding .cpp file.
    extern LogManager& g_LogManager;
}

#endif // !VIX_LOGMANAGER_H
# -*- coding: utf-8 -*-
"""
This module implements the class StandardNetwork.
StandardNetwork creates a ring network as defined in Santhakumar et al. 2005
with some changes as in Yim et al. 2015.
See StandardNetwork docstring for details.
Created on Tue Nov 28 13:01:38 2017

@author: DanielM
"""

from neuron import h, gui
import ouropy
import matplotlib.pyplot as plt
import numpy as np
from granulecell import GranuleCell
from mossycell_cat import MossyCell
from basketcell import BasketCell
from hippcell import HippCell


class TunedNetwork(ouropy.gennetwork.GenNetwork):
    """
    This model implements the ring model from Santhakumar et al. 2005.
    with some changes as in Yim et al. 2015.
    It features inhibition but omits the MC->GC connection.
    """
    name = "TunedNetwork"

    def __init__(self, seed=None, temporal_patterns=np.array([]),
                 spatial_patterns_gcs=np.array([]),
                 spatial_patterns_bcs=np.array([])):
        # Record constructor arguments for later inspection/reproducibility.
        self.init_params = locals()
        self.init_params['self'] = str(self.init_params['self'])

        # Setup cells: 2000 granule, 60 mossy, 24 basket and 24 HIPP cells.
        self.mk_population(GranuleCell, 2000)
        self.mk_population(MossyCell, 60)
        self.mk_population(BasketCell, 24)
        self.mk_population(HippCell, 24)

        # Set seed for reproducibility
        # NOTE(review): seed=0 is falsy and would be silently ignored here.
        if seed:
            self.set_numpy_seed(seed)

        # Setup recordings of action potentials for all four populations.
        self.populations[0].record_aps()
        self.populations[1].record_aps()
        self.populations[2].record_aps()
        self.populations[3].record_aps()

        temporal_patterns = np.array(temporal_patterns)
        print(np.shape(temporal_patterns))  # debug output of pattern shape
        #temporal_patterns = np.atleast_2d(temporal_patterns)

        # Perforant-path input onto granule cells: one Poisson synapse group
        # per spatial pattern. Trailing positional arguments are synapse
        # parameters passed straight through to ouropy — see
        # PerforantPathPoissonTmgsyn for their meaning (not documented here).
        if type(spatial_patterns_gcs) == np.ndarray and type(temporal_patterns) == np.ndarray:
            #spatial_patterns_gcs = np.atleast_2d(spatial_patterns_gcs)
            for pat in range(len(spatial_patterns_gcs)):
                # PP -> GC
                #Original
                ouropy.gennetwork.PerforantPathPoissonTmgsyn(
                    self.populations[0], temporal_patterns[pat],
                    spatial_patterns_gcs[pat],
                    'midd', 10, 0, 1, 0, 0, 1*10**(-3))

        # Perforant-path input onto basket cells, analogous to the GC input.
        if type(spatial_patterns_bcs) == np.ndarray and type(temporal_patterns) == np.ndarray:
            #spatial_patterns_bcs = np.atleast_2d(spatial_patterns_bcs)
            for pat in range(len(spatial_patterns_bcs)):
                # PP -> BC
                ouropy.gennetwork.PerforantPathPoissonTmgsyn(
                    self.populations[2], temporal_patterns[pat],
                    spatial_patterns_bcs[pat],
                    'ddend', 6.3, 0, 1, 0, 0, 1*10**(-3))

        # Recurrent connectivity. Populations: 0=GC, 1=MC, 2=BC, 3=HC.
        # The positional numbers are ouropy synapse/connection parameters
        # (divergence, compartment, counts, time constants, weights, ...);
        # their exact order is defined by tmgsynConnectionExponentialProb.

        # GC -> MC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[0], self.populations[1],
            6, 'proxd', 1, 7.6, 500, 0.1, 0, 0, 10, 1.5,
            0.2*10**(-2) * 10)

        # GC -> BC
        #Weight x4, target_pool = 2
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[0], self.populations[2],
            4, 'proxd', 1, 8.7, 500, 0.1, 0, 0, 10, 0.8,
            2.5*10**(-2))

        # GC -> HC
        # Divergence x4; Weight doubled; Connected randomly.
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[0], self.populations[3],
            24, 'proxd', 1, 8.7, 500, 0.1, 0, 0, 10, 1.5,
            2.5*10**(-2))

        # MC -> MC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[1], self.populations[1],
            24, 'proxd', 3, 2.2, 0, 1, 0, 0, 10, 2,
            0.5*10**(-3))

        # MC -> BC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[1], self.populations[2],
            6, 'proxd', 1, 2, 0, 1, 0, 0, 10, 3,
            0.3*10**(-3))

        # MC -> HC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[1], self.populations[3],
            10, 'midd', 2, 6.2, 0, 1, 0, 0, 10, 3,
            0.2*10**(-3))

        # BC -> GC
        # Nr. synapses x3; Weight *1/4; changed from 5.5 to 20 (Hefft & Jonas, 2005)
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[2], self.populations[0],
            2000, 'soma', 400, 20, 0, 1, 0, -70, 10, 0.85,
            1.2*10**(-3))

        # We reseed here to make sure that those connections are consistent
        # between this and net_global. The only connection that differs between
        # net_tuned and net_global will be the BC -> GC connection.
        if seed:
            self.set_numpy_seed(seed+1)

        # BC -> MC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[2], self.populations[1],
            14, 'proxd', 3, 3.3, 0, 1, 0, -70, 10, 1.5,
            1.5*10**(-3))

        # BC -> BC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[2], self.populations[2],
            6, 'proxd', 2, 1.8, 0, 1, 0, -70, 10, 0.8,
            7.6*10**(-3))

        # HC -> GC
        # Weight x10; Nr synapses x4; changed from 6 to 20 (Hefft & Jonas, 2005)
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[3], self.populations[0],
            2000, 'dd', 640, 20, 0, 1, 0, -70, 10, 3.8,
            0.6*10**(-2))

        # HC -> MC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[3], self.populations[1],
            30, ['mid1d', 'mid2d'], 4, 6, 0, 1, 0, -70, 10, 1,
            1.5*10**(-3))

        # HC -> BC
        ouropy.gennetwork.tmgsynConnectionExponentialProb(
            self.populations[3], self.populations[2],
            12, 'ddend', 4, 5.8, 0, 1, 0, -70, 10, 1.6,
            0.5*10**(-3))
#!/usr/bin/env python
import scipy.io
import numpy as np
import argparse, sys
from ppi import irefindex as iref
from ppi import parsers
from ppi import string_db as sdb
import stratipy.filtering_diffusion as diffuse
import scipy.sparse as sp
import sys
import os.path
import pickle


def parse_gene_list_file(fh):
    """Read a gene list file: one identifier per line, trailing whitespace stripped.

    :param fh: open text file handle
    :return: list of identifier strings
    """
    return [line.rstrip() for line in fh]


def check_or_set_fh(arg, default_base):
    """Convenience function for setting default file handle.

    If *arg* is an already-open handle (explicitly given on the command line),
    use it as-is. Otherwise open *default_base* in the current directory for
    writing — but refuse to clobber an existing file, exiting with status 2.
    """
    if arg is not None:
        return arg
    path = os.path.join(os.curdir, default_base)
    if os.path.exists(path):
        sys.stderr.write("Refusing to overwrite {}\n".format(path))
        # BUG FIX: use sys.exit; the bare exit() builtin comes from the
        # site module and is not guaranteed outside interactive sessions.
        sys.exit(2)
    return open(path, "w")


def main():
    """Command-line entry point: diffuse gene lists over a PPI network.

    Steps: (1) smooth the gene-list indicator matrix over the network,
    (2) compute the graph Laplacian, (3) write ids + matrices to disk.
    """
    # TODO currently only does smoothing step
    parser = argparse.ArgumentParser(description="Run network based stratification")
    parser.add_argument("ppi_db", type=str, help="irefindex database file")
    parser.add_argument("gene_lists", nargs="+",
                        help="one or more files with an HGNC identifier on each line")
    parser.add_argument("--alpha", "-a", type=float, default=0.7,
                        help="Diffusion rate parameter")
    parser.add_argument("--tolerance", "-t", type=float, default=10e-6,
                        help="Tolerance threshold for diffusion; stop when change in diffused matrix crosses below threshold")
    parser.add_argument("--threshold", type=float, default=0.0)
    parser.add_argument("--ids", type=argparse.FileType("w"),
                        help="File to write association between an index in the smoothed data matrix and a gene identifier")
    parser.add_argument("--mat-file", "-m", nargs=1, type=argparse.FileType("w"),
                        help=".mat file to write 'smoothed_mat' and 'laplacian' matrices to; if provided, laplacian and diffused are ignored")
    parser.add_argument("--laplacian", "-K", type=argparse.FileType("w"),
                        help="File to write graph Laplacian to")
    parser.add_argument("--diffused", "-d", type=argparse.FileType("w"),
                        help="Diffused matrix")
    args = parser.parse_args()

    # set default output files but dont overwrite if a default is used
    # (overwrite if file is explicitly mentioned)
    args.ids = check_or_set_fh(args.ids, "diffused_ids.txt")
    do_mat = False
    if args.mat_file is None:
        args.laplacian = check_or_set_fh(args.laplacian, "laplacian.csv")
        args.diffused = check_or_set_fh(args.diffused, "smoothed_mat.csv")
    else:
        do_mat = True

    # (1) compute smoothed matrix
    gene_lists = []
    for gene_path in args.gene_lists:
        with open(gene_path) as fh:
            gene_lists.append(parse_gene_list_file(fh))

    # Parse the PPI database; file type is auto-detected from the content.
    with open(args.ppi_db) as fh:
        db_type = parsers.detect_db(fh)
        G_ppi = None
        if db_type == 'string':
            # TODO apply threshold only for laplacian?
            G_ppi = sdb.parse_string_fh(fh, threshold=args.threshold)
        elif db_type == 'irefindex':
            # TODO threshold
            G_ppi = iref.get_alt_id_ppi(fh)  # defaults to node IDs as HGNC symbols
        # else error
        adj, ids = iref.get_adj_mat(G_ppi)

    # bind first argument of get_row_vec in closure
    def get_row_vec_for_gene_list(gene_list):
        row_vec, missing = iref.get_row_vec_alt(ids, gene_list)
        sys.stderr.write("missing {}/{} HGNC identifiers: {}\n".format(
            len(missing), len(gene_list), ", ".join(missing)))
        return row_vec

    # BUG FIX: materialize the map — under Python 3 map() returns a lazy
    # iterator, which scipy.sparse.vstack does not reliably accept.
    row_vecs = list(map(get_row_vec_for_gene_list, gene_lists))
    mat = sp.vstack(row_vecs)
    sys.stderr.write("mat.shape: {}\n".format(mat.shape))
    sys.stderr.write("adj.shape: {}\n".format(adj.shape))
    smoothed_mat = diffuse.propagation(mat, adj, alpha=args.alpha, tol=args.tolerance)

    # (2) compute laplacian
    # TODO use a "influence distance" "kNN" graph instead of full graph laplacian
    laplacian, ids_lap = iref.get_laplacian(G_ppi)
    assert ids == ids_lap, "ids != ids_lap: smoothed matrix and Laplacian matrix do not agree on gene identities"

    # (3) write output files
    for id in ids:
        args.ids.write("{}\n".format(id))

    # take care in writing the laplacian; it is a n_gene x n_gene matrix (so approx 20k x 20k)
    # which is reasonably large; writing it as a .mat file is one option; another choice
    # is the MatrixMarket format which can write sparse matrices
    if do_mat:
        # NOTE must verify that type is float because MatLab will interpret the result
        # differently (incorrectly) if not
        scipy.io.savemat(args.mat_file[0], {
            "smoothed_mat": smoothed_mat.astype(float),
            "laplacian": laplacian.astype(float)
        })
    else:
        scipy.io.mmwrite(args.laplacian, laplacian)
        scipy.io.mmwrite(args.diffused, smoothed_mat)


if __name__ == '__main__':
    main()
# -*- coding: utf-8 -*- # Generated by Django 1.9.7 on 2017-02-23 22:12 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tournament', '0145_auto_20170211_1825'), ] operations = [ migrations.AddField( model_name='league', name='description', field=models.TextField(blank=True), ), ]
var async = require("async"); /******************* Name: fetchScreen Description: Load selected screen object *******************/ exports.fetchScreen = function (req, res, next) { var Screen = require('../models/screen'); Screen.find(req.params.id, function(err, screen){ if( err){ console.log("Error retrieving screen. Error: %s", err); req.session.screen = null; next(); }else if( screen === null){ console.log("Screen not found") req.session.messages = { errors: ['screen not found'] }; req.session.screen = null; res.redirect('/'); next(); }else{ var Widget = require('../models/widget'); var screenWidgets = []; async.each( screen.widgets, function( wid, done){ console.log("Searching widget: ", wid); Widget.find(wid, function(err, widget){ if(err){ util.log( util.format("Error retrieving widget Error '%s'", err) ); done(err); }else{ //console.log("Found widget", widget); if( widget!== null) screenWidgets.push(widget); done(); } }); }, function(err){ if(err) req.session.screen = null else{ req.session.screen = screen; req.session.screen.widgets = screenWidgets; } next(); }); } });; }; /******************* Name: all Description: Fetch all screens from database *******************/ exports.all = function(req, res, next){ var Screen = require('../models/screen.js'); var screenList = []; Screen.all( function(err, list){ if(!err){ screenList = list; ; } req.session.screens = screenList; next(); }); }
#include <stdio.h> #include <unistd.h> #include <string.h> #include <stdlib.h> #include <sys/wait.h> #include <signal.h> int hijos; char *message = "procesando...\t"; void Usage() { printf("Usage:signals10 ejecutable [arg2..argn]\n"); printf("Este programa crea tantos procesos como argumentos recibe menos 1 que luego mutan al programa especificado\n"); } void error_y_exit(char *s, int error) { perror(s); exit(error); } void print_child(int pid, int status) { if (WIFEXITED(status)) { // Ha terminado por culpa de un exit int exitcode = WEXITSTATUS(status); printf("El proceso %d termina con exit code %d\n", pid, exitcode); } else { // Ha terminado por un signal int signalcode = WTERMSIG(status); printf("El proceso %d termina con signal code %d\n", pid, signalcode); } } void ras(int s) { if (s == SIGCHLD) { int res; int status; while ((res = waitpid(-1, &status, WNOHANG)) > 0) { print_child(res, status); --hijos; } } if (s == SIGALRM) { printf("%s", message); alarm(1); } if (s == SIGHUP) { if (message == "procesando...\t") message = "processant... 
\t"; else message = "procesando...\t"; } } int main(int argc, char *argv[]) { if (argc >= 2) { char *programa = argv[1]; argv++; hijos = argc - 1; sigset_t mask; struct sigaction sa; // Block signals sigemptyset(&mask); sigaddset(&mask, SIGCHLD); sigaddset(&mask, SIGALRM); sigaddset(&mask, SIGHUP); sigprocmask(SIG_BLOCK, &mask, NULL); // Reprogram SIGCHLD sa.sa_handler = ras; sa.sa_flags = SA_RESTART; if (sigaction(SIGCHLD, &sa, NULL) < 0) error_y_exit("Error en el sigaction", 1); if (sigaction(SIGALRM, &sa, NULL) < 0) error_y_exit("Error en el sigaction", 1); if (sigaction(SIGHUP, &sa, NULL) < 0) error_y_exit("Error en el sigaction", 1); for (int i = 1; i < argc; ++i) { int pid = fork(); if (pid == 0) { // Como el proceso muta, se pierde la reprogramación de los signals execvp(programa, argv); error_y_exit("Error en el execvp", 1); } if (pid < 0) { error_y_exit("Error en el fork", 1); } } sigfillset(&mask); sigdelset(&mask, SIGCHLD); sigdelset(&mask, SIGALRM); sigdelset(&mask, SIGHUP); sigdelset(&mask, SIGINT); alarm(1); while (hijos > 0) { if (sigsuspend(&mask) != -1) error_y_exit("Error en el sigsuspend", 1); } } else { Usage(); } }
import React, { Fragment } from "react"; import { BrowserRouter, Route, Switch, withRouter } from "react-router-dom"; import Header from "./components/common/header"; import Footer from "./components/common/footer"; import Home from "./components/home"; import SinglePost from "./components/post/single"; import AboutPage from "./components/about"; import ContactPage from "./components/contact"; import Signup from "./components/auth/signup"; import Posts from "./components/post"; import Categories from "./components/category"; ////// import PageNotFound from "./components/404"; import PostsCategory from "./components/post/postsByCat"; import PostsSearch from "./components/postsBySearch"; import Profile from "./components/profile"; import './components/common/assets/Fontawesome-all.css'; import './components/common/assets/themify-icons.css'; import './components/common/assets/linearicons.css'; import './App.scss'; import './_responsive.scss'; import PostsUser from './components/profile/post'; const Main = withRouter(({ location }) => { return ( <React.Fragment> {location.pathname !== "/signup" && <Header />} <Switch> <Route exact path="/" component={Home} /> <Route exact path="/posts"> <Posts /> </Route> <Route exact path="/single/:id"> <SinglePost /> </Route> <Route exact path="/categories"> <Categories /> </Route> <Route exact path="/posts/category/:id" component={PostsCategory} /> <Route exact path="/posts/:title/:category" component={PostsSearch} /> <Route exact path="/about" component={AboutPage} /> <Route exact path="/contact" component={ContactPage} /> <Route exact path="/signup" component={Signup} /> <Route exact path="/postsuser" component={PostsUser} /> <Route exact path="/profile" component={Profile} /> <Route component={PageNotFound} /> </Switch> {location.pathname !== "/signup" && <Footer />} </React.Fragment> ); }); function App() { return ( <BrowserRouter> <Main /> </BrowserRouter> ); } export default App;
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from ._enums import *

__all__ = ['ApiIssue']


class ApiIssue(pulumi.CustomResource):
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_id: Optional[pulumi.Input[str]] = None,
                 created_date: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 issue_id: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 service_name: Optional[pulumi.Input[str]] = None,
                 state: Optional[pulumi.Input[Union[str, 'State']]] = None,
                 title: Optional[pulumi.Input[str]] = None,
                 user_id: Optional[pulumi.Input[str]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        Issue Contract details.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] api_id: A resource identifier for the API the issue was created for.
        :param pulumi.Input[str] created_date: Date and time when the issue was created.
        :param pulumi.Input[str] description: Text describing the issue.
        :param pulumi.Input[str] issue_id: Issue identifier. Must be unique in the current API Management service instance.
        :param pulumi.Input[str] resource_group_name: The name of the resource group.
        :param pulumi.Input[str] service_name: The name of the API Management service.
        :param pulumi.Input[Union[str, 'State']] state: Status of the issue.
        :param pulumi.Input[str] title: The issue title.
        :param pulumi.Input[str] user_id: A resource identifier for the user created the issue.
        """
        # Legacy keyword arguments kept for backward compatibility.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: validate required inputs and build the
            # property bag. Requirements are relaxed when opts.urn is set
            # (the engine resolves the resource by URN instead).
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            if api_id is None and not opts.urn:
                raise TypeError("Missing required property 'api_id'")
            __props__['api_id'] = api_id
            __props__['created_date'] = created_date
            if description is None and not opts.urn:
                raise TypeError("Missing required property 'description'")
            __props__['description'] = description
            __props__['issue_id'] = issue_id
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__['resource_group_name'] = resource_group_name
            if service_name is None and not opts.urn:
                raise TypeError("Missing required property 'service_name'")
            __props__['service_name'] = service_name
            __props__['state'] = state
            if title is None and not opts.urn:
                raise TypeError("Missing required property 'title'")
            __props__['title'] = title
            if user_id is None and not opts.urn:
                raise TypeError("Missing required property 'user_id'")
            __props__['user_id'] = user_id
            # Output-only properties start unset.
            __props__['name'] = None
            __props__['type'] = None
        # Aliases map this resource to equivalent types in other API versions
        # so engine state migrates cleanly between provider versions.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:apimanagement/v20190101:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/latest:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/latest:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20170301:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20170301:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20180101:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180101:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20180601preview:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20180601preview:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20191201:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20191201preview:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20191201preview:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20200601preview:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20200601preview:ApiIssue"), pulumi.Alias(type_="azure-native:apimanagement/v20201201:ApiIssue"), pulumi.Alias(type_="azure-nextgen:apimanagement/v20201201:ApiIssue")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(ApiIssue, __self__).__init__(
            'azure-native:apimanagement/v20190101:ApiIssue',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'ApiIssue':
        """
        Get an existing ApiIssue resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Properties are resolved from provider state, so they start as None.
        __props__ = dict()

        __props__["api_id"] = None
        __props__["created_date"] = None
        __props__["description"] = None
        __props__["name"] = None
        __props__["state"] = None
        __props__["title"] = None
        __props__["type"] = None
        __props__["user_id"] = None
        return ApiIssue(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiId")
    def api_id(self) -> pulumi.Output[Optional[str]]:
        """
        A resource identifier for the API the issue was created for.
        """
        return pulumi.get(self, "api_id")

    @property
    @pulumi.getter(name="createdDate")
    def created_date(self) -> pulumi.Output[Optional[str]]:
        """
        Date and time when the issue was created.
        """
        return pulumi.get(self, "created_date")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[str]:
        """
        Text describing the issue.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        Resource name.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def state(self) -> pulumi.Output[Optional[str]]:
        """
        Status of the issue.
        """
        return pulumi.get(self, "state")

    @property
    @pulumi.getter
    def title(self) -> pulumi.Output[str]:
        """
        The issue title.
        """
        return pulumi.get(self, "title")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Resource type for API Management resource.
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="userId")
    def user_id(self) -> pulumi.Output[str]:
        """
        A resource identifier for the user created the issue.
        """
        return pulumi.get(self, "user_id")

    # Map provider (camelCase) property names to Python (snake_case) and back.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
import axios from 'axios';

// OAuth scope requested when acquiring a token for the gamification API.
export const GameAPIScope = 'api://05acec15-d6fb-4dae-a9b3-5886a7709df9/user_impersonation';

// Build an axios instance pre-configured for the gamification API,
// attaching the given bearer token to every request.
export default function (accessToken) {
  const headers = {
    common: {
      'content-type': 'application/json',
    },
    'Authorization': `Bearer ${accessToken}`,
  };

  return axios.create({
    baseURL: 'https://nursehack-gamificationapi.azurewebsites.net/api',
    headers,
  });
}
// CRUD management for the group organization structure.
var ORG_OrganizationManager = function () {
    // Wire up all page events.
    var initEvent = function () {
        // Toggle visibility of the "add organization leader" panel.
        $("#addNewOrgLeader").click(function () {
            var leaderDiv = $("#addOrgLeader");
            if (leaderDiv.is(":visible")) {
                leaderDiv.hide();
            } else {
                leaderDiv.show();
            }
        });

        // Province selected: load its cities, pre-selecting the one being edited.
        $("#regionProvId").on("change", function () {
            var regionId = $(this).val();
            if (regionId == '') {
                return;
            }
            $.ajax({
                type: "post",
                dataType: "json",
                url: "queryRegionByParentId.html",
                data: {parentRegionId: regionId},
                success: function (data) {
                    var result = "";
                    result += "<option value=''>请选择城市</option>";
                    $.each(data, function (index, value) {
                        var editRegionCityId = $("#editRegionCityId").val();
                        if (editRegionCityId == value.id) {
                            result += "<option value=\"" + value.id + "\" selected=\"selected\">" + value.name + "</option>";
                        } else {
                            result += "<option value=\"" + value.id + "\">" + value.name + "</option>";
                        }
                    });
                    // .change() cascades so the district list refreshes too.
                    $("#regionCityId").empty().append(result).change();
                    $("#regionAreaId").empty();
                }
            })
        });

        // City selected: load its districts, pre-selecting the one being edited.
        $("#regionCityId").on("change", function () {
            var regionId = $(this).val();
            if (regionId == '') {
                return;
            }
            $.ajax({
                type: "post",
                dataType: "json",
                url: "queryRegionByParentId.html",
                data: {parentRegionId: regionId},
                success: function (data) {
                    var result = "";
                    result += "<option value=''>请选择区域</option>";
                    $.each(data, function (index, value) {
                        var editRegionAreaId = $("#editRegionAreaId").val();
                        if (editRegionAreaId == value.id) {
                            result += "<option value=\"" + value.id + "\" selected=\"selected\">" + value.name + "</option>";
                        } else {
                            result += "<option value=\"" + value.id + "\">" + value.name + "</option>";
                        }
                    });
                    $("#regionAreaId").empty().append(result);
                }
            })
        });

        // Level type selected: load the positions of that type.
        $("#levelType").on("change", function () {
            var levelType = $(this).val();
            if (levelType == '') {
                return;
            }
            $.ajax({
                type: "post",
                dataType: "json",
                url: "queryLevelByType.html",
                data: {levelType: levelType},
                success: function (data) {
                    var result = "";
                    result += "<option value=''>请选择职位</option>";
                    // BUG FIX: `levelId` was read without ever being declared,
                    // throwing a ReferenceError inside the loop. Read the level
                    // currently being edited, mirroring the region handlers.
                    // TODO(review): confirm "#editLevelId" is the hidden field id.
                    var levelId = $("#editLevelId").val();
                    $.each(data, function (index, value) {
                        if (levelId == value.levelId) {
                            result += "<option value=\"" + value.levelId + "\" selected=\"selected\">" + value.levelName + "</option>";
                        } else {
                            result += "<option value=\"" + value.levelId + "\">" + value.levelName + "</option>";
                        }
                    });
                    // BUG FIX: moved outside the $.each loop — the original
                    // emptied and re-appended the select once per option.
                    $("#levelId").empty().append(result);
                }
            });
        });

        // Save the organization: validates, then creates or modifies
        // depending on whether an id is being edited.
        $("#addOrgBut").on("click", function () {
            var orgForm = $("#orgForm");
            if (!orgForm.valid()) {
                return;
            }
            var requestUrl = "addOrg.html";
            // An existing id means we are modifying, not adding.
            if ($("#editOrgId").val() != "") {
                requestUrl = "modifyOrg.html";
            }
            $.ajax({
                type: "post",
                dataType: "json",
                url: requestUrl,
                data: orgForm.serialize(),
                success: function (data) {
                    if (data.code == 0) {
                        $("#backListPage").click();
                    }
                }
            });
        });

        // jQuery-validate configuration for the organization form.
        var options = {
            errorElement: 'span',
            errorClass: 'help-block',
            focusInvalid: false,
            ignore: "",
            rules: {
                orgName: {
                    minlength: 2,
                    maxlength: 24,
                    required: true
                },
                parentOrgName: {
                    minlength: 2,
                    maxlength: 24,
                    required: true
                },
                regionProvId: {
                    required: true
                }
            },
            highlight: function (element) {
                $(element).closest('.form-group').addClass('has-error');
            },
            unhighlight: function (element) {
                $(element).closest('.form-group').removeClass('has-error');
            },
            success: function (label) {
                label.closest('.form-group').removeClass('has-error');
            }
        }
        // Attach validation and trigger the initial province -> city cascade.
        $("#orgForm").validate(options);
        $("#regionProvId").change();
    };

    return {
        init: function () {
            initEvent();
        }
    };
}();
#!/usr/bin/env python3
import json
import logging
import os
import re
import subprocess
import sys
import time
from io import BytesIO

import pycurl
import RPi.GPIO as GPIO

from config import *

# Logging fun
root = logging.getLogger()
root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
root.addHandler(handler)

# Setup GPIO communication: the trigger pin is active-low with a pull-up.
GPIO.setmode(GPIO.BCM)
GPIO.setup(GPIO_BCM_PIN, GPIO.IN, pull_up_down=GPIO.PUD_UP)


# Gotta use pycurl instead of requests to control source interface
def curl(url, iface=None):
    """GET *url* (optionally bound to a network interface).

    Returns (http_status, parsed_json_or_None).
    """
    c = pycurl.Curl()
    buffer = BytesIO()
    c.setopt(pycurl.URL, url)
    c.setopt(pycurl.TIMEOUT, 5)
    c.setopt(pycurl.WRITEFUNCTION, buffer.write)
    if iface:
        c.setopt(pycurl.INTERFACE, iface)
    c.perform()
    code = c.getinfo(pycurl.HTTP_CODE)
    try:
        resp = json.loads(buffer.getvalue().decode('UTF-8'))
    except json.decoder.JSONDecodeError:
        resp = None
    buffer.close()
    c.close()
    return (code, resp)


class GoProManager(object):
    """Drives a fleet of GoPros from a single GPIO/file trigger."""

    def __init__(self):
        self.gopros = []
        self.recording = False

    def add_gopro(self, iface, ssid, wifi_mac, bt_mac):
        """Register a camera reachable via *iface* (wifi) and *bt_mac* (BLE)."""
        gp = GoPro(iface, ssid, wifi_mac, bt_mac)
        self.gopros.append(gp)

    def change_route(self, iface):
        """Point the GoPro address (10.5.5.9) at the given wifi interface.

        Every camera answers on the same IP, so the host route must be
        switched before talking to each one.
        """
        subprocess.call('sudo ip route replace 10.5.5.9 dev {} proto dhcp scope link metric 0'.format(iface), shell=True)

    def start_monitor(self):
        """Poll the trigger once per second and toggle capture on all cameras."""
        i = 0
        while True:
            # Trigger is active-low on the GPIO pin, or a marker file on disk.
            triggered = not bool(GPIO.input(GPIO_BCM_PIN)) or os.path.exists(TRIGGER_PATH)
            if triggered != self.recording:
                for gp in self.gopros:
                    self.change_route(gp.iface)
                    if self.recording:
                        logging.info("Stopping capture on {}.".format(gp.ssid))
                        gp.stop_capture()
                    else:
                        logging.info("Starting capture on {}.".format(gp.ssid))
                        gp.start_capture()
                self.recording = not self.recording
            time.sleep(1)
            i = i + 1
            # Check to ensure status every so often, doing this won't let cameras sleep
            if CHECK_STATUS is not None and i >= CHECK_STATUS:
                for gp in self.gopros:
                    self.change_route(gp.iface)
                    if gp.is_capturing() != self.recording:
                        logging.warning("{} recording status does not match desired state.".format(gp.ssid))
                        if self.recording:
                            logging.info("Starting capture on {}.".format(gp.ssid))
                            gp.start_capture()
                        else:
                            logging.info("Stopping capture on {}.".format(gp.ssid))
                            gp.stop_capture()
                i = 0


class GoPro(object):
    """One camera: wake over BLE/WoL, then control over its HTTP API."""

    def __init__(self, iface, ssid, wifi_mac, bt_mac):
        self.iface = iface
        self.ssid = ssid
        self.wifi_mac = wifi_mac
        self.bt_mac = bt_mac

    def gatttool_write(self, command):
        """Write *command* to both known GoPro BLE control handles."""
        return subprocess.call("sudo gatttool -t random -b {bt} --char-write-req -a 0x33 -n {val}; \
            sudo gatttool -t random -b {bt} --char-write-req -a 0x2f -n {val}".format(bt=self.bt_mac, val=command),
                               shell=True, timeout=10)

    def power_on(self):
        """Wake the camera; returns True once it answers over HTTP."""
        if self.is_wifi_connected():
            for attempt in range(1, 3):
                try:
                    wake_on_lan = subprocess.call("sudo wakeonlan -p 9 -i 10.5.5.9 {}".format(self.wifi_mac), shell=True)
                    # logging.debug("Wake-on-lan sent to {}".format(self.ssid))
                    r, data = curl("http://10.5.5.9/gp/gpControl/command/system/locate?p=0", iface=self.iface)
                except Exception:
                    r = 0
                if r == 200:
                    return True
                time.sleep(2)
        else:
            logging.info("Wifi is not connected to {}. Sending wifi enable over Bluetooth LE.".format(self.ssid))
            try:
                wifi_on = self.gatttool_write('03170101')
                logging.debug("Wifi enable over Bluetooth LE returned {}".format(wifi_on))
            # BUG FIX: TimeoutExpired was referenced unqualified (NameError);
            # it lives in the subprocess module.
            except subprocess.TimeoutExpired:
                # BUG FIX: the message had a '{}' placeholder but was never
                # formatted; also logging.warn is deprecated.
                logging.warning("{} unreachable over Bluetooth LE. Is camera in deep sleep?".format(self.ssid))
                wifi_on = None
            for attempt in range(1, 10):
                logging.info("Waiting for wifi to associate to {}".format(self.ssid))
                time.sleep(1)
                if self.is_wifi_connected():
                    break
            wake_on_lan = subprocess.call("sudo wakeonlan -p 9 -i 10.5.5.9 {}".format(self.wifi_mac), shell=True)
            logging.debug("Wake-on-lan sent to {}".format(self.ssid))
            for attempt in range(1, 3):
                logging.debug("Waiting for response from GoPro over HTTP")
                try:
                    r, data = curl("http://10.5.5.9/gp/gpControl/command/system/locate?p=0", iface=self.iface)
                except Exception:
                    r = 0
                if r == 200:
                    return True
                time.sleep(1)

    def is_wifi_connected(self):
        """True when the interface is associated to this camera's BSSID."""
        out = subprocess.check_output('iw dev {} link'.format(self.iface), shell=True).split(b"\n")
        for line in out:
            if re.match('^Connected to {}'.format(self.wifi_mac.lower()).encode(), line) is not None:
                return True
        return False

    def ensure_connection(self):
        """Retry power_on a few times; returns True on success."""
        for attempt in range(1, 5):
            logging.debug('Attempt number {} to connect to {}'.format(attempt, self.ssid))
            try:
                if self.power_on():
                    return True
            except Exception:
                pass
        return False

    def power_off(self):
        if self.ensure_connection():
            r, data = curl("http://10.5.5.9/gp/gpControl/command/system/sleep", iface=self.iface)
            return r

    def is_capturing(self):
        """Return True/False for recording state, or None when unknown."""
        if self.ensure_connection():
            try:
                r, data = curl("http://10.5.5.9/gp/gpControl/status", iface=self.iface)
            except Exception:
                return None
            # Robustness: a non-JSON reply yields data=None — treat as unknown.
            if data is None:
                return None
            # Card error, cycle camera
            if data['status'].get('33') == 3:
                logging.warning("{} has card error condition. Shutting it down.".format(self.ssid))
                self.power_off()
                return False
            # True if recording
            return data['status'].get('8') == 1

    def start_capture(self):
        if self.ensure_connection():
            r, data = curl("http://10.5.5.9/gp/gpControl/command/shutter?p=1", iface=self.iface)
            return r

    def stop_capture(self):
        if self.ensure_connection():
            r, data = curl("http://10.5.5.9/gp/gpControl/command/shutter?p=0", iface=self.iface)
            return r


if __name__ == '__main__':
    gpm = GoProManager()
    for gp in GOPROS:
        gpm.add_gopro(gp[0], gp[1], gp[2], gp[3])
    logging.info("Starting to monitor trigger...")
    gpm.start_monitor()
from plaid.api.api import API


class Balance(API):
    '''Accounts balance endpoint.'''

    def get(self, access_token, _options=None, account_ids=None):
        '''
        Retrieve real-time balance information for accounts.

        :param  str     access_token:
        :param  dict    _options:       Extra fields merged into the request's
                                        ``options`` object. Optional.
        :param  [str]   account_ids:    A list of account_ids to
                                        retrieve for the item. Optional.
        '''
        # Copy into a fresh dict so the caller's mapping is never mutated.
        # (A ``None`` default avoids Python's shared-mutable-default pitfall
        # that the previous ``_options={}`` signature exhibited.)
        options = dict(_options) if _options else {}
        if account_ids is not None:
            options['account_ids'] = account_ids

        return self.client.post('/accounts/balance/get', {
            'access_token': access_token,
            'options': options,
        })


class Accounts(API):
    '''
    Accounts endpoints.
    (`HTTP docs <https://plaid.com/docs/api/#accounts>`__)

    .. autoclass:: plaid.api.accounts.Balance
        :members:
    '''

    def __init__(self, client):
        super(Accounts, self).__init__(client)
        # Nested endpoint: ``client.accounts.balance.get(...)``.
        self.balance = Balance(client)

    def get(self, access_token, _options=None, account_ids=None):
        '''
        Retrieve high-level account information for an Item.

        :param  str     access_token:
        :param  dict    _options:       Extra fields merged into the request's
                                        ``options`` object. Optional.
        :param  [str]   account_ids:    A list of account_ids to
                                        retrieve for the item. Optional.
        '''
        options = dict(_options) if _options else {}
        if account_ids is not None:
            options['account_ids'] = account_ids

        return self.client.post('/accounts/get', {
            'access_token': access_token,
            'options': options,
        })
# Code is generated: DO NOT EDIT # Copyright 2019 Machine Zone, Inc. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. from kubespec import context from kubespec import types from kubespec.k8s import base from kubespec.k8s.batch import v1 as batchv1 from typeguard import check_type, typechecked from typing import Any, Dict, Optional # ConcurrencyPolicy describes how the job will be handled. # Only one of the following concurrent policies may be specified. # If none of the following policies is specified, the default one # is AllowConcurrent. ConcurrencyPolicy = base.Enum( "ConcurrencyPolicy", { # Allow allows CronJobs to run concurrently. "Allow": "Allow", # Forbid forbids concurrent runs, skipping next run if previous # hasn't finished yet. "Forbid": "Forbid", # Replace cancels currently running job and replaces it with a new one. "Replace": "Replace", }, ) class JobTemplateSpec(base.NamespacedMetadataObject): """ JobTemplateSpec describes the data a Job should have when created from a template """ @context.scoped @typechecked def __init__( self, namespace: str = None, name: str = None, labels: Dict[str, str] = None, annotations: Dict[str, str] = None, spec: "batchv1.JobSpec" = None, ): super().__init__( **({"namespace": namespace} if namespace is not None else {}), **({"name": name} if name is not None else {}), **({"labels": labels} if labels is not None else {}), **({"annotations": annotations} if annotations is not None else {}), ) self.__spec = spec if spec is not None else batchv1.JobSpec() @typechecked def _root(self) -> Dict[str, Any]: v = super()._root() spec = self.spec() check_type("spec", spec, Optional["batchv1.JobSpec"]) v["spec"] = spec return v def spec(self) -> Optional["batchv1.JobSpec"]: """ Specification of the desired behavior of the job. 
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status """ return self.__spec class CronJobSpec(types.Object): """ CronJobSpec describes how the job execution will look like and when it will actually run. """ @context.scoped @typechecked def __init__( self, schedule: str = "", starting_deadline_seconds: int = None, concurrency_policy: ConcurrencyPolicy = ConcurrencyPolicy["Allow"], suspend: bool = None, job_template: "JobTemplateSpec" = None, successful_jobs_history_limit: int = None, failed_jobs_history_limit: int = None, ): super().__init__() self.__schedule = schedule self.__starting_deadline_seconds = starting_deadline_seconds self.__concurrency_policy = concurrency_policy self.__suspend = suspend self.__job_template = ( job_template if job_template is not None else JobTemplateSpec() ) self.__successful_jobs_history_limit = ( successful_jobs_history_limit if successful_jobs_history_limit is not None else 3 ) self.__failed_jobs_history_limit = ( failed_jobs_history_limit if failed_jobs_history_limit is not None else 1 ) @typechecked def _root(self) -> Dict[str, Any]: v = super()._root() schedule = self.schedule() check_type("schedule", schedule, str) v["schedule"] = schedule starting_deadline_seconds = self.starting_deadline_seconds() check_type( "starting_deadline_seconds", starting_deadline_seconds, Optional[int] ) if starting_deadline_seconds is not None: # omit empty v["startingDeadlineSeconds"] = starting_deadline_seconds concurrency_policy = self.concurrency_policy() check_type( "concurrency_policy", concurrency_policy, Optional[ConcurrencyPolicy] ) if concurrency_policy: # omit empty v["concurrencyPolicy"] = concurrency_policy suspend = self.suspend() check_type("suspend", suspend, Optional[bool]) if suspend is not None: # omit empty v["suspend"] = suspend job_template = self.job_template() check_type("job_template", job_template, "JobTemplateSpec") v["jobTemplate"] = job_template 
successful_jobs_history_limit = self.successful_jobs_history_limit() check_type( "successful_jobs_history_limit", successful_jobs_history_limit, Optional[int], ) if successful_jobs_history_limit is not None: # omit empty v["successfulJobsHistoryLimit"] = successful_jobs_history_limit failed_jobs_history_limit = self.failed_jobs_history_limit() check_type( "failed_jobs_history_limit", failed_jobs_history_limit, Optional[int] ) if failed_jobs_history_limit is not None: # omit empty v["failedJobsHistoryLimit"] = failed_jobs_history_limit return v def schedule(self) -> str: """ The schedule in Cron format, see https://en.wikipedia.org/wiki/Cron. """ return self.__schedule def starting_deadline_seconds(self) -> Optional[int]: """ Optional deadline in seconds for starting the job if it misses scheduled time for any reason. Missed jobs executions will be counted as failed ones. """ return self.__starting_deadline_seconds def concurrency_policy(self) -> Optional[ConcurrencyPolicy]: """ Specifies how to treat concurrent executions of a Job. Valid values are: - "Allow" (default): allows CronJobs to run concurrently; - "Forbid": forbids concurrent runs, skipping next run if previous run hasn't finished yet; - "Replace": cancels currently running job and replaces it with a new one """ return self.__concurrency_policy def suspend(self) -> Optional[bool]: """ This flag tells the controller to suspend subsequent executions, it does not apply to already started executions. Defaults to false. """ return self.__suspend def job_template(self) -> "JobTemplateSpec": """ Specifies the job that will be created when executing a CronJob. """ return self.__job_template def successful_jobs_history_limit(self) -> Optional[int]: """ The number of successful finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified. Defaults to 3. 
""" return self.__successful_jobs_history_limit def failed_jobs_history_limit(self) -> Optional[int]: """ The number of failed finished jobs to retain. This is a pointer to distinguish between explicit zero and not specified. Defaults to 1. """ return self.__failed_jobs_history_limit class CronJob(base.TypedObject, base.NamespacedMetadataObject): """ CronJob represents the configuration of a single cron job. """ @context.scoped @typechecked def __init__( self, namespace: str = None, name: str = None, labels: Dict[str, str] = None, annotations: Dict[str, str] = None, spec: "CronJobSpec" = None, ): super().__init__( api_version="batch/v1beta1", kind="CronJob", **({"namespace": namespace} if namespace is not None else {}), **({"name": name} if name is not None else {}), **({"labels": labels} if labels is not None else {}), **({"annotations": annotations} if annotations is not None else {}), ) self.__spec = spec if spec is not None else CronJobSpec() @typechecked def _root(self) -> Dict[str, Any]: v = super()._root() spec = self.spec() check_type("spec", spec, Optional["CronJobSpec"]) v["spec"] = spec return v def spec(self) -> Optional["CronJobSpec"]: """ Specification of the desired behavior of a cron job, including the schedule. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status """ return self.__spec class JobTemplate(base.TypedObject, base.NamespacedMetadataObject): """ JobTemplate describes a template for creating copies of a predefined pod. 
""" @context.scoped @typechecked def __init__( self, namespace: str = None, name: str = None, labels: Dict[str, str] = None, annotations: Dict[str, str] = None, template: "JobTemplateSpec" = None, ): super().__init__( api_version="batch/v1beta1", kind="JobTemplate", **({"namespace": namespace} if namespace is not None else {}), **({"name": name} if name is not None else {}), **({"labels": labels} if labels is not None else {}), **({"annotations": annotations} if annotations is not None else {}), ) self.__template = template if template is not None else JobTemplateSpec() @typechecked def _root(self) -> Dict[str, Any]: v = super()._root() template = self.template() check_type("template", template, Optional["JobTemplateSpec"]) v["template"] = template return v def template(self) -> Optional["JobTemplateSpec"]: """ Defines jobs that will be created from this template. https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status """ return self.__template
var FileLoggerAdapter = require('../src/Adapters/Logger/FileLoggerAdapter').FileLoggerAdapter;
var Parse = require('parse/node').Parse;
var request = require('request');
var fs = require('fs');

// Every adapter under test writes into this throwaway folder so that
// afterEach() can wipe it without touching any real logs directory.
var LOGS_FOLDER = './test_logs/';

// Recursively delete a folder and its contents (fs.rmdirSync alone
// fails on non-empty directories).
var deleteFolderRecursive = function(path) {
  if( fs.existsSync(path) ) {
    fs.readdirSync(path).forEach(function(file){
      var curPath = path + "/" + file;
      if(fs.lstatSync(curPath).isDirectory()) { // recurse
        deleteFolderRecursive(curPath);
      } else { // delete file
        fs.unlinkSync(curPath);
      }
    });
    fs.rmdirSync(path);
  }
};

describe('info logs', () => {

  afterEach((done) => {
    deleteFolderRecursive(LOGS_FOLDER);
    done();
  });

  it("Verify INFO logs", (done) => {
    var fileLoggerAdapter = new FileLoggerAdapter({ logsFolder: LOGS_FOLDER });
    fileLoggerAdapter.info('testing info logs', () => {
      fileLoggerAdapter.query({
        size: 1,
        level: 'info'
      }, (results) => {
        if(results.length == 0) {
          fail('The adapter should return non-empty results');
          done();
        } else {
          expect(results[0].message).toEqual('testing info logs');
          done();
        }
      });
    });
  });
});

describe('error logs', () => {

  afterEach((done) => {
    deleteFolderRecursive(LOGS_FOLDER);
    done();
  });

  it("Verify ERROR logs", (done) => {
    // BUG FIX: this adapter was previously constructed with no options, so
    // it logged into the default folder while afterEach() cleaned
    // LOGS_FOLDER — the written log files leaked between test runs.
    var fileLoggerAdapter = new FileLoggerAdapter({ logsFolder: LOGS_FOLDER });
    fileLoggerAdapter.error('testing error logs', () => {
      fileLoggerAdapter.query({
        size: 1,
        level: 'error'
      }, (results) => {
        if(results.length == 0) {
          fail('The adapter should return non-empty results');
          done();
        } else {
          expect(results[0].message).toEqual('testing error logs');
          done();
        }
      });
    });
  });
});
#########################################
## Written by steven.feltner@spot.io
## Script to update the desiredCount (# of tasks) for all services that have
## fewer running tasks than desired.
#########################################

### Parameters ###
cluster = ''
region = ''
desiredCount = 0
# AWS Profile Name (Optional)
profile_name = ''
###################

import boto3
from botocore.exceptions import ProfileNotFound
from botocore.exceptions import ClientError


def _make_client():
    """Return an ECS client, preferring the named profile when it exists.

    Falls back to the default credential chain when the profile is not
    configured, so `client` is always bound (the previous version could
    leave it undefined and crash later with a NameError).
    """
    try:
        session = boto3.session.Session(profile_name=profile_name)
        return session.client('ecs', region_name=region)
    except ProfileNotFound as err:
        print(err)
        print("Trying without profile...")
        return boto3.client('ecs', region_name=region)


def _service_names(client):
    """Yield every service name in the cluster.

    Uses the list_services paginator so clusters with more than one page
    of services are fully covered (a bare maxResults=100 call silently
    truncates the list).
    """
    paginator = client.get_paginator('list_services')
    for page in paginator.paginate(cluster=cluster):
        for arn in page['serviceArns']:
            # The service name is everything after the first '/' in the ARN.
            yield arn.split('/', 1)[1]


def main():
    """Scale every service with runningCount < desiredCount to `desiredCount`."""
    client = _make_client()
    print('---------------------')
    try:
        for name in _service_names(client):
            response = client.describe_services(cluster=cluster, services=[name])
            svc = response['services'][0]
            if svc['runningCount'] < svc['desiredCount']:
                print("Services that have more desired than running will now be scaled down...")
                print("ServiceName: " + svc['serviceName'])
                print("desiredCount = " + str(svc['desiredCount'])
                      + " runningCount = " + str(svc['runningCount']))
                client.update_service(cluster=cluster, service=name,
                                      desiredCount=desiredCount)
                print("Updated service: " + name
                      + " to a desired count of: " + str(desiredCount))
    except ClientError as err:
        print(err)


if __name__ == '__main__':
    main()
const logger = require('loglevel');
const defaultHost = require('./host');
const validateOptions = require('./validateOptions');
const { getDefaultLineTransformers } = require('./transformers');
const { convertLegacyThemeOption } = require('./themeUtils');
const { processExtensions } = require('./processExtension');

/**
 * Normalizes and validates plugin options, pre-processes any declared
 * extensions, and returns the resolved options object.
 *
 * @param {PluginOptions} options
 * @param {GatsbyCache} cache
 * @returns {Promise<PluginOptions>}
 */
async function setup(options, cache) {
  const {
    theme = 'Default Dark+',
    colorTheme: legacyTheme,
    wrapperClassName = '',
    languageAliases = {},
    extensions = [],
    getLineClassName = () => '',
    injectStyles = true,
    replaceColor = x => x,
    logLevel = 'warn',
    host = defaultHost,
    getLineTransformers = getDefaultLineTransformers,
    ...rest
  } = options;

  logger.setLevel(logLevel);

  // The legacy `colorTheme` option, when present, takes precedence.
  const resolvedTheme = legacyTheme ? convertLegacyThemeOption(legacyTheme) : theme;

  const resolved = {
    theme: resolvedTheme,
    colorTheme: legacyTheme,
    wrapperClassName,
    languageAliases,
    extensions,
    getLineClassName,
    injectStyles,
    replaceColor,
    logLevel,
    host,
    getLineTransformers
  };

  validateOptions(resolved);
  await processExtensions(extensions, host, cache);

  // Unrecognized keys are passed through untouched.
  return { ...resolved, ...rest };
}

module.exports = setup;
import tkinter as tk

from lib.app import Application


def main() -> None:
    """Build the Tk root window and hand control to the Application."""
    root = tk.Tk()
    root.title("SCARA Motion Control System")
    app = Application(root)
    # Center the window on screen before entering the event loop.
    root.eval("tk::PlaceWindow . center")
    # Route the window-manager close button through the app's shutdown hook.
    root.protocol("WM_DELETE_WINDOW", app.on_close)
    app.mainloop()


if __name__ == "__main__":
    main()
import numpy as np import pytest import scipy.sparse import tensorflow as tf import torch from scipy.sparse import csr_matrix from jina import DocumentArray, Document, DocumentArrayMemmap from tests import random_docs rand_array = np.random.random([10, 3]) def da_and_dam(): rand_docs = random_docs(100) da = DocumentArray() da.extend(rand_docs) rand_docs = random_docs(100) dam = DocumentArrayMemmap() dam.extend(rand_docs) return da, dam @pytest.mark.parametrize( 'array', [ rand_array, torch.Tensor(rand_array), tf.constant(rand_array), csr_matrix(rand_array), ], ) def test_set_embeddings_multi_kind(array): da = DocumentArray([Document() for _ in range(10)]) da.embeddings = array @pytest.mark.parametrize('da', da_and_dam()) def test_da_get_embeddings(da): np.testing.assert_almost_equal(da.get_attributes('embedding'), da.embeddings) @pytest.mark.parametrize('da', da_and_dam()) def test_embeddings_setter_da(da): emb = np.random.random((100, 128)) da.embeddings = emb np.testing.assert_almost_equal(da.embeddings, emb) for x, doc in zip(emb, da): np.testing.assert_almost_equal(x, doc.embedding) da.embeddings = None if hasattr(da, 'flush'): da.flush() assert not da.embeddings @pytest.mark.parametrize('da', da_and_dam()) def test_embeddings_wrong_len(da): embeddings = np.ones((2, 10)) with pytest.raises(ValueError): da.embeddings = embeddings @pytest.mark.parametrize('da', da_and_dam()) def test_blobs_getter_da(da): blobs = np.random.random((100, 10, 10)) da.blobs = blobs assert len(da) == 100 np.testing.assert_almost_equal(da.get_attributes('blob'), da.blobs) np.testing.assert_almost_equal(da.blobs, blobs) da.blobs = None if hasattr(da, 'flush'): da.flush() assert not da.blobs @pytest.mark.parametrize('da', da_and_dam()) def test_texts_getter_da(da): assert len(da.texts) == 100 assert da.texts == da.get_attributes('text') texts = ['text' for _ in range(100)] da.texts = texts assert da.texts == texts for x, doc in zip(texts, da): assert x == doc.text da.texts = None if 
hasattr(da, 'flush'): da.flush() # unfortunately protobuf does not distinguish None and '' on string # so non-set str field in Pb is '' assert da.texts == [''] * 100 @pytest.mark.parametrize('da', da_and_dam()) def test_texts_wrong_len(da): texts = ['hello'] with pytest.raises(ValueError): da.texts = texts @pytest.mark.parametrize('da', da_and_dam()) def test_blobs_wrong_len(da): blobs = np.ones((2, 10, 10)) with pytest.raises(ValueError): da.blobs = blobs @pytest.mark.parametrize('da', da_and_dam()) def test_buffers_getter_setter(da): with pytest.raises(ValueError): da.buffers = [b'cc', b'bb', b'aa', b'dd'] da.buffers = [b'aa'] * len(da) assert da.buffers == [b'aa'] * len(da) da.buffers = None if hasattr(da, 'flush'): da.flush() # unfortunately protobuf does not distinguish None and '' on string # so non-set str field in Pb is '' assert da.buffers == [b''] * 100 def test_zero_embeddings(): a = np.zeros([10, 6]) da = DocumentArray.empty(10) # all zero, dense da.embeddings = a np.testing.assert_almost_equal(da.embeddings, a) for d in da: assert d.embedding.shape == (6,) # all zero, sparse sp_a = scipy.sparse.coo_matrix(a) da.embeddings = sp_a np.testing.assert_almost_equal(da.embeddings.todense(), sp_a.todense()) for d in da: # scipy sparse row-vector can only be a (1, m) not squeezible assert d.embedding.shape == (1, 6) # near zero, sparse a = np.random.random([10, 6]) a[a > 0.1] = 0 sp_a = scipy.sparse.coo_matrix(a) da.embeddings = sp_a np.testing.assert_almost_equal(da.embeddings.todense(), sp_a.todense()) for d in da: # scipy sparse row-vector can only be a (1, m) not squeezible assert d.embedding.shape == (1, 6)
const express = require("express");

const PORT = 80;
const SECRET_NUMBER = 5;

const app = express();

// Serve static assets from ./public at the site root.
app.use("/", express.static("public"));

// Tiny demo API: only the one "magic" number yields the secret payload.
// Note: route params are strings, so loose equality is intentional here.
app.get("/api/:number", (req, res) => {
  const payload =
    req.params.number == SECRET_NUMBER
      ? { "secret": "You got the secret!" }
      : { "error": "Not found!" };
  res.json(payload);
});

app.listen(PORT, () => {
  console.log("Server Running!");
});
/**
 * meraki
 *
 * This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ).
 */

'use strict';

const BaseModel = require('./BaseModel');

/**
 * Creates an instance of PortRuleModel
 *
 * Plain data model for a single port-forwarding rule; every field is
 * copied from the raw API object via BaseModel.getValue.
 */
class PortRuleModel extends BaseModel {
  /**
   * @constructor
   * @param {Object} obj The object passed to constructor
   */
  constructor(obj) {
    super(obj);
    // Tolerate construction without a payload (fields stay undefined).
    if (obj === undefined || obj === null) return;
    this.name = this.constructor.getValue(obj.name);
    this.protocol = this.constructor.getValue(obj.protocol);
    this.publicPort = this.constructor.getValue(obj.publicPort);
    this.localIp = this.constructor.getValue(obj.localIp);
    this.localPort = this.constructor.getValue(obj.localPort);
    this.allowedIps = this.constructor.getValue(obj.allowedIps);
  }

  /**
   * Function containing information about the fields of this model
   * @return {array} Array of objects containing information about the fields
   */
  static mappingInfo() {
    return super.mappingInfo().concat([
      { name: 'name', realName: 'name' },
      { name: 'protocol', realName: 'protocol' },
      { name: 'publicPort', realName: 'publicPort' },
      { name: 'localIp', realName: 'localIp' },
      { name: 'localPort', realName: 'localPort' },
      // allowedIps is the only array-typed field in this model.
      { name: 'allowedIps', realName: 'allowedIps', array: true },
    ]);
  }

  /**
   * Function containing information about discriminator values
   * mapped with their corresponding model class names
   *
   * @return {object} Object containing Key-Value pairs mapping discriminator
   * values with their corresponding model classes
   */
  static discriminatorMap() {
    // No polymorphic subtypes for this model.
    return {};
  }
}

module.exports = PortRuleModel;
from typing import List, Optional

from holidaycal.holiday import AbstractHoliday, LondonBankHolidays, NYBankHolidays


class AbstractCalendar:
    """
    Abstract object to create a calendar with a list of holiday rules.
    """

    # Subclasses normally override this class attribute with their rules;
    # an instance-level override may also be supplied via __init__.
    rules: List[AbstractHoliday] = []

    def __init__(self, name: Optional[str] = None, rules: Optional[List[AbstractHoliday]] = None):
        """Base calendar object. Initializes a calendar with holidays.

        Normally the class defines the list of holiday rules.

        Args:
            name: Name of the calendar, defaults to class name
            rules: Holiday or list of Holiday objects
        """
        super().__init__()
        if name is None:
            name = type(self).__name__
        self.name = name
        if rules is not None:
            self.rules = rules

    def holidays(self, start_date, end_date, names=False, observed=False):
        """Returns the holidays between start_date and end_date.

        Returns the holidays between start_date and end_date, inclusive,
        optionally with holiday names. If observed is True, adjusts holidays
        to observed weekday dates.

        Args:
            start_date (datetime-like): Starting date
            end_date (datetime-like): Ending date
            names (bool): If True, return (holiday name, date) tuples
                instead of bare dates. Default False.
            observed (bool): If True, use observed dates. Default False.

        Returns:
            list: List of dates, or of (holiday name, date) tuples when
            ``names`` is True.

        Raises:
            ValueError: If the calendar has no holiday rules.
        """
        if not self.rules:
            raise ValueError('Calendar must have holiday rules.')
        holidays = [(r.name, h) for r in self.rules
                    for h in r.dates(start_date, end_date, observed)]
        # Sort chronologically regardless of which rule produced each date.
        holidays.sort(key=lambda h: h[1])
        if not names:
            holidays = [h[1] for h in holidays]
        return holidays

    def holiday_names(self):
        """Returns the names of the holiday rules in the calendar."""
        return [h.name for h in self.rules]

    def __repr__(self):
        num_rules = len(self.rules) if self.rules else 0
        return f'Calendar: {self.name} ({num_rules} holiday rules)'


class NYBankHolidayCalendar(AbstractCalendar):
    """New York bank holiday calendar."""

    rules = [
        NYBankHolidays.NewYearsDay,
        NYBankHolidays.MLKDay,
        # NYBankHolidays.LincolnsBirthday,
        NYBankHolidays.WashingtonsBirthday,
        NYBankHolidays.MemorialDay,
        # NYBankHolidays.FlagDay,
        NYBankHolidays.Juneteenth,
        NYBankHolidays.IndependenceDay,
        NYBankHolidays.LaborDay,
        NYBankHolidays.ColumbusDay,
        # NYBankHolidays.GeneralElection,
        NYBankHolidays.VeteransDay,
        NYBankHolidays.Thanksgiving,
        NYBankHolidays.ChristmasDay
    ]


class LondonBankHolidayCalendar(AbstractCalendar):
    """London (UK) bank holiday calendar."""

    rules = [
        LondonBankHolidays.NewYearsDay,
        LondonBankHolidays.GoodFriday,
        LondonBankHolidays.EasterMonday,
        LondonBankHolidays.EarlyMay,
        LondonBankHolidays.EarlyMayVEAnniversary,
        LondonBankHolidays.SpringHoliday,
        LondonBankHolidays.SummerHoliday,
        LondonBankHolidays.Christmas,
        LondonBankHolidays.BoxingDay,
        LondonBankHolidays.Jubilees
    ]
// Copyright (c) %%year%% by Code Computerlove (http://www.codecomputerlove.com) // Licensed under the MIT license // version: %%version%% (function (window, Util) { Util.extend(Util, { Events: { /* * Function: add * Add an event handler */ add: function(obj, type, handler){ this._checkHandlersProperty(obj); if (type === 'mousewheel'){ type = this._normaliseMouseWheelType(); } if (typeof obj.__eventHandlers[type] === 'undefined'){ obj.__eventHandlers[type] = []; } obj.__eventHandlers[type].push(handler); // DOM element if (this._isBrowserObject(obj)){ obj.addEventListener(type, handler, false); } }, /* * Function: remove * Removes a handler or all handlers associated with a type */ remove: function(obj, type, handler){ this._checkHandlersProperty(obj); if (type === 'mousewheel'){ type = this._normaliseMouseWheelType(); } if (obj.__eventHandlers[type] instanceof Array){ var i, j, handlers = obj.__eventHandlers[type]; // Removing all handlers for a type if (Util.isNothing(handler)){ if (this._isBrowserObject(obj)){ for (i=0, j=handlers.length; i<j; i++){ obj.removeEventListener(type, handlers[i], false); } } obj.__eventHandlers[type] = []; return; } // Removing a specific handler for (i=0, j=handlers.length; i<j; i++){ if (handlers[i] === handler){ handlers.splice(i, 1); break; } } // DOM element if (this._isBrowserObject(obj)){ obj.removeEventListener(type, handler, false); return; } } }, /* * Function: fire * Fire an event */ fire: function(obj, type){ var i, j, event, listeners, listener, args = Array.prototype.slice.call(arguments).splice(2), isNative; if (type === 'mousewheel'){ type = this._normaliseMouseWheelType(); } // DOM element if (this._isBrowserObject(obj)){ if (typeof type !== "string"){ throw 'type must be a string for DOM elements'; } isNative = this._NATIVE_EVENTS[type]; event = document.createEvent(isNative ? "HTMLEvents" : "UIEvents"); event[isNative ? 
'initEvent' : 'initUIEvent'](type, true, true, window, 1); // Fire an event on an element that has no extra arguments if (args.length < 1){ obj.dispatchEvent(event); return; } } this._checkHandlersProperty(obj); if (typeof type === "string"){ event = { type: type }; } else{ event = type; } if (!event.target){ event.target = obj; } if (!event.type){ throw new Error("Event object missing 'type' property."); } if (obj.__eventHandlers[event.type] instanceof Array){ listeners = obj.__eventHandlers[event.type]; args.unshift(event); for (i=0, j=listeners.length; i<j; i++){ listener = listeners[i]; if (!Util.isNothing(listener)){ listener.apply(obj, args); } } } }, /* * Function: getMousePosition */ getMousePosition: function(event){ var retval = { x: 0, y: 0 }; if (event.pageX) { retval.x = event.pageX; } else if (event.clientX) { retval.x = event.clientX + (document.documentElement.scrollLeft || document.body.scrollLeft); } if (event.pageY) { retval.y = event.pageY; } else if (event.clientY) { retval.y = event.clientY + ( document.documentElement.scrollTop || document.body.scrollTop); } return retval; }, /* * Function: getTouchEvent */ getTouchEvent: function(event){ return event; }, /* * Function: getWheelDelta */ getWheelDelta: function(event){ var delta = 0; if (!Util.isNothing(event.wheelDelta)){ delta = event.wheelDelta / 120; } else if (!Util.isNothing(event.detail)){ delta = -event.detail / 3; } return delta; }, /* * Function: domReady */ domReady: function(handler){ document.addEventListener('DOMContentLoaded', handler, false); }, _checkHandlersProperty: function(obj){ if (Util.isNothing(obj.__eventHandlers)){ Util.extend(obj, { __eventHandlers: { } }); } }, _isBrowserObject: function(obj){ if (obj === window || obj === window.document){ return true; } return this._isElement(obj) || this._isNode(obj); }, _isElement: function(obj){ return ( typeof window.HTMLElement === "object" ? 
obj instanceof window.HTMLElement : //DOM2 typeof obj === "object" && obj.nodeType === 1 && typeof obj.nodeName==="string" ); }, _isNode: function(obj){ return ( typeof window.Node === "object" ? obj instanceof window.Node : typeof obj === "object" && typeof obj.nodeType === "number" && typeof obj.nodeName==="string" ); }, _normaliseMouseWheelType: function(){ if (Util.Browser.isEventSupported('mousewheel')){ return 'mousewheel'; } return 'DOMMouseScroll'; }, _NATIVE_EVENTS: { click: 1, dblclick: 1, mouseup: 1, mousedown: 1, contextmenu: 1, //mouse buttons mousewheel: 1, DOMMouseScroll: 1, //mouse wheel mouseover: 1, mouseout: 1, mousemove: 1, selectstart: 1, selectend: 1, //mouse movement keydown: 1, keypress: 1, keyup: 1, //keyboard orientationchange: 1, // mobile touchstart: 1, touchmove: 1, touchend: 1, touchcancel: 1, // touch gesturestart: 1, gesturechange: 1, gestureend: 1, // gesture focus: 1, blur: 1, change: 1, reset: 1, select: 1, submit: 1, //form elements load: 1, unload: 1, beforeunload: 1, resize: 1, move: 1, DOMContentLoaded: 1, readystatechange: 1, //window error: 1, abort: 1, scroll: 1 } } }); } ( window, window.Code.Util ));
import os
import sys

# Make the repository root importable so `tools` and `simpleAICV`
# resolve when this config lives four directories deep.
BASE_DIR = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.dirname(
        os.path.abspath(__file__)))))
sys.path.append(BASE_DIR)

import numpy as np
from tools.path import CIFAR100_path

from simpleAICV.classification import backbones
from simpleAICV.classification import losses
from simpleAICV.classification.datasets.cifar100dataset import CIFAR100Dataset
from simpleAICV.classification.common import Opencv2PIL, TorchPad, TorchRandomHorizontalFlip, TorchRandomCrop, TorchMeanStdNormalize, ClassificationCollater, load_state_dict

import torch
import torchvision.transforms as transforms


class config:
    # Namespace-style training configuration: ResNet-101 on CIFAR-100
    # (32x32 inputs, 100 classes). All attributes are read by the trainer.
    network = 'resnet101cifar'
    num_classes = 100
    input_image_size = 32

    # Backbone is looked up by name in the backbones module registry.
    model = backbones.__dict__[network](**{
        'num_classes': num_classes,
    })

    # load pretrained model or not
    # (empty path: load_state_dict is presumably a no-op then — TODO confirm)
    trained_model_path = ''
    load_state_dict(trained_model_path, model)

    criterion = losses.__dict__['CELoss']()

    train_dataset = CIFAR100Dataset(
        root_dir=CIFAR100_path,
        set_name='train',
        transform=transforms.Compose([
            Opencv2PIL(),
            # Standard CIFAR augmentation: pad 4 + random 32x32 crop + flip.
            TorchPad(padding=4, fill=0, padding_mode='reflect'),
            TorchRandomHorizontalFlip(prob=0.5),
            TorchRandomCrop(resize=input_image_size),
            # NOTE(review): mean/std look like per-channel CIFAR-100 stats on
            # the 0-255 scale, rescaled to 0-1 — confirm against the dataset.
            TorchMeanStdNormalize(mean=np.array([125.3, 123.0, 113.9]) / 255.0,
                                  std=np.array([63.0, 62.1, 66.7]) / 255.0),
        ]))
    val_dataset = CIFAR100Dataset(
        root_dir=CIFAR100_path,
        set_name='test',
        transform=transforms.Compose([
            Opencv2PIL(),
            # Validation uses normalization only — no augmentation.
            TorchMeanStdNormalize(mean=np.array([125.3, 123.0, 113.9]) / 255.0,
                                  std=np.array([63.0, 62.1, 66.7]) / 255.0),
        ]))
    collater = ClassificationCollater()

    seed = 0
    # batch_size is total size
    batch_size = 128
    # num_workers is total workers
    num_workers = 16

    # choose 'SGD' or 'AdamW'
    optimizer = (
        'SGD',
        {
            'lr': 0.1,
            'momentum': 0.9,
            'weight_decay': 5e-4,
        },
    )

    # Step schedule: decay lr by `gamma` at each milestone epoch.
    scheduler = (
        'MultiStepLR',
        {
            'warm_up_epochs': 0,
            'gamma': 0.2,
            'milestones': [60, 120, 160],
        },
    )

    epochs = 200
    print_interval = 50

    sync_bn = False
    apex = True
module.exports = api => { api.cache(false) return { presets: ['@babel/preset-typescript', '@babel/preset-env'], plugins: ['@babel/plugin-proposal-class-properties'] } }
import numpy as np
import gym
from gym import Wrapper
from gym.wrappers.time_limit import TimeLimit
from collections import namedtuple
import paper_gym
import gym_minigrid
import gym_miniworld
import gym_classics
from rlpyt.envs.base import EnvSpaces, EnvStep
from rlpyt.envs.wrappers import RLPYT_WRAPPER_KEY
from rlpyt.spaces.gym_wrapper import GymSpaceWrapper
from rlpyt.utils.collections import is_namedtuple_class


class GymEnvWrapper(Wrapper):
    """Gym-style wrapper for converting the Openai Gym interface to the rlpyt
    interface.  Action and observation spaces are wrapped by rlpyt's
    ``GymSpaceWrapper``.

    Output `env_info` is automatically converted from a dictionary to a
    corresponding namedtuple, which the rlpyt sampler expects.  For this to
    work, every key that might appear in the gym environments `env_info` at
    any step must appear at the first step after a reset, as the `env_info`
    entries will have sampler memory pre-allocated for them (so they also
    cannot change dtype or shape).  (see `EnvInfoWrapper`,
    `build_info_tuples`, and `info_to_nt` in file or more help/details)

    Warning:
        Unrecognized keys in `env_info` appearing later during use will be
        silently ignored.

    This wrapper looks for gym's ``TimeLimit`` env wrapper to
    see whether to add the field ``timeout`` to env info.
    """

    def __init__(self, env, act_null_value=0, obs_null_value=0, force_float32=True):
        """Wrap ``env``; performs one reset + one random step up front to
        obtain a concrete example ``info`` dict from which the env_info
        namedtuple classes are pre-built."""
        super().__init__(env)
        o = self.env.reset()
        o, r, d, info = self.env.step(self.env.action_space.sample())
        env_ = self.env
        time_limit = isinstance(self.env, TimeLimit)
        # Walk down the wrapper chain looking for gym's TimeLimit wrapper.
        while not time_limit and hasattr(env_, "env"):
            env_ = env_.env
            time_limit = isinstance(env_, TimeLimit)
        if time_limit:
            info["timeout"] = False  # gym's TimeLimit.truncated invalid name.
        self.transfer = getattr(env_, "transfer", self.transfer)  # Check environment for a transfer function (or use default which does nothing)
        self._time_limit = time_limit
        self.action_space = GymSpaceWrapper(
            space=self.env.action_space,
            name="act",
            null_value=act_null_value,
            force_float32=force_float32,
        )
        self.observation_space = GymSpaceWrapper(
            space=self.env.observation_space,
            name="obs",
            null_value=obs_null_value,
            force_float32=force_float32,
        )
        # Register module-level namedtuple classes matching this env_info
        # structure (so later steps can be converted via info_to_nt).
        build_info_tuples(info)

    def step(self, action):
        """Reverts the action from rlpyt format to gym format (i.e. if
        composite-to-dictionary spaces), steps the gym environment, converts
        the observation from gym to rlpyt format (i.e. if dict-to-composite),
        and converts the env_info from dictionary into namedtuple."""
        a = self.action_space.revert(action)
        if a.size == 1:
            # Unwrap single-element arrays to a plain Python scalar.
            a = a.item()
        o, r, d, info = self.env.step(a)
        obs = self.observation_space.convert(o)
        obs = np.asarray(obs)
        if self._time_limit:
            # Rename gym's "TimeLimit.truncated" key (invalid namedtuple
            # field name) to "timeout", defaulting to False when absent.
            if "TimeLimit.truncated" in info:
                info["timeout"] = info.pop("TimeLimit.truncated")
            else:
                info["timeout"] = False
        info = info_to_nt(info)
        if isinstance(r, float):
            r = np.dtype("float32").type(r)  # Scalar float32.
        return EnvStep(obs, r, d, info)

    def reset(self):
        """Returns converted observation from gym env reset."""
        return self.observation_space.convert(self.env.reset())

    def transfer(self, arg):
        """Non-functioning environment transfer (default used when the
        wrapped env does not provide its own ``transfer``)."""
        return None

    @property
    def spaces(self):
        """Returns the rlpyt spaces for the wrapped env."""
        return EnvSpaces(
            observation=self.observation_space,
            action=self.action_space,
        )


def build_info_tuples(info, name="info"):
    """Recursively register (in module globals) namedtuple classes matching
    the key structure of the example ``info`` dict; nested dicts get classes
    named ``<name>_<key>``."""
    # Define namedtuples at module level for pickle.
    # Only place rlpyt uses pickle is in the sampler, when getting the
    # first examples, to avoid MKL threading issues...can probably turn
    # that off, (look for subprocess=True --> False), and then might
    # be able to define these directly within the class.
    ntc = globals().get(name)  # Define at module level for pickle.
    # Sanitize keys: "." is not valid in a namedtuple field name.
    info_keys = [str(k).replace(".", "_") for k in info.keys()]
    if ntc is None:
        globals()[name] = namedtuple(name, info_keys)
    elif not (is_namedtuple_class(ntc) and
            sorted(ntc._fields) == sorted(info_keys)):
        # An existing global with this name that isn't a matching namedtuple.
        raise ValueError(f"Name clash in globals: {name}.")
    for k, v in info.items():
        if isinstance(v, dict):
            build_info_tuples(v, "_".join([name, k]))


def info_to_nt(value, name="info"):
    """Convert an env_info dict (possibly nested) into the registered
    namedtuple class ``name``; non-dict values pass through unchanged.
    NOTE(review): keys are matched unsanitized here, so keys containing "."
    (renamed with "_" in build_info_tuples) would be dropped — in practice
    step() pops "TimeLimit.truncated" before calling this; confirm no other
    dotted keys occur."""
    if not isinstance(value, dict):
        return value
    ntc = globals()[name]
    # Disregard unrecognized keys:
    values = {k: info_to_nt(v, "_".join([name, k]))
        for k, v in value.items() if k in ntc._fields}
    # Can catch some missing values (doesn't nest):
    values.update({k: 0 for k in ntc._fields if k not in values})
    return ntc(**values)


# To use: return a dict of keys and default values which sometimes appear in
# the wrapped env's env_info, so this env always presents those values (i.e.
# make keys and values keep the same structure and shape at all time steps.)
# Here, a dict of kwargs to be fed to `sometimes_info` should be passed as an
# env_kwarg into the `make` function, which should be used as the EnvCls.
# def sometimes_info(*args, **kwargs):
#     # e.g. Feed the env_id.
#     # Return a dictionary (possibly nested) of keys: default_values
#     # for this env.
#     return {}


class EnvInfoWrapper(Wrapper):
    """Gym-style environment wrapper to infill the `env_info` dict of every
    ``step()`` with a pre-defined set of examples, so that `env_info` has
    those fields at every step and they are made available to the algorithm
    in the sampler's batch of data.
    """

    def __init__(self, env, info_example):
        super().__init__(env)
        # self._sometimes_info = sometimes_info(**sometimes_info_kwargs)
        self._sometimes_info = info_example

    def step(self, action):
        """If need be, put extra fields into the `env_info` dict returned.
        See file for function ``infill_info()`` for details."""
        o, r, d, info = super().step(action)
        # Try to make info dict same key structure at every step.
        return o, r, d, infill_info(info, self._sometimes_info)


def infill_info(info, sometimes_info):
    """Recursively insert every key/value from ``sometimes_info`` that is
    missing from ``info`` (mutates and returns ``info``)."""
    for k, v in sometimes_info.items():
        if k not in info:
            info[k] = v
        elif isinstance(v, dict):
            infill_info(info[k], v)
    return info


def make(*args, info_example=None, **kwargs):
    """Use as factory function for making instances of gym environment with
    rlpyt's ``GymEnvWrapper``, using ``gym.make(*args, **kwargs)``.  If
    ``info_example`` is not ``None``, will include the ``EnvInfoWrapper``.

    Updated to look for a list of wrappers to apply in FIFO order to the
    underlying gym environment (before applying rlpyt wrapper)
    """
    wrapper_classes = kwargs.pop(RLPYT_WRAPPER_KEY, [])  # Pop a list of gym wrappers (or None)
    env = gym.make(*args, **kwargs)
    # Apply extra gym wrappers innermost-first (FIFO order).
    for wrapper_class in wrapper_classes:
        env = wrapper_class(env)
    if info_example is None:
        return GymEnvWrapper(env)
    else:
        return GymEnvWrapper(EnvInfoWrapper(env, info_example))
from django.conf.urls import url, include
from . import views
from django.conf import settings
from django.conf.urls.static import static

# URL routing for the app: page views first, then REST API endpoints.
urlpatterns = [
    # Landing page.
    url(r'^$', views.index, name='Index'),
    url(r'^create/profile$', views.create_profile, name='create-profile'),
    url(r'^new/project$', views.new_project, name='new-project'),
    # NOTE(review): these prefix patterns are unanchored (no trailing $),
    # so they also match any longer path beginning with the prefix —
    # confirm that is intended.
    url(r'^directory/', views.directory, name='directory'),
    url(r'^profile/', views.profile, name='profile'),
    # Numeric site id captured as a positional argument.
    url(r'^site/(\d+)', views.site, name='site'),
    url(r'^search/', views.search_results, name='search_results'),
    # Username (word characters, up to 50) captured as keyword arg `username`.
    url(r'^user/(?P<username>\w{0,50})', views.user_profile, name='user-profile'),
    # REST API list endpoints (class-based views).
    url(r'^api/profiles/$', views.ProfileList.as_view()),
    url(r'^api/projects/$', views.ProjectList.as_view()),
    url(r'^api/categories/$', views.categoriesList.as_view()),
    url(r'^api/countries/$', views.countriesList.as_view()),
    url(r'^api/technologies/$', views.technologiesList.as_view()),
    url(r'^api/colors/$', views.colorsList.as_view()),
]

# Serve user-uploaded media through Django only in development (DEBUG).
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)